target.def (conditional_register_usage): Define.
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic-core.h"
53 #include "builtins.h"
54
55 #ifndef SLOW_UNALIGNED_ACCESS
56 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
57 #endif
58
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 #endif
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63
/* Per-target builtin state; when the compiler supports switchable
   targets, THIS_TARGET_BUILTINS points at the active copy.  */
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Build the table of builtin enumerator names by stringizing the first
   DEF_BUILTIN argument for every entry in builtins.def.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
87
88 static const char *c_getstr (tree);
89 static rtx c_readstr (const char *, enum machine_mode);
90 static int target_char_cast (tree, char *);
91 static rtx get_memory_rtx (tree, tree);
92 static int apply_args_size (void);
93 static int apply_result_size (void);
94 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
95 static rtx result_vector (int, rtx);
96 #endif
97 static void expand_builtin_update_setjmp_buf (rtx);
98 static void expand_builtin_prefetch (tree);
99 static rtx expand_builtin_apply_args (void);
100 static rtx expand_builtin_apply_args_1 (void);
101 static rtx expand_builtin_apply (rtx, rtx, rtx);
102 static void expand_builtin_return (rtx);
103 static enum type_class type_to_class (tree);
104 static rtx expand_builtin_classify_type (tree);
105 static void expand_errno_check (tree, rtx);
106 static rtx expand_builtin_mathfn (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
109 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
110 static rtx expand_builtin_interclass_mathfn (tree, rtx);
111 static rtx expand_builtin_sincos (tree);
112 static rtx expand_builtin_cexpi (tree, rtx);
113 static rtx expand_builtin_int_roundingfn (tree, rtx);
114 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
115 static rtx expand_builtin_next_arg (void);
116 static rtx expand_builtin_va_start (tree);
117 static rtx expand_builtin_va_end (tree);
118 static rtx expand_builtin_va_copy (tree);
119 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strcmp (tree, rtx);
121 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
122 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
123 static rtx expand_builtin_memcpy (tree, rtx);
124 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
126 enum machine_mode, int);
127 static rtx expand_builtin_strcpy (tree, rtx);
128 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
129 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx);
131 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
132 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
133 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_alloca (tree, bool);
137 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
138 static rtx expand_builtin_frame_address (tree, tree);
139 static tree stabilize_va_list_loc (location_t, tree, int);
140 static rtx expand_builtin_expect (tree, rtx);
141 static tree fold_builtin_constant_p (tree);
142 static tree fold_builtin_expect (location_t, tree, tree);
143 static tree fold_builtin_classify_type (tree);
144 static tree fold_builtin_strlen (location_t, tree, tree);
145 static tree fold_builtin_inf (location_t, tree, int);
146 static tree fold_builtin_nan (tree, tree, int);
147 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
148 static bool validate_arg (const_tree, enum tree_code code);
149 static bool integer_valued_real_p (tree);
150 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
151 static bool readonly_data_expr (tree);
152 static rtx expand_builtin_fabs (tree, rtx, rtx);
153 static rtx expand_builtin_signbit (tree, rtx);
154 static tree fold_builtin_sqrt (location_t, tree, tree);
155 static tree fold_builtin_cbrt (location_t, tree, tree);
156 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
157 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
158 static tree fold_builtin_cos (location_t, tree, tree, tree);
159 static tree fold_builtin_cosh (location_t, tree, tree, tree);
160 static tree fold_builtin_tan (tree, tree);
161 static tree fold_builtin_trunc (location_t, tree, tree);
162 static tree fold_builtin_floor (location_t, tree, tree);
163 static tree fold_builtin_ceil (location_t, tree, tree);
164 static tree fold_builtin_round (location_t, tree, tree);
165 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
166 static tree fold_builtin_bitop (tree, tree);
167 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
168 static tree fold_builtin_strchr (location_t, tree, tree, tree);
169 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
170 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
171 static tree fold_builtin_strcmp (location_t, tree, tree);
172 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
173 static tree fold_builtin_signbit (location_t, tree, tree);
174 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
175 static tree fold_builtin_isascii (location_t, tree);
176 static tree fold_builtin_toascii (location_t, tree);
177 static tree fold_builtin_isdigit (location_t, tree);
178 static tree fold_builtin_fabs (location_t, tree, tree);
179 static tree fold_builtin_abs (location_t, tree, tree);
180 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
181 enum tree_code);
182 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
183 static tree fold_builtin_0 (location_t, tree, bool);
184 static tree fold_builtin_1 (location_t, tree, tree, bool);
185 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
186 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
187 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
188 static tree fold_builtin_varargs (location_t, tree, tree, bool);
189
190 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
191 static tree fold_builtin_strstr (location_t, tree, tree, tree);
192 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
193 static tree fold_builtin_strcat (location_t, tree, tree);
194 static tree fold_builtin_strncat (location_t, tree, tree, tree);
195 static tree fold_builtin_strspn (location_t, tree, tree);
196 static tree fold_builtin_strcspn (location_t, tree, tree);
197 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
198
199 static rtx expand_builtin_object_size (tree);
200 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
201 enum built_in_function);
202 static void maybe_emit_chk_warning (tree, enum built_in_function);
203 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
204 static void maybe_emit_free_warning (tree);
205 static tree fold_builtin_object_size (tree, tree);
206 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
207 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
208 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
209 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
210 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
211 enum built_in_function);
212 static bool init_target_chars (void);
213
214 static unsigned HOST_WIDE_INT target_newline;
215 static unsigned HOST_WIDE_INT target_percent;
216 static unsigned HOST_WIDE_INT target_c;
217 static unsigned HOST_WIDE_INT target_s;
218 static char target_percent_c[3];
219 static char target_percent_s[3];
220 static char target_percent_s_newline[4];
221 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
222 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
223 static tree do_mpfr_arg2 (tree, tree, tree,
224 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
225 static tree do_mpfr_arg3 (tree, tree, tree, tree,
226 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
227 static tree do_mpfr_sincos (tree, tree, tree);
228 static tree do_mpfr_bessel_n (tree, tree, tree,
229 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
230 const REAL_VALUE_TYPE *, bool);
231 static tree do_mpfr_remquo (tree, tree, tree);
232 static tree do_mpfr_lgamma_r (tree, tree, tree);
233
/* Return true if NAME starts with one of the reserved builtin
   prefixes, "__builtin_" or "__sync_".  */

bool
is_builtin_name (const char *name)
{
  static const char prefix_builtin[] = "__builtin_";
  static const char prefix_sync[] = "__sync_";

  return (strncmp (name, prefix_builtin, sizeof (prefix_builtin) - 1) == 0
	  || strncmp (name, prefix_sync, sizeof (prefix_sync) - 1) == 0);
}
245
246
247 /* Return true if DECL is a function symbol representing a built-in. */
248
249 bool
250 is_builtin_fn (tree decl)
251 {
252 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
253 }
254
255
256 /* Return true if NODE should be considered for inline expansion regardless
257 of the optimization level. This means whenever a function is invoked with
258 its "internal" name, which normally contains the prefix "__builtin". */
259
260 static bool
261 called_as_built_in (tree node)
262 {
263 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
264 we want the name used to call the function, not the name it
265 will have. */
266 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
267 return is_builtin_name (name);
268 }
269
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what.

   The result combines three sources of information: the declared or
   type alignment of the innermost referenced object, pointer alignment
   recorded on SSA names, and the known low bits of any constant or
   variable offsets applied to the access.  */

unsigned int
get_object_alignment (tree exp, unsigned int max_align)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align, inner;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    align = DECL_ALIGN (exp);
  else if (CONSTANT_CLASS_P (exp))
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      /* Targets may guarantee extra alignment for constants.  */
      align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
#endif
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    align = TYPE_ALIGN (TREE_TYPE (exp));
  else if (TREE_CODE (exp) == INDIRECT_REF)
    align = TYPE_ALIGN (TREE_TYPE (exp));
  else if (TREE_CODE (exp) == MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      struct ptr_info_def *pi;
      /* An address masked with a constant guarantees alignment equal to
	 the lowest set bit of the mask; strip the BIT_AND and remember
	 that guarantee.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}
      else
	align = BITS_PER_UNIT;
      /* Fold in alignment/misalignment recorded on the SSA pointer, or
	 recurse into the pointed-to object for an ADDR_EXPR base.  */
      if (TREE_CODE (addr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (addr)))
	{
	  bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
	  align = MAX (pi->align * BITS_PER_UNIT, align);
	}
      else if (TREE_CODE (addr) == ADDR_EXPR)
	align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
						  max_align));
      bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == TARGET_MEM_REF)
    {
      struct ptr_info_def *pi;
      tree addr = TMR_BASE (exp);
      /* Same mask handling as for MEM_REF above.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}
      else
	align = BITS_PER_UNIT;
      if (TREE_CODE (addr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (addr)))
	{
	  bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
	  align = MAX (pi->align * BITS_PER_UNIT, align);
	}
      else if (TREE_CODE (addr) == ADDR_EXPR)
	align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
						  max_align));
      if (TMR_OFFSET (exp))
	bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
      /* An indexed access can only be as aligned as its step's low
	 bits allow; with no known step assume the worst.  */
      if (TMR_INDEX (exp) && TMR_STEP (exp))
	{
	  unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
	  align = MIN (align, (step & -step) * BITS_PER_UNIT);
	}
      else if (TMR_INDEX (exp))
	align = BITS_PER_UNIT;
      if (TMR_INDEX2 (exp))
	align = BITS_PER_UNIT;
    }
  else
    align = BITS_PER_UNIT;

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  inner = max_align;
  while (offset)
    {
      tree next_offset;

      if (TREE_CODE (offset) == PLUS_EXPR)
	{
	  next_offset = TREE_OPERAND (offset, 0);
	  offset = TREE_OPERAND (offset, 1);
	}
      else
	next_offset = NULL;
      if (host_integerp (offset, 1))
	{
	  /* Any overflow in calculating offset_bits won't change
	     the alignment.  */
	  unsigned offset_bits
	    = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	  if (offset_bits)
	    inner = MIN (inner, (offset_bits & -offset_bits));
	}
      else if (TREE_CODE (offset) == MULT_EXPR
	       && host_integerp (TREE_OPERAND (offset, 1), 1))
	{
	  /* Any overflow in calculating offset_factor won't change
	     the alignment.  */
	  unsigned offset_factor
	    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
	       * BITS_PER_UNIT);

	  if (offset_factor)
	    inner = MIN (inner, (offset_factor & -offset_factor));
	}
      else
	{
	  /* An offset of unknown form gives no alignment guarantee.  */
	  inner = MIN (inner, BITS_PER_UNIT);
	  break;
	}
      offset = next_offset;
    }

  /* Alignment is innermost object alignment adjusted by the constant
     and non-constant offset parts.  */
  align = MIN (align, inner);
  bitpos = bitpos & (align - 1);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return MIN (align, max_align);
}
425
426 /* Returns true iff we can trust that alignment information has been
427 calculated properly. */
428
429 bool
430 can_trust_pointer_alignment (void)
431 {
432 /* We rely on TER to compute accurate alignment information. */
433 return (optimize && flag_tree_ter);
434 }
435
436 /* Return the alignment in bits of EXP, a pointer valued expression.
437 But don't return more than MAX_ALIGN no matter what.
438 The alignment returned is, by default, the alignment of the thing that
439 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
440
441 Otherwise, look at the expression to see if we can do better, i.e., if the
442 expression is actually pointing at an object whose alignment is tighter. */
443
444 unsigned int
445 get_pointer_alignment (tree exp, unsigned int max_align)
446 {
447 STRIP_NOPS (exp);
448
449 if (TREE_CODE (exp) == ADDR_EXPR)
450 return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
451 else if (TREE_CODE (exp) == SSA_NAME
452 && POINTER_TYPE_P (TREE_TYPE (exp)))
453 {
454 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
455 unsigned align;
456 if (!pi)
457 return BITS_PER_UNIT;
458 if (pi->misalign != 0)
459 align = (pi->misalign & -pi->misalign);
460 else
461 align = pi->align;
462 return MIN (max_align, align * BITS_PER_UNIT);
463 }
464
465 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
466 }
467
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.  NULL_TREE is returned
   whenever the length cannot be determined at compile time.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* A conditional whose two arms have the same known length has that
     length no matter which arm is taken.  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* Only the second operand of a COMPOUND_EXPR contributes its value.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
569
570 /* Return a char pointer for a C string if it is a string constant
571 or sum of string constant and integer constant. */
572
573 static const char *
574 c_getstr (tree src)
575 {
576 tree offset_node;
577
578 src = string_constant (src, &offset_node);
579 if (src == 0)
580 return 0;
581
582 if (offset_node == 0)
583 return TREE_STRING_POINTER (src);
584 else if (!host_integerp (offset_node, 1)
585 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
586 return 0;
587
588 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
589 }
590
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.

   The string is read as the target would read memory: host byte I is
   placed at the target bit position that byte order dictates, and
   bytes beyond the terminating NUL read as zero.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  /* CH doubles as a "still inside the string" flag: once it reads a
     NUL it stays zero and the remaining bytes are zero-filled.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Compute J, the target bit offset for host byte I, accounting
	 for byte order within a word and word order within the value
	 (they may differ when the mode spans multiple words).  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
623
624 /* Cast a target constant CST to target CHAR and if that value fits into
625 host char type, return zero and put that value into variable pointed to by
626 P. */
627
628 static int
629 target_char_cast (tree cst, char *p)
630 {
631 unsigned HOST_WIDE_INT val, hostval;
632
633 if (!host_integerp (cst, 1)
634 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
635 return 1;
636
637 val = tree_low_cst (cst, 1);
638 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
639 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
640
641 hostval = val;
642 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
643 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
644
645 if (val != hostval)
646 return 1;
647
648 *p = hostval;
649 return 0;
650 }
651
652 /* Similar to save_expr, but assumes that arbitrary code is not executed
653 in between the multiple evaluations. In particular, we assume that a
654 non-addressable local variable will not be modified. */
655
656 static tree
657 builtin_save_expr (tree exp)
658 {
659 if (TREE_ADDRESSABLE (exp) == 0
660 && (TREE_CODE (exp) == PARM_DECL
661 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
662 return exp;
663
664 return save_expr (exp);
665 }
666
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).

   FNDECL_CODE is BUILT_IN_FRAME_ADDRESS or BUILT_IN_RETURN_ADDRESS;
   COUNT is the number of frames to walk back.  Returns an rtx for the
   requested address.  Behavior is shaped by several target macros,
   each guarded by #ifdef below.  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

  /* Start from the current frame address; a target may supply its own
     expression for it.  */
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* By default the return address is stored one word past the frame
     address.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
750
751 /* Alias set used for setjmp buffer. */
752 static alias_set_type setjmp_alias_set = -1;
753
754 /* Construct the leading half of a __builtin_setjmp call. Control will
755 return to RECEIVER_LABEL. This is also called directly by the SJLJ
756 exception handling code. */
757
758 void
759 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
760 {
761 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
762 rtx stack_save;
763 rtx mem;
764
765 if (setjmp_alias_set == -1)
766 setjmp_alias_set = new_alias_set ();
767
768 buf_addr = convert_memory_address (Pmode, buf_addr);
769
770 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
771
772 /* We store the frame pointer and the address of receiver_label in
773 the buffer and use the rest of it for the stack save area, which
774 is machine-dependent. */
775
776 mem = gen_rtx_MEM (Pmode, buf_addr);
777 set_mem_alias_set (mem, setjmp_alias_set);
778 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
779
780 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
781 set_mem_alias_set (mem, setjmp_alias_set);
782
783 emit_move_insn (validize_mem (mem),
784 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
785
786 stack_save = gen_rtx_MEM (sa_mode,
787 plus_constant (buf_addr,
788 2 * GET_MODE_SIZE (Pmode)));
789 set_mem_alias_set (stack_save, setjmp_alias_set);
790 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
791
792 /* If there is further processing to do, do it. */
793 #ifdef HAVE_builtin_setjmp_setup
794 if (HAVE_builtin_setjmp_setup)
795 emit_insn (gen_builtin_setjmp_setup (buf_addr));
796 #endif
797
798 /* Tell optimize_save_area_alloca that extra work is going to
799 need to go on during alloca. */
800 cfun->calls_setjmp = 1;
801
802 /* We have a nonlocal label. */
803 cfun->has_nonlocal_label = 1;
804 }
805
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.

   RECEIVER_LABEL is the label a matching longjmp transfers control to;
   it is only consumed by targets providing a setjmp_receiver insn.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer is eliminated in favor of the hard
	 frame pointer there is nothing to restore; scan the target's
	 elimination table for that pair.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Prefer the target's dedicated receiver pattern; otherwise fall
     back to the nonlocal-goto receiver, if any.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
876
877 /* __builtin_longjmp is passed a pointer to an array of five words (not
878 all will be used on all machines). It operates similarly to the C
879 library function of the same name, but is more efficient. Much of
880 the code below is copied from the handling of non-local gotos. */
/* Buffer layout, as read below via the plus_constant offsets: word 0
   holds the saved frame pointer, word 1 the receiver label, word 2 the
   saved stack pointer.  VALUE must be const1_rtx, the value that
   __builtin_setjmp yields when control arrives via longjmp.  */
881
882 static void
883 expand_builtin_longjmp (rtx buf_addr, rtx value)
884 {
885 rtx fp, lab, stack, insn, last;
886 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
887
888 /* DRAP is needed for stack realign if longjmp is expanded to current
889 function */
890 if (SUPPORTS_STACK_ALIGNMENT)
891 crtl->need_drap = true;
892
/* Lazily create the alias set shared by all setjmp-buffer accesses.  */
893 if (setjmp_alias_set == -1)
894 setjmp_alias_set = new_alias_set ();
895
896 buf_addr = convert_memory_address (Pmode, buf_addr);
897
898 buf_addr = force_reg (Pmode, buf_addr);
899
900 /* We require that the user must pass a second argument of 1, because
901 that is what builtin_setjmp will return. */
902 gcc_assert (value == const1_rtx);
903
/* Remember the last insn emitted before this expansion; the backwards
   search at the end asserts it finds the jump before reaching here.  */
904 last = get_last_insn ();
905 #ifdef HAVE_builtin_longjmp
906 if (HAVE_builtin_longjmp)
907 emit_insn (gen_builtin_longjmp (buf_addr));
908 else
909 #endif
910 {
911 fp = gen_rtx_MEM (Pmode, buf_addr);
912 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
913 GET_MODE_SIZE (Pmode)));
914
915 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
916 2 * GET_MODE_SIZE (Pmode)));
917 set_mem_alias_set (fp, setjmp_alias_set);
918 set_mem_alias_set (lab, setjmp_alias_set);
919 set_mem_alias_set (stack, setjmp_alias_set);
920
921 /* Pick up FP, label, and SP from the block and jump. This code is
922 from expand_goto in stmt.c; see there for detailed comments. */
923 #ifdef HAVE_nonlocal_goto
924 if (HAVE_nonlocal_goto)
925 /* We have to pass a value to the nonlocal_goto pattern that will
926 get copied into the static_chain pointer, but it does not matter
927 what that value is, because builtin_setjmp does not use it. */
928 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
929 else
930 #endif
931 {
932 lab = copy_to_reg (lab);
933
/* Clobber all of memory (the BLKmode MEM of a SCRATCH is GCC's
   conventional representation for that) and the frame-pointer save
   area, so nothing is moved across the frame-pointer rewrite.  */
934 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
935 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
936
937 emit_move_insn (hard_frame_pointer_rtx, fp);
938 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
939
940 emit_use (hard_frame_pointer_rtx);
941 emit_use (stack_pointer_rtx);
942 emit_indirect_jump (lab);
943 }
944 }
945
946 /* Search backwards and mark the jump insn as a non-local goto.
947 Note that this precludes the use of __builtin_longjmp to a
948 __builtin_setjmp target in the same function. However, we've
949 already cautioned the user that these functions are for
950 internal exception handling use only. */
951 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
952 {
953 gcc_assert (insn != last);
954
955 if (JUMP_P (insn))
956 {
957 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
958 break;
959 }
960 else if (CALL_P (insn))
961 break;
962 }
963 }
964
965 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
966 and the address of the save area. */
/* Save-area layout read below: word 0 is the destination frame pointer,
   word 1 the destination stack pointer (in the target's nonlocal
   save-area mode).  Returns const0_rtx on success, or NULL_RTX when the
   argument list does not validate -- presumably so the caller falls
   back to another expansion strategy; TODO confirm against the caller
   in expand_builtin.  */
967
968 static rtx
969 expand_builtin_nonlocal_goto (tree exp)
970 {
971 tree t_label, t_save_area;
972 rtx r_label, r_save_area, r_fp, r_sp, insn;
973
974 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
975 return NULL_RTX;
976
977 t_label = CALL_EXPR_ARG (exp, 0);
978 t_save_area = CALL_EXPR_ARG (exp, 1);
979
980 r_label = expand_normal (t_label);
981 r_label = convert_memory_address (Pmode, r_label);
982 r_save_area = expand_normal (t_save_area);
983 r_save_area = convert_memory_address (Pmode, r_save_area);
984 /* Copy the address of the save location to a register just in case it was based
985 on the frame pointer. */
986 r_save_area = copy_to_reg (r_save_area);
987 r_fp = gen_rtx_MEM (Pmode, r_save_area);
988 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
989 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
990
991 crtl->has_nonlocal_goto = 1;
992
993 #ifdef HAVE_nonlocal_goto
994 /* ??? We no longer need to pass the static chain value, afaik. */
995 if (HAVE_nonlocal_goto)
996 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
997 else
998 #endif
999 {
1000 r_label = copy_to_reg (r_label);
1001
/* As in expand_builtin_longjmp: clobber all of memory and the
   frame-pointer save area before rewriting the frame pointer.  */
1002 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1003 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1004
1005 /* Restore frame pointer for containing function.
1006 This sets the actual hard register used for the frame pointer
1007 to the location of the function's incoming static chain info.
1008 The non-local goto handler will then adjust it to contain the
1009 proper value and reload the argument pointer, if needed. */
1010 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1011 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
1012
1013 /* USE of hard_frame_pointer_rtx added for consistency;
1014 not clear if really needed. */
1015 emit_use (hard_frame_pointer_rtx);
1016 emit_use (stack_pointer_rtx);
1017
1018 /* If the architecture is using a GP register, we must
1019 conservatively assume that the target function makes use of it.
1020 The prologue of functions with nonlocal gotos must therefore
1021 initialize the GP register to the appropriate value, and we
1022 must then make sure that this value is live at the point
1023 of the jump. (Note that this doesn't necessarily apply
1024 to targets with a nonlocal_goto pattern; they are free
1025 to implement it in their own way. Note also that this is
1026 a no-op if the GP register is a global invariant.) */
1027 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1028 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1029 emit_use (pic_offset_table_rtx);
1030
1031 emit_indirect_jump (r_label);
1032 }
1033
1034 /* Search backwards to the jump insn and mark it as a
1035 non-local goto. */
1036 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1037 {
1038 if (JUMP_P (insn))
1039 {
1040 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1041 break;
1042 }
1043 else if (CALL_P (insn))
1044 break;
1045 }
1046
1047 return const0_rtx;
1048 }
1049
1050 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1051 (not all will be used on all machines) that was passed to __builtin_setjmp.
1052 It updates the stack pointer in that block to correspond to the current
1053 stack pointer. */
1054
1055 static void
1056 expand_builtin_update_setjmp_buf (rtx buf_addr)
1057 {
1058 enum machine_mode sa_mode = Pmode;
1059 rtx stack_save;
1060
1061
/* Determine the mode used to save the stack pointer.  Note that a
   STACK_SAVEAREA_MODE definition deliberately overrides the mode
   probed from the save_stack_nonlocal pattern just above.  */
1062 #ifdef HAVE_save_stack_nonlocal
1063 if (HAVE_save_stack_nonlocal)
1064 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1065 #endif
1066 #ifdef STACK_SAVEAREA_MODE
1067 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1068 #endif
1069
/* The stack pointer lives in word 2 of the buffer, matching the
   layout read back by expand_builtin_longjmp above.  */
1070 stack_save
1071 = gen_rtx_MEM (sa_mode,
1072 memory_address
1073 (sa_mode,
1074 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1075
1076 #ifdef HAVE_setjmp
1077 if (HAVE_setjmp)
1078 emit_insn (gen_setjmp ());
1079 #endif
1080
1081 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1082 }
1083
1084 /* Expand a call to __builtin_prefetch. For a target that does not support
1085 data prefetch, evaluate the memory address argument in case it has side
1086 effects. */
/* The optional hint arguments must be compile-time constants; invalid
   values are diagnosed (error for non-constants, warning for
   out-of-range constants) and then replaced with safe defaults so
   expansion always proceeds.  */
1087
1088 static void
1089 expand_builtin_prefetch (tree exp)
1090 {
1091 tree arg0, arg1, arg2;
1092 int nargs;
1093 rtx op0, op1, op2;
1094
1095 if (!validate_arglist (exp, POINTER_TYPE, 0))
1096 return;
1097
1098 arg0 = CALL_EXPR_ARG (exp, 0);
1099
1100 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1101 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1102 locality). */
1103 nargs = call_expr_nargs (exp);
1104 if (nargs > 1)
1105 arg1 = CALL_EXPR_ARG (exp, 1);
1106 else
1107 arg1 = integer_zero_node;
1108 if (nargs > 2)
1109 arg2 = CALL_EXPR_ARG (exp, 2);
1110 else
1111 arg2 = integer_three_node;
1112
1113 /* Argument 0 is an address. */
1114 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1115
1116 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1117 if (TREE_CODE (arg1) != INTEGER_CST)
1118 {
1119 error ("second argument to %<__builtin_prefetch%> must be a constant");
1120 arg1 = integer_zero_node;
1121 }
1122 op1 = expand_normal (arg1);
1123 /* Argument 1 must be either zero or one. */
1124 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1125 {
1126 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1127 " using zero");
1128 op1 = const0_rtx;
1129 }
1130
1131 /* Argument 2 (locality) must be a compile-time constant int. */
1132 if (TREE_CODE (arg2) != INTEGER_CST)
1133 {
1134 error ("third argument to %<__builtin_prefetch%> must be a constant");
1135 arg2 = integer_zero_node;
1136 }
1137 op2 = expand_normal (arg2);
1138 /* Argument 2 must be 0, 1, 2, or 3. */
1139 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1140 {
1141 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1142 op2 = const0_rtx;
1143 }
1144
1145 #ifdef HAVE_prefetch
1146 if (HAVE_prefetch)
1147 {
/* If the address fails the prefetch pattern's operand predicate, or
   is not in Pmode, legitimize it into a Pmode register first.  */
1148 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1149 (op0,
1150 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1151 || (GET_MODE (op0) != Pmode))
1152 {
1153 op0 = convert_memory_address (Pmode, op0);
1154 op0 = force_reg (Pmode, op0);
1155 }
1156 emit_insn (gen_prefetch (op0, op1, op2));
1157 }
1158 #endif
1159
1160 /* Don't do anything with direct references to volatile memory, but
1161 generate code to handle other side effects. */
1162 if (!MEM_P (op0) && side_effects_p (op0))
1163 emit_insn (op0);
1164 }
1165
1166 /* Get a MEM rtx for expression EXP which is the address of an operand
1167 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1168 the maximum length of the block of memory that might be accessed or
1169 NULL if unknown. */
1170
1171 static rtx
1172 get_memory_rtx (tree exp, tree len)
1173 {
1174 tree orig_exp = exp;
1175 rtx addr, mem;
1176 HOST_WIDE_INT off;
1177
1178 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1179 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1180 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1181 exp = TREE_OPERAND (exp, 0);
1182
/* Expand the original (unstripped) expression for the address; EXP is
   only used from here on to derive MEM attributes.  */
1183 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1184 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1185
1186 /* Get an expression we can use to find the attributes to assign to MEM.
1187 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1188 we can. First remove any nops. */
1189 while (CONVERT_EXPR_P (exp)
1190 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1191 exp = TREE_OPERAND (exp, 0);
1192
/* Peel &object + positive-constant so attributes come from the base
   object; OFF remembers the displacement, re-applied further down.  */
1193 off = 0;
1194 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1195 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1196 && host_integerp (TREE_OPERAND (exp, 1), 0)
1197 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1198 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0)

;
1199 else if (TREE_CODE (exp) == ADDR_EXPR)
1200 exp = TREE_OPERAND (exp, 0);
1201 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1202 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1203 else
1204 exp = NULL;
1205
1206 /* Honor attributes derived from exp, except for the alias set
1207 (as builtin stringops may alias with anything) and the size
1208 (as stringops may access multiple array elements). */
1209 if (exp)
1210 {
1211 set_mem_attributes (mem, exp, 0);
1212
1213 if (off)
1214 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1215
1216 /* Allow the string and memory builtins to overflow from one
1217 field into another, see http://gcc.gnu.org/PR23561.
1218 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1219 memory accessed by the string or memory builtin will fit
1220 within the field. */
1221 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1222 {
1223 tree mem_expr = MEM_EXPR (mem);
1224 HOST_WIDE_INT offset = -1, length = -1;
1225 tree inner = exp;
1226
1227 while (TREE_CODE (inner) == ARRAY_REF
1228 || CONVERT_EXPR_P (inner)
1229 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1230 || TREE_CODE (inner) == SAVE_EXPR)
1231 inner = TREE_OPERAND (inner, 0);
1232
1233 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1234
/* OFFSET/LENGTH stay -1 ("unknown") unless both are provably
   constant; any uncertainty below resets them to -1 again.  */
1235 if (MEM_OFFSET (mem)
1236 && CONST_INT_P (MEM_OFFSET (mem)))
1237 offset = INTVAL (MEM_OFFSET (mem));
1238
1239 if (offset >= 0 && len && host_integerp (len, 0))
1240 length = tree_low_cst (len, 0);
1241
1242 while (TREE_CODE (inner) == COMPONENT_REF)
1243 {
1244 tree field = TREE_OPERAND (inner, 1);
1245 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1246 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1247
1248 /* Bitfields are generally not byte-addressable. */
1249 gcc_assert (!DECL_BIT_FIELD (field)
1250 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1251 % BITS_PER_UNIT) == 0
1252 && host_integerp (DECL_SIZE (field), 0)
1253 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1254 % BITS_PER_UNIT) == 0));
1255
1256 /* If we can prove that the memory starting at XEXP (mem, 0) and
1257 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1258 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1259 fields without DECL_SIZE_UNIT like flexible array members. */
1260 if (length >= 0
1261 && DECL_SIZE_UNIT (field)
1262 && host_integerp (DECL_SIZE_UNIT (field), 0))
1263 {
1264 HOST_WIDE_INT size
1265 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1266 if (offset <= size
1267 && length <= size
1268 && offset + length <= size)
1269 break;
1270 }
1271
1272 if (offset >= 0
1273 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1274 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1275 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1276 / BITS_PER_UNIT;
1277 else
1278 {
1279 offset = -1;
1280 length = -1;
1281 }
1282
/* Step out one level of COMPONENT_REF on both trees in lockstep.  */
1283 mem_expr = TREE_OPERAND (mem_expr, 0);
1284 inner = TREE_OPERAND (inner, 0);
1285 }
1286
1287 if (mem_expr == NULL)
1288 offset = -1;
1289 if (mem_expr != MEM_EXPR (mem))
1290 {
1291 set_mem_expr (mem, mem_expr);
1292 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1293 }
1294 }
/* Alias set 0 means "may alias anything"; the size is cleared since
   stringops may touch multiple array elements.  */
1295 set_mem_alias_set (mem, 0);
1296 set_mem_size (mem, NULL_RTX);
1297 }
1298
1299 return mem;
1300 }
1301 \f
1302 /* Built-in functions to perform an untyped call and return. */
1303
1304 #define apply_args_mode \
1305 (this_target_builtins->x_apply_args_mode)
1306 #define apply_result_mode \
1307 (this_target_builtins->x_apply_result_mode)
1308
1309 /* Return the size required for the block returned by __builtin_apply_args,
1310 and initialize apply_args_mode. */
1311
1312 static int
1313 apply_args_size (void)
1314 {
1315 static int size = -1;
1316 int align;
1317 unsigned int regno;
1318 enum machine_mode mode;
1319
1320 /* The values computed by this function never change. */
1321 if (size < 0)
1322 {
1323 /* The first value is the incoming arg-pointer. */
1324 size = GET_MODE_SIZE (Pmode);
1325
1326 /* The second value is the structure value address unless this is
1327 passed as an "invisible" first argument. */
1328 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1329 size += GET_MODE_SIZE (Pmode);
1330
1331 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1332 if (FUNCTION_ARG_REGNO_P (regno))
1333 {
1334 mode = targetm.calls.get_raw_arg_mode (regno);
1335
1336 gcc_assert (mode != VOIDmode);
1337
1338 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1339 if (size % align != 0)
1340 size = CEIL (size, align) * align;
1341 size += GET_MODE_SIZE (mode);
1342 apply_args_mode[regno] = mode;
1343 }
1344 else
1345 {
1346 apply_args_mode[regno] = VOIDmode;
1347 }
1348 }
1349 return size;
1350 }
1351
1352 /* Return the size required for the block returned by __builtin_apply,
1353 and initialize apply_result_mode. */
1354
1355 static int
1356 apply_result_size (void)
1357 {
1358 static int size = -1;
1359 int align, regno;
1360 enum machine_mode mode;
1361
1362 /* The values computed by this function never change. */
1363 if (size < 0)
1364 {
1365 size = 0;
1366
1367 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1368 if (targetm.calls.function_value_regno_p (regno))
1369 {
1370 mode = targetm.calls.get_raw_result_mode (regno);
1371
1372 gcc_assert (mode != VOIDmode);
1373
1374 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1375 if (size % align != 0)
1376 size = CEIL (size, align) * align;
1377 size += GET_MODE_SIZE (mode);
1378 apply_result_mode[regno] = mode;
1379 }
1380 else
1381 apply_result_mode[regno] = VOIDmode;
1382
1383 /* Allow targets that use untyped_call and untyped_return to override
1384 the size so that machine-specific information can be stored here. */
1385 #ifdef APPLY_RESULT_SIZE
1386 size = APPLY_RESULT_SIZE;
1387 #endif
1388 }
1389 return size;
1390 }
1391
1392 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1393 /* Create a vector describing the result block RESULT. If SAVEP is true,
1394 the result block is used to save the values; otherwise it is used to
1395 restore the values. */
/* Builds a PARALLEL of SETs, one per register recorded in
   apply_result_mode[]: either MEM <- REG (save) or REG <- MEM
   (restore), with slot offsets matching apply_result_size's layout.  */
1396
1397 static rtx
1398 result_vector (int savep, rtx result)
1399 {
1400 int regno, size, align, nelts;
1401 enum machine_mode mode;
1402 rtx reg, mem;
1403 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1404
1405 size = nelts = 0;
1406 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1407 if ((mode = apply_result_mode[regno]) != VOIDmode)
1408 {
/* Keep each slot at the mode's natural alignment, mirroring the
   offsets computed in apply_result_size.  */
1409 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1410 if (size % align != 0)
1411 size = CEIL (size, align) * align;
/* On restore, target the incoming (callee-view) register number.  */
1412 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1413 mem = adjust_address (result, mode, size);
1414 savevec[nelts++] = (savep
1415 ? gen_rtx_SET (VOIDmode, mem, reg)
1416 : gen_rtx_SET (VOIDmode, reg, mem));
1417 size += GET_MODE_SIZE (mode);
1418 }
1419 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1420 }
1421 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1422
1423 /* Save the state required to perform an untyped call with the same
1424 arguments as were passed to the current function. */
/* Allocates a stack block laid out exactly as apply_args_size
   describes (arg pointer, optional struct value address, argument
   registers) and emits the stores into it.  Returns the block's
   address in a fresh pseudo.  */
1425
1426 static rtx
1427 expand_builtin_apply_args_1 (void)
1428 {
1429 rtx registers, tem;
1430 int size, align, regno;
1431 enum machine_mode mode;
1432 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1433
1434 /* Create a block where the arg-pointer, structure value address,
1435 and argument registers can be saved. */
1436 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1437
1438 /* Walk past the arg-pointer and structure value address. */
1439 size = GET_MODE_SIZE (Pmode);
1440 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1441 size += GET_MODE_SIZE (Pmode);
1442
1443 /* Save each register used in calling a function to the block. */
1444 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1445 if ((mode = apply_args_mode[regno]) != VOIDmode)
1446 {
1447 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1448 if (size % align != 0)
1449 size = CEIL (size, align) * align;
1450
/* Use the incoming (callee-view) register number when saving.  */
1451 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1452
1453 emit_move_insn (adjust_address (registers, mode, size), tem);
1454 size += GET_MODE_SIZE (mode);
1455 }
1456
1457 /* Save the arg pointer to the block. */
1458 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1459 #ifdef STACK_GROWS_DOWNWARD
1460 /* We need the pointer as the caller actually passed them to us, not
1461 as we might have pretended they were passed. Make sure it's a valid
1462 operand, as emit_move_insn isn't expected to handle a PLUS. */
1463 tem
1464 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1465 NULL_RTX);
1466 #endif
1467 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1468
1469 size = GET_MODE_SIZE (Pmode);
1470
1471 /* Save the structure value address unless this is passed as an
1472 "invisible" first argument. */
1473 if (struct_incoming_value)
1474 {
1475 emit_move_insn (adjust_address (registers, Pmode, size),
1476 copy_to_reg (struct_incoming_value));
1477 size += GET_MODE_SIZE (Pmode);
1478 }
1479
1480 /* Return the address of the block. */
1481 return copy_addr_to_reg (XEXP (registers, 0));
1482 }
1483
1484 /* __builtin_apply_args returns block of memory allocated on
1485 the stack into which is stored the arg pointer, structure
1486 value address, static chain, and all the registers that might
1487 possibly be used in performing a function call. The code is
1488 moved to the start of the function so the incoming values are
1489 saved. */
1490
1491 static rtx
1492 expand_builtin_apply_args (void)
1493 {
1494 /* Don't do __builtin_apply_args more than once in a function.
1495 Save the result of the first call and reuse it. */
1496 if (apply_args_value != 0)
1497 return apply_args_value;
1498 {
1499 /* When this function is called, it means that registers must be
1500 saved on entry to this function. So we migrate the
1501 call to the first insn of this function. */
1502 rtx temp;
1503 rtx seq;
1504
/* Emit the register saves into a detached sequence, then splice that
   sequence to the top of the function so the incoming values are
   captured before anything else can clobber them.  */
1505 start_sequence ();
1506 temp = expand_builtin_apply_args_1 ();
1507 seq = get_insns ();
1508 end_sequence ();
1509
1510 apply_args_value = temp;
1511
1512 /* Put the insns after the NOTE that starts the function.
1513 If this is inside a start_sequence, make the outer-level insn
1514 chain current, so the code is placed at the start of the
1515 function. If internal_arg_pointer is a non-virtual pseudo,
1516 it needs to be placed after the function that initializes
1517 that pseudo. */
1518 push_topmost_sequence ();
1519 if (REG_P (crtl->args.internal_arg_pointer)
1520 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1521 emit_insn_before (seq, parm_birth_insn);
1522 else
1523 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1524 pop_topmost_sequence ();
1525 return temp;
1526 }
1527 }
1528
1529 /* Perform an untyped call and save the state required to perform an
1530 untyped return of whatever value was returned by the given function. */
/* FUNCTION is the address to call, ARGUMENTS the block built by
   __builtin_apply_args, and ARGSIZE the number of bytes of stack
   arguments to copy onto the new argument block.  Returns (in
   ptr_mode) the address of a block holding the saved return-value
   registers, consumable by __builtin_return.  */
1531
1532 static rtx
1533 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1534 {
1535 int size, align, regno;
1536 enum machine_mode mode;
1537 rtx incoming_args, result, reg, dest, src, call_insn;
1538 rtx old_stack_level = 0;
1539 rtx call_fusage = 0;
1540 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1541
1542 arguments = convert_memory_address (Pmode, arguments);
1543
1544 /* Create a block where the return registers can be saved. */
1545 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1546
1547 /* Fetch the arg pointer from the ARGUMENTS block. */
1548 incoming_args = gen_reg_rtx (Pmode);
1549 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1550 #ifndef STACK_GROWS_DOWNWARD
1551 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1552 incoming_args, 0, OPTAB_LIB_WIDEN);
1553 #endif
1554
1555 /* Push a new argument block and copy the arguments. Do not allow
1556 the (potential) memcpy call below to interfere with our stack
1557 manipulations. */
1558 do_pending_stack_adjust ();
1559 NO_DEFER_POP;
1560
1561 /* Save the stack with nonlocal if available. */
1562 #ifdef HAVE_save_stack_nonlocal
1563 if (HAVE_save_stack_nonlocal)
1564 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1565 else
1566 #endif
1567 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1568
1569 /* Allocate a block of memory onto the stack and copy the memory
1570 arguments to the outgoing arguments address. We can pass TRUE
1571 as the 4th argument because we just saved the stack pointer
1572 and will restore it right after the call. */
1573 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1574
1575 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1576 may have already set current_function_calls_alloca to true.
1577 current_function_calls_alloca won't be set if argsize is zero,
1578 so we have to guarantee need_drap is true here. */
1579 if (SUPPORTS_STACK_ALIGNMENT)
1580 crtl->need_drap = true;
1581
1582 dest = virtual_outgoing_args_rtx;
1583 #ifndef STACK_GROWS_DOWNWARD
1584 if (CONST_INT_P (argsize))
1585 dest = plus_constant (dest, -INTVAL (argsize));
1586 else
1587 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1588 #endif
1589 dest = gen_rtx_MEM (BLKmode, dest);
1590 set_mem_align (dest, PARM_BOUNDARY);
1591 src = gen_rtx_MEM (BLKmode, incoming_args);
1592 set_mem_align (src, PARM_BOUNDARY);
/* Copy ARGSIZE bytes of stack arguments to the new block.  */
1593 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1594
1595 /* Refer to the argument block. */
/* Called for its side effect of initializing apply_args_mode[].  */
1596 apply_args_size ();
1597 arguments = gen_rtx_MEM (BLKmode, arguments);
1598 set_mem_align (arguments, PARM_BOUNDARY);
1599
1600 /* Walk past the arg-pointer and structure value address. */
1601 size = GET_MODE_SIZE (Pmode);
1602 if (struct_value)
1603 size += GET_MODE_SIZE (Pmode);
1604
1605 /* Restore each of the registers previously saved. Make USE insns
1606 for each of these registers for use in making the call. */
1607 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1608 if ((mode = apply_args_mode[regno]) != VOIDmode)
1609 {
1610 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1611 if (size % align != 0)
1612 size = CEIL (size, align) * align;
1613 reg = gen_rtx_REG (mode, regno);
1614 emit_move_insn (reg, adjust_address (arguments, mode, size));
1615 use_reg (&call_fusage, reg);
1616 size += GET_MODE_SIZE (mode);
1617 }
1618
1619 /* Restore the structure value address unless this is passed as an
1620 "invisible" first argument. */
1621 size = GET_MODE_SIZE (Pmode);
1622 if (struct_value)
1623 {
1624 rtx value = gen_reg_rtx (Pmode);
1625 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1626 emit_move_insn (struct_value, value);
1627 if (REG_P (struct_value))
1628 use_reg (&call_fusage, struct_value);
1629 size += GET_MODE_SIZE (Pmode);
1630 }
1631
1632 /* All arguments and registers used for the call are set up by now! */
1633 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1634
1635 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1636 and we don't want to load it into a register as an optimization,
1637 because prepare_call_address already did it if it should be done. */
1638 if (GET_CODE (function) != SYMBOL_REF)
1639 function = memory_address (FUNCTION_MODE, function);
1640
1641 /* Generate the actual call instruction and save the return value. */
1642 #ifdef HAVE_untyped_call
1643 if (HAVE_untyped_call)
1644 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1645 result, result_vector (1, result)));
1646 else
1647 #endif
1648 #ifdef HAVE_call_value
1649 if (HAVE_call_value)
1650 {
1651 rtx valreg = 0;
1652
1653 /* Locate the unique return register. It is not possible to
1654 express a call that sets more than one return register using
1655 call_value; use untyped_call for that. In fact, untyped_call
1656 only needs to save the return registers in the given block. */
1657 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1658 if ((mode = apply_result_mode[regno]) != VOIDmode)
1659 {
1660 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1661
1662 valreg = gen_rtx_REG (mode, regno);
1663 }
1664
1665 emit_call_insn (GEN_CALL_VALUE (valreg,
1666 gen_rtx_MEM (FUNCTION_MODE, function),
1667 const0_rtx, NULL_RTX, const0_rtx));
1668
1669 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1670 }
1671 else
1672 #endif
1673 gcc_unreachable ();
1674
1675 /* Find the CALL insn we just emitted, and attach the register usage
1676 information. */
1677 call_insn = last_call_insn ();
1678 add_function_usage_to (call_insn, call_fusage);
1679
1680 /* Restore the stack. */
/* Must mirror the save above: same kind (nonlocal vs. block).  */
1681 #ifdef HAVE_save_stack_nonlocal
1682 if (HAVE_save_stack_nonlocal)
1683 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1684 else
1685 #endif
1686 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1687
/* Re-enable the stack-pop deferral disabled before the save.  */
1688 OK_DEFER_POP;
1689
1690 /* Return the address of the result block. */
1691 result = copy_addr_to_reg (XEXP (result, 0));
1692 return convert_memory_address (ptr_mode, result);
1693 }
1694
1695 /* Perform an untyped return. */
/* RESULT is the address of the block produced by __builtin_apply:
   reload the saved return-value registers from it, mark them live
   with USEs, and jump straight to the function's return point.  */
1696
1697 static void
1698 expand_builtin_return (rtx result)
1699 {
1700 int size, align, regno;
1701 enum machine_mode mode;
1702 rtx reg;
1703 rtx call_fusage = 0;
1704
1705 result = convert_memory_address (Pmode, result);
1706
/* Called for its side effect of initializing apply_result_mode[].  */
1707 apply_result_size ();
1708 result = gen_rtx_MEM (BLKmode, result);
1709
1710 #ifdef HAVE_untyped_return
1711 if (HAVE_untyped_return)
1712 {
1713 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1714 emit_barrier ();
1715 return;
1716 }
1717 #endif
1718
1719 /* Restore the return value and note that each value is used. */
1720 size = 0;
1721 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1722 if ((mode = apply_result_mode[regno]) != VOIDmode)
1723 {
1724 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1725 if (size % align != 0)
1726 size = CEIL (size, align) * align;
1727 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1728 emit_move_insn (reg, adjust_address (result, mode, size));
1729
/* Collect the USEs in a side sequence so they can all be emitted
   together just before the return, keeping the loads first.  */
1730 push_to_sequence (call_fusage);
1731 emit_use (reg);
1732 call_fusage = get_insns ();
1733 end_sequence ();
1734 size += GET_MODE_SIZE (mode);
1735 }
1736
1737 /* Put the USE insns before the return. */
1738 emit_insn (call_fusage);
1739
1740 /* Return whatever values was restored by jumping directly to the end
1741 of the function. */
1742 expand_naked_return ();
1743 }
1744
1745 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1746
1747 static enum type_class
1748 type_to_class (tree type)
1749 {
1750 switch (TREE_CODE (type))
1751 {
1752 case VOID_TYPE: return void_type_class;
1753 case INTEGER_TYPE: return integer_type_class;
1754 case ENUMERAL_TYPE: return enumeral_type_class;
1755 case BOOLEAN_TYPE: return boolean_type_class;
1756 case POINTER_TYPE: return pointer_type_class;
1757 case REFERENCE_TYPE: return reference_type_class;
1758 case OFFSET_TYPE: return offset_type_class;
1759 case REAL_TYPE: return real_type_class;
1760 case COMPLEX_TYPE: return complex_type_class;
1761 case FUNCTION_TYPE: return function_type_class;
1762 case METHOD_TYPE: return method_type_class;
1763 case RECORD_TYPE: return record_type_class;
1764 case UNION_TYPE:
1765 case QUAL_UNION_TYPE: return union_type_class;
1766 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1767 ? string_type_class : array_type_class);
1768 case LANG_TYPE: return lang_type_class;
1769 default: return no_type_class;
1770 }
1771 }
1772
1773 /* Expand a call EXP to __builtin_classify_type. */
1774
1775 static rtx
1776 expand_builtin_classify_type (tree exp)
1777 {
1778 if (call_expr_nargs (exp))
1779 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1780 return GEN_INT (no_type_class);
1781 }
1782
1783 /* This helper macro, meant to be used in mathfn_built_in below,
1784 determines which among a set of three builtin math functions is
1785 appropriate for a given type mode. The `F' and `L' cases are
1786 automatically generated from the `double' case. */
1787 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1788 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1789 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1790 fcodel = BUILT_IN_MATHFN##L ; break;
1791 /* Similar to above, but appends _R after any F/L suffix. */
1792 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1793 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1794 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1795 fcodel = BUILT_IN_MATHFN##L_R ; break;
1796
/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true find the function in
   implicit_built_in_decls[], otherwise use built_in_decls[].  If we
   can't do the conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
{
  /* Choose which table of declarations the result is looked up in.  */
  tree const *const fn_arr
    = implicit ? implicit_built_in_decls : built_in_decls;
  enum built_in_function fcode, fcodef, fcodel;

  /* Each CASE_MATHFN expands to the double/float/long-double case
     labels for one math builtin and records the three corresponding
     codes in fcode, fcodef and fcodel.  */
  switch (fn)
    {
    CASE_MATHFN (BUILT_IN_ACOS)
    CASE_MATHFN (BUILT_IN_ACOSH)
    CASE_MATHFN (BUILT_IN_ASIN)
    CASE_MATHFN (BUILT_IN_ASINH)
    CASE_MATHFN (BUILT_IN_ATAN)
    CASE_MATHFN (BUILT_IN_ATAN2)
    CASE_MATHFN (BUILT_IN_ATANH)
    CASE_MATHFN (BUILT_IN_CBRT)
    CASE_MATHFN (BUILT_IN_CEIL)
    CASE_MATHFN (BUILT_IN_CEXPI)
    CASE_MATHFN (BUILT_IN_COPYSIGN)
    CASE_MATHFN (BUILT_IN_COS)
    CASE_MATHFN (BUILT_IN_COSH)
    CASE_MATHFN (BUILT_IN_DREM)
    CASE_MATHFN (BUILT_IN_ERF)
    CASE_MATHFN (BUILT_IN_ERFC)
    CASE_MATHFN (BUILT_IN_EXP)
    CASE_MATHFN (BUILT_IN_EXP10)
    CASE_MATHFN (BUILT_IN_EXP2)
    CASE_MATHFN (BUILT_IN_EXPM1)
    CASE_MATHFN (BUILT_IN_FABS)
    CASE_MATHFN (BUILT_IN_FDIM)
    CASE_MATHFN (BUILT_IN_FLOOR)
    CASE_MATHFN (BUILT_IN_FMA)
    CASE_MATHFN (BUILT_IN_FMAX)
    CASE_MATHFN (BUILT_IN_FMIN)
    CASE_MATHFN (BUILT_IN_FMOD)
    CASE_MATHFN (BUILT_IN_FREXP)
    CASE_MATHFN (BUILT_IN_GAMMA)
    CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
    CASE_MATHFN (BUILT_IN_HUGE_VAL)
    CASE_MATHFN (BUILT_IN_HYPOT)
    CASE_MATHFN (BUILT_IN_ILOGB)
    CASE_MATHFN (BUILT_IN_INF)
    CASE_MATHFN (BUILT_IN_ISINF)
    CASE_MATHFN (BUILT_IN_J0)
    CASE_MATHFN (BUILT_IN_J1)
    CASE_MATHFN (BUILT_IN_JN)
    CASE_MATHFN (BUILT_IN_LCEIL)
    CASE_MATHFN (BUILT_IN_LDEXP)
    CASE_MATHFN (BUILT_IN_LFLOOR)
    CASE_MATHFN (BUILT_IN_LGAMMA)
    CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (BUILT_IN_LLCEIL)
    CASE_MATHFN (BUILT_IN_LLFLOOR)
    CASE_MATHFN (BUILT_IN_LLRINT)
    CASE_MATHFN (BUILT_IN_LLROUND)
    CASE_MATHFN (BUILT_IN_LOG)
    CASE_MATHFN (BUILT_IN_LOG10)
    CASE_MATHFN (BUILT_IN_LOG1P)
    CASE_MATHFN (BUILT_IN_LOG2)
    CASE_MATHFN (BUILT_IN_LOGB)
    CASE_MATHFN (BUILT_IN_LRINT)
    CASE_MATHFN (BUILT_IN_LROUND)
    CASE_MATHFN (BUILT_IN_MODF)
    CASE_MATHFN (BUILT_IN_NAN)
    CASE_MATHFN (BUILT_IN_NANS)
    CASE_MATHFN (BUILT_IN_NEARBYINT)
    CASE_MATHFN (BUILT_IN_NEXTAFTER)
    CASE_MATHFN (BUILT_IN_NEXTTOWARD)
    CASE_MATHFN (BUILT_IN_POW)
    CASE_MATHFN (BUILT_IN_POWI)
    CASE_MATHFN (BUILT_IN_POW10)
    CASE_MATHFN (BUILT_IN_REMAINDER)
    CASE_MATHFN (BUILT_IN_REMQUO)
    CASE_MATHFN (BUILT_IN_RINT)
    CASE_MATHFN (BUILT_IN_ROUND)
    CASE_MATHFN (BUILT_IN_SCALB)
    CASE_MATHFN (BUILT_IN_SCALBLN)
    CASE_MATHFN (BUILT_IN_SCALBN)
    CASE_MATHFN (BUILT_IN_SIGNBIT)
    CASE_MATHFN (BUILT_IN_SIGNIFICAND)
    CASE_MATHFN (BUILT_IN_SIN)
    CASE_MATHFN (BUILT_IN_SINCOS)
    CASE_MATHFN (BUILT_IN_SINH)
    CASE_MATHFN (BUILT_IN_SQRT)
    CASE_MATHFN (BUILT_IN_TAN)
    CASE_MATHFN (BUILT_IN_TANH)
    CASE_MATHFN (BUILT_IN_TGAMMA)
    CASE_MATHFN (BUILT_IN_TRUNC)
    CASE_MATHFN (BUILT_IN_Y0)
    CASE_MATHFN (BUILT_IN_Y1)
    CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  /* Select the variant matching TYPE's main variant; anything other
     than the three standard float types has no equivalent builtin.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fn_arr[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fn_arr[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fn_arr[fcodel];
  else
    return NULL_TREE;
}
1908
1909 /* Like mathfn_built_in_1(), but always use the implicit array. */
1910
1911 tree
1912 mathfn_built_in (tree type, enum built_in_function fn)
1913 {
1914 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1915 }
1916
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A NaN compares unequal to
     itself, so the EQ branch to LAB is taken for every non-NaN result
     and the errno-setting code below is skipped.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly
     with a store to the target's errno location.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1959
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Map the builtin to its optab and note which builtins may need to
     set errno on a domain error.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt only leaves the domain for negative arguments, so the
	 errno check can be skipped for provably nonnegative ones.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* errno handling is only needed when -fmath-errno is in effect and
     the mode can actually represent a NaN to test for.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2080
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* The scalbn/scalbln/ldexp family takes an integer second argument;
     everything else takes two reals.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* FALLTHRU */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb via the optab is only valid for radix-2 formats.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* The inline NaN check plus library fallback costs size; under -Os
     prefer a plain library call.  */
  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2189
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  enum machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  /* Always stabilize the argument list, so a possible library
     fallback sees the same (side-effect-free) arguments.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      target, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2262
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Prefer the combined sincos instruction for either function.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* sincos produces two values; request only the one we need
	     by passing 0 for the other output.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2366
/* Given an interclass math builtin decl FNDECL and it's argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = 0;
  enum machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument, not on the
     (integer) result type.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
2410
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      /* Remember where we are so the insns can be deleted if the
	 expansion attempt fails.  */
      rtx last = get_last_insn ();
      tree orig_arg = arg;
      /* Make a suitable register to place result in.  */
      if (!target
	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
	  || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
		  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
	return target;
      /* Expansion failed: discard the partial insns and restore the
	 original (unsaved) argument before falling back to a call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2467
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  Always returns const0_rtx on success, since sincos returns
   its results through its two pointer arguments.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs through the sin/cos pointer arguments so the stores
     get correct alias information.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2521
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  Tries, in order: the sincos
   optab, a call to sincos (when the target has it), and finally a call
   to cexp.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching this cexpi's precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Stack temporaries receive the sin and cos results; pass their
	 addresses to sincos.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) == cexp(0 + x*i).  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: a complex whose real part is the
     cos result (op2) and imaginary part the sin result (op1).  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2630
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
2649
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the conversion optab and the plain floating-point rounding
     builtin used for the (int)(floor/ceil(x)) fallback.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2777
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2846
2847 /* To evaluate powi(x,n), the floating point value x raised to the
2848 constant integer exponent n, we use a hybrid algorithm that
2849 combines the "window method" with look-up tables. For an
2850 introduction to exponentiation algorithms and "addition chains",
2851 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2852 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2853 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2854 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2855
2856 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2857 multiplications to inline before calling the system library's pow
2858 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2859 so this default never requires calling pow, powf or powl. */
2860
2861 #ifndef POWI_MAX_MULTS
2862 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2863 #endif
2864
2865 /* The size of the "optimal power tree" lookup table. All
2866 exponents less than this value are simply looked up in the
2867 powi_table below. This threshold is also used to size the
2868 cache of pseudo registers that hold intermediate results. */
2869 #define POWI_TABLE_SIZE 256
2870
2871 /* The size, in bits of the window, used in the "window method"
2872 exponentiation algorithm. This is equivalent to a radix of
2873 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2874 #define POWI_WINDOW_SIZE 3
2875
/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states that an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2918
2919
2920 /* Return the number of multiplications required to calculate
2921 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2922 subroutine of powi_cost. CACHE is an array indicating
2923 which exponents have already been calculated. */
2924
2925 static int
2926 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2927 {
2928 /* If we've already calculated this exponent, then this evaluation
2929 doesn't require any additional multiplications. */
2930 if (cache[n])
2931 return 0;
2932
2933 cache[n] = true;
2934 return powi_lookup_cost (n - powi_table[n], cache)
2935 + powi_lookup_cost (powi_table[n], cache) + 1;
2936 }
2937
2938 /* Return the number of multiplications required to calculate
2939 powi(x,n) for an arbitrary x, given the exponent N. This
2940 function needs to be kept in sync with expand_powi below. */
2941
2942 static int
2943 powi_cost (HOST_WIDE_INT n)
2944 {
2945 bool cache[POWI_TABLE_SIZE];
2946 unsigned HOST_WIDE_INT digit;
2947 unsigned HOST_WIDE_INT val;
2948 int result;
2949
2950 if (n == 0)
2951 return 0;
2952
2953 /* Ignore the reciprocal when calculating the cost. */
2954 val = (n < 0) ? -n : n;
2955
2956 /* Initialize the exponent cache. */
2957 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2958 cache[1] = true;
2959
2960 result = 0;
2961
2962 while (val >= POWI_TABLE_SIZE)
2963 {
2964 if (val & 1)
2965 {
2966 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2967 result += powi_lookup_cost (digit, cache)
2968 + POWI_WINDOW_SIZE + 1;
2969 val >>= POWI_WINDOW_SIZE;
2970 }
2971 else
2972 {
2973 val >>= 1;
2974 result++;
2975 }
2976 }
2977
2978 return result + powi_lookup_cost (val, cache);
2979 }
2980
/* Recursive subroutine of expand_powi.  This function takes the array,
   CACHE, of already calculated exponents and an exponent N and returns
   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.  */

static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      /* Small exponent: reuse a previously computed power if we have
	 one, otherwise split N per the optimal power tree table.  */
      if (cache[n])
	return cache[n];

      target = gen_reg_rtx (mode);
      /* Record TARGET before recursing: a table entry can lead back to
	 N itself (e.g. powi_table[0] == 0), and the cache hit above is
	 what terminates that recursion.  */
      cache[n] = target;

      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      /* Large odd exponent: split off a POWI_WINDOW_SIZE-bit window of
	 low-order bits and multiply the two partial powers.  */
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      /* Large even exponent: square x**(n/2).  */
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
3022
3023 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
3024 floating point operand in mode MODE, and N is the exponent. This
3025 function needs to be kept in sync with powi_cost above. */
3026
3027 static rtx
3028 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
3029 {
3030 rtx cache[POWI_TABLE_SIZE];
3031 rtx result;
3032
3033 if (n == 0)
3034 return CONST1_RTX (mode);
3035
3036 memset (cache, 0, sizeof (cache));
3037 cache[1] = x;
3038
3039 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
3040
3041 /* If the original exponent was negative, reciprocate the result. */
3042 if (n < 0)
3043 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3044 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3045
3046 return result;
3047 }
3048
3049 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
3050 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
3051 if we can simplify it. */
3052 static rtx
3053 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
3054 rtx subtarget)
3055 {
3056 if (TREE_CODE (arg1) == REAL_CST
3057 && !TREE_OVERFLOW (arg1)
3058 && flag_unsafe_math_optimizations)
3059 {
3060 enum machine_mode mode = TYPE_MODE (type);
3061 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
3062 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
3063 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
3064 tree op = NULL_TREE;
3065
3066 if (sqrtfn)
3067 {
3068 /* Optimize pow (x, 0.5) into sqrt. */
3069 if (REAL_VALUES_EQUAL (c, dconsthalf))
3070 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3071
3072 else
3073 {
3074 REAL_VALUE_TYPE dconst1_4 = dconst1;
3075 REAL_VALUE_TYPE dconst3_4;
3076 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
3077
3078 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
3079 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
3080
3081 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
3082 machines that a builtin sqrt instruction is smaller than a
3083 call to pow with 0.25, so do this optimization even if
3084 -Os. */
3085 if (REAL_VALUES_EQUAL (c, dconst1_4))
3086 {
3087 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3088 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
3089 }
3090
3091 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
3092 are optimizing for space. */
3093 else if (optimize_insn_for_speed_p ()
3094 && !TREE_SIDE_EFFECTS (arg0)
3095 && REAL_VALUES_EQUAL (c, dconst3_4))
3096 {
3097 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
3098 tree sqrt2 = builtin_save_expr (sqrt1);
3099 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
3100 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
3101 }
3102 }
3103 }
3104
3105 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
3106 cbrt/sqrts instead of pow (x, 1./6.). */
3107 if (cbrtfn && ! op
3108 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
3109 {
3110 /* First try 1/3. */
3111 REAL_VALUE_TYPE dconst1_3
3112 = real_value_truncate (mode, dconst_third ());
3113
3114 if (REAL_VALUES_EQUAL (c, dconst1_3))
3115 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
3116
3117 /* Now try 1/6. */
3118 else if (optimize_insn_for_speed_p ())
3119 {
3120 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
3121 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
3122
3123 if (REAL_VALUES_EQUAL (c, dconst1_6))
3124 {
3125 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3126 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3127 }
3128 }
3129 }
3130
3131 if (op)
3132 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3133 }
3134
3135 return NULL_RTX;
3136 }
3137
/* Expand a call to the pow built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET is a suggested place for intermediate results.  */

static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* A non-constant exponent goes straight to the generic two-operand
     math expander.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && optimize_insn_for_speed_p ()
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* ARG0 may be expanded more than once below; stabilize it.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && optimize_insn_for_speed_p ()
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      /* Even the c == 0.5 case cannot be done unconditionally
		 when we need to preserve signed zeros, as
		 pow (-0, 0.5) is +0, while sqrt(-0) is -0.  */
	      || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
	      /* For c == 1.5 we can assume that x * sqrt (x) is always
		 smaller than pow (x, 1.5) if sqrt will not be expanded
		 as a call.  */
	      || (n == 3
		  && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)))
	{
	  tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
						  narg0);
	  /* Use expand_expr in case the newly built call expression
	     was folded to a non-call.  */
	  op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      /* NOTE(review): abs() takes int while n is HOST_WIDE_INT;
		 looks like this relies on n/2 fitting in int here —
		 confirm against the powi_cost bound above.  */
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Check whether we can do a series of sqrt or cbrt's instead of the pow
     call.  */
  op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
				subtarget);
  if (op)
    return op;

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      REAL_VALUE_TYPE dconst3;
      /* Round 3*c to the nearest integer N and check whether c is
	 exactly representable as N/3 in MODE.  */
      real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((optimize_insn_for_speed_p ()
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
						  narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* cbrt(x)**2 is needed when |n| mod 3 == 2.  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
3289
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
	 Otherwise, check the number of multiplications required.
	 The check on the high word ensures the constant fits in a
	 single HOST_WIDE_INT.  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
	   || TREE_INT_CST_HIGH (arg1) == -1)
	  && ((n >= -1 && n <= 2)
	      || (optimize_insn_for_speed_p ()
		  && powi_cost (n) <= POWI_MAX_MULTS)))
	{
	  op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
	  op0 = force_reg (mode, op0);
	  return expand_powi (op0, mode, n);
	}
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Expand the operands and convert them to the modes the libcall
     expects.  */
  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
3352
/* Expand expression EXP, which is a call to the strlen builtin.  Return
   NULL_RTX if we failed, the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  TARGET_MODE is the
   mode of the function's result.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  Try
	 successively wider modes until a strlen pattern is found.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  Reuse
	 TARGET only if it is a pseudo of the right mode.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* Operand 2 of the strlen pattern is the char to search for
	 (always the NUL terminator here).  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in just before the
	 strlen insn emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3461
3462 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3463 bytes from constant string DATA + OFFSET and return it as target
3464 constant. */
3465
3466 static rtx
3467 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3468 enum machine_mode mode)
3469 {
3470 const char *str = (const char *) data;
3471
3472 gcc_assert (offset >= 0
3473 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3474 <= strlen (str) + 1));
3475
3476 return c_readstr (str + offset, mode);
3477 }
3478
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Pick up profile-driven hints about alignment and size for the
	 block move, when available.  */
      if (currently_expanding_gimple_stmt)
        stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and need only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
				         CALL_EXPR_TAILCALL (exp)
				         ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3559
3560 /* Expand a call EXP to the mempcpy builtin.
3561 Return NULL_RTX if we failed; the caller should emit a normal call,
3562 otherwise try to get the result in TARGET, if convenient (and in
3563 mode MODE if that's convenient). If ENDP is 0 return the
3564 destination pointer, if ENDP is 1 return the end pointer ala
3565 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3566 stpcpy. */
3567
3568 static rtx
3569 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3570 {
3571 if (!validate_arglist (exp,
3572 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3573 return NULL_RTX;
3574 else
3575 {
3576 tree dest = CALL_EXPR_ARG (exp, 0);
3577 tree src = CALL_EXPR_ARG (exp, 1);
3578 tree len = CALL_EXPR_ARG (exp, 2);
3579 return expand_builtin_mempcpy_args (dest, src, len,
3580 target, mode, /*endp=*/ 1);
3581 }
3582 }
3583
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and need only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise try a piecewise move for small constant lengths;
	 ENDP selects which pointer (start/end/end-1) is returned.  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3661
3662 #ifndef HAVE_movstr
3663 # define HAVE_movstr 0
3664 # define CODE_FOR_movstr CODE_FOR_nothing
3665 #endif
3666
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  rtx end;
  rtx dest_mem;
  rtx src_mem;
  rtx insn;
  const struct insn_data_d * data;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  data = insn_data + CODE_FOR_movstr;
  if (!endp)
    {
      /* The caller wants the destination pointer back: capture it in a
	 register before the pattern runs.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
      end = gen_reg_rtx (Pmode);
    }
  else
    {
      /* Reuse TARGET for the end pointer if the pattern's operand 0
	 predicate accepts it; otherwise compute into a fresh pseudo.  */
      if (target == 0
	  || target == const0_rtx
	  || ! (*data->operand[0].predicate) (target, Pmode))
	{
	  end = gen_reg_rtx (Pmode);
	  if (target != const0_rtx)
	    target = end;
	}
      else
	end = target;
    }

  /* The movstr pattern may declare a mode for operand 0 other than
     Pmode; adapt END to it.  */
  if (data->operand[0].mode != VOIDmode)
    end = gen_lowpart (data->operand[0].mode, end);

  insn = data->genfun (end, dest_mem, src_mem);

  gcc_assert (insn);

  emit_insn (insn);

  /* movstr is supposed to set end to the address of the NUL
     terminator.  If the caller requested a mempcpy-like return value,
     adjust it.  */
  if (endp == 1 && target != const0_rtx)
    {
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
    }

  return target;
}
3729
3730 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3731 NULL_RTX if we failed the caller should emit a normal call, otherwise
3732 try to get the result in TARGET, if convenient (and in mode MODE if that's
3733 convenient). */
3734
3735 static rtx
3736 expand_builtin_strcpy (tree exp, rtx target)
3737 {
3738 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3739 {
3740 tree dest = CALL_EXPR_ARG (exp, 0);
3741 tree src = CALL_EXPR_ARG (exp, 1);
3742 return expand_builtin_strcpy_args (dest, src, target);
3743 }
3744 return NULL_RTX;
3745 }
3746
3747 /* Helper function to do the actual work for expand_builtin_strcpy. The
3748 arguments to the builtin_strcpy call DEST and SRC are broken out
3749 so that this can also be called without constructing an actual CALL_EXPR.
3750 The other arguments and return value are the same as for
3751 expand_builtin_strcpy. */
3752
3753 static rtx
3754 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3755 {
3756 return expand_movstr (dest, src, target, /*endp=*/0);
3757 }
3758
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* The source length is known: copy strlen (src) + 1 bytes as a
	 mempcpy returning the end pointer minus one (endp == 2).  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      /* mempcpy expansion failed.  If LEN is a compile-time constant,
	 try a plain strcpy expansion and compute DST + LEN by hand to
	 form the stpcpy return value.  */
      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: a direct movstr expansion with stpcpy semantics.  */
      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3834
3835 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3836 bytes from constant string DATA + OFFSET and return it as target
3837 constant. */
3838
3839 rtx
3840 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3841 enum machine_mode mode)
3842 {
3843 const char *str = (const char *) data;
3844
3845 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3846 return const0_rtx;
3847
3848 return c_readstr (str + offset, mode);
3849 }
3850
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* SLEN becomes strlen (src) + 1, the number of bytes including
	 the NUL terminator.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  /* Give up unless the source string literal is visible, the
	     destination alignment is known, and the target can store
	     LEN bytes piecewise.  */
	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  /* Return the destination address in ptr_mode, as strncpy
	     returns its first argument.  */
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3901
3902 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3903 bytes from constant string DATA + OFFSET and return it as target
3904 constant. */
3905
3906 rtx
3907 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3908 enum machine_mode mode)
3909 {
3910 const char *c = (const char *) data;
3911 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3912
3913 memset (p, *c, GET_MODE_SIZE (mode));
3914
3915 return c_readstr (p, mode);
3916 }
3917
3918 /* Callback routine for store_by_pieces. Return the RTL of a register
3919 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3920 char value given in the RTL register data. For example, if mode is
3921 4 bytes wide, return the RTL for 0x01010101*data. */
3922
3923 static rtx
3924 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3925 enum machine_mode mode)
3926 {
3927 rtx target, coeff;
3928 size_t size;
3929 char *p;
3930
3931 size = GET_MODE_SIZE (mode);
3932 if (size == 1)
3933 return (rtx) data;
3934
3935 p = XALLOCAVEC (char, size);
3936 memset (p, 1, size);
3937 coeff = c_readstr (p, mode);
3938
3939 target = convert_to_mode (mode, (rtx) data, 1);
3940 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3941 return force_reg (mode, target);
3942 }
3943
3944 /* Expand expression EXP, which is a call to the memset builtin. Return
3945 NULL_RTX if we failed the caller should emit a normal call, otherwise
3946 try to get the result in TARGET, if convenient (and in mode MODE if that's
3947 convenient). */
3948
3949 static rtx
3950 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3951 {
3952 if (!validate_arglist (exp,
3953 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3954 return NULL_RTX;
3955 else
3956 {
3957 tree dest = CALL_EXPR_ARG (exp, 0);
3958 tree val = CALL_EXPR_ARG (exp, 1);
3959 tree len = CALL_EXPR_ARG (exp, 2);
3960 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3961 }
3962 }
3963
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Pick up profile-feedback hints about typical block size/alignment
     for the statement being expanded, if available.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  /* Case 1: the fill value is not known at compile time.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      /* Return the destination address (memset's return value) in
	 ptr_mode.  */
      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Case 2: the fill value is a nonzero compile-time constant byte.  */
  if (c)
    {
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 3: the fill byte is zero; use the generic block-clear
     expander, which may itself emit a library call.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed: rebuild a call to the original builtin
     (memset or bzero) on the stabilized arguments and expand that.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
4094
4095 /* Expand expression EXP, which is a call to the bzero builtin. Return
4096 NULL_RTX if we failed the caller should emit a normal call. */
4097
4098 static rtx
4099 expand_builtin_bzero (tree exp)
4100 {
4101 tree dest, size;
4102 location_t loc = EXPR_LOCATION (exp);
4103
4104 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4105 return NULL_RTX;
4106
4107 dest = CALL_EXPR_ARG (exp, 0);
4108 size = CALL_EXPR_ARG (exp, 1);
4109
4110 /* New argument list transforming bzero(ptr x, int y) to
4111 memset(ptr x, int 0, size_t y). This is done this way
4112 so that if it isn't expanded inline, we fallback to
4113 calling bzero instead of memset. */
4114
4115 return expand_builtin_memset_args (dest, integer_zero_node,
4116 fold_convert_loc (loc, sizetype, size),
4117 const0_rtx, VOIDmode, exp);
4118 }
4119
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient (and in mode MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Inline expansion requires a block-compare pattern; prefer cmpmemsi,
     fall back to cmpstrnsi.  */
#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    unsigned int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Determine the result mode from whichever pattern is available.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

    /* Generate the compare insn, passing MIN of the two alignments so
       the pattern can pick an access width safe for both operands.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    /* The gen function may refuse these operands; then emit an explicit
       library call to memcmp instead.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
4225
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      unsigned int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Include the NUL terminator in each known length.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4368
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      unsigned int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Include the NUL terminator in each known length.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      /* MIN of the alignments lets the pattern choose an access width
	 safe for both operands.  */
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4491
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  /* Collect the target hook's insns in a detached sequence so they can
     be moved to the function entry below.  */
  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4528
/* Expand a call to __builtin_next_arg.  Returns the address of the first
   anonymous stack argument, computed as the internal arg pointer plus
   the offset of the last named argument.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
4541
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  If NEEDS_LVALUE is nonzero, the result
   must be usable as an lvalue; LOC is the source location for any
   trees built here.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  /* A side-effect-free valist is already safe to reuse.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Dereference the (possibly saved) address so the result denotes
	 the va_list object itself.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4591
/* The "standard" definition of va_list is void*.  Default implementation
   of the TARGET_BUILD_BUILTIN_VA_LIST hook.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
4599
/* The "standard" abi va_list is va_list_type_node.  Default implementation
   of the TARGET_FN_ABI_VA_LIST hook; FNDECL is ignored.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
4607
/* The "standard" type of va_list is va_list_type_node.  Return
   va_list_type_node if TYPE is (a possibly decayed or indirected form
   of) the standard va_list type, otherwise NULL_TREE.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection from the type being tested.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  /* Compare main variants so qualifiers don't affect the match.  */
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4642
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  VALIST is the va_list lvalue; NEXTARG is the address
   of the first anonymous argument.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  /* Expand VALIST as a store destination and move NEXTARG into it.  */
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
4652
4653 /* Expand EXP, a call to __builtin_va_start. */
4654
4655 static rtx
4656 expand_builtin_va_start (tree exp)
4657 {
4658 rtx nextarg;
4659 tree valist;
4660 location_t loc = EXPR_LOCATION (exp);
4661
4662 if (call_expr_nargs (exp) < 2)
4663 {
4664 error_at (loc, "too few arguments to function %<va_start%>");
4665 return const0_rtx;
4666 }
4667
4668 if (fold_builtin_next_arg (exp, true))
4669 return const0_rtx;
4670
4671 nextarg = expand_builtin_next_arg ();
4672 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4673
4674 if (targetm.expand_builtin_va_start)
4675 targetm.expand_builtin_va_start (valist, nextarg);
4676 else
4677 std_expand_builtin_va_start (valist, nextarg);
4678
4679 return const0_rtx;
4680 }
4681
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  VALIST
   is the va_list lvalue, TYPE the type being fetched; gimple statements
   are appended to PRE_P/POST_P.  Returns a tree for the fetched value.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments the ABI passes by reference are fetched as a pointer and
     dereferenced at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary, emitted
	 as two separate gimplified assignments.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (POINTER_PLUS_EXPR,
			       TREE_TYPE (valist),
			       valist_tmp, size_int (boundary - 1)));
      gimplify_and_add (t, pre_p);

      t = fold_convert (sizetype, valist_tmp);
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_convert (TREE_TYPE (valist),
				fold_build2 (BIT_AND_EXPR, sizetype, t,
					     size_int (-boundary))));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward: skip the leading padding so the
	 value itself is read, but only when the arg is smaller than one
	 slot (rounded_size > align selects zero adjustment).  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
		       rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build2 (POINTER_PLUS_EXPR,
			  TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For by-reference arguments, ADDR holds a pointer-to-pointer; add the
     extra dereference.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4782
4783 /* Build an indirect-ref expression over the given TREE, which represents a
4784 piece of a va_arg() expansion. */
4785 tree
4786 build_va_arg_indirect_ref (tree addr)
4787 {
4788 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4789
4790 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4791 mf_mark (addr);
4792
4793 return addr;
4794 }
4795
4796 /* Return a dummy expression of type TYPE in order to keep going after an
4797 error. */
4798
4799 static tree
4800 dummy_object (tree type)
4801 {
4802 tree t = build_int_cst (build_pointer_type (type), 0);
4803 return build2 (MEM_REF, type, t, t);
4804 }
4805
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  *EXPR_P is the
   VA_ARG_EXPR; gimple statements are appended to PRE_P/POST_P.  Returns
   GS_ERROR on a bad va_list, GS_ALL_DONE or GS_OK otherwise.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
	   != type)
    {
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
	  		   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      /* The supplementary hint is emitted at most once per compilation.  */
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       implicit_built_in_decls[BUILT_IN_TRAP], 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	/* Non-array va_lists are passed as an lvalue the backend may
	   modify in place.  */
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4898
4899 /* Expand EXP, a call to __builtin_va_end. */
4900
4901 static rtx
4902 expand_builtin_va_end (tree exp)
4903 {
4904 tree valist = CALL_EXPR_ARG (exp, 0);
4905
4906 /* Evaluate for side effects, if needed. I hate macros that don't
4907 do that. */
4908 if (TREE_SIDE_EFFECTS (valist))
4909 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4910
4911 return const0_rtx;
4912 }
4913
4914 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4915 builtin rather than just as an assignment in stdarg.h because of the
4916 nastiness of array-type va_list types. */
4917
4918 static rtx
4919 expand_builtin_va_copy (tree exp)
4920 {
4921 tree dst, src, t;
4922 location_t loc = EXPR_LOCATION (exp);
4923
4924 dst = CALL_EXPR_ARG (exp, 0);
4925 src = CALL_EXPR_ARG (exp, 1);
4926
4927 dst = stabilize_va_list_loc (loc, dst, 1);
4928 src = stabilize_va_list_loc (loc, src, 0);
4929
4930 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4931
4932 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4933 {
4934 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4935 TREE_SIDE_EFFECTS (t) = 1;
4936 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4937 }
4938 else
4939 {
4940 rtx dstb, srcb, size;
4941
4942 /* Evaluate to pointers. */
4943 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4944 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4945 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4946 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4947
4948 dstb = convert_memory_address (Pmode, dstb);
4949 srcb = convert_memory_address (Pmode, srcb);
4950
4951 /* "Dereference" to BLKmode memories. */
4952 dstb = gen_rtx_MEM (BLKmode, dstb);
4953 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4954 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4955 srcb = gen_rtx_MEM (BLKmode, srcb);
4956 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4957 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4958
4959 /* Copy. */
4960 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4961 }
4962
4963 return const0_rtx;
4964 }
4965
4966 /* Expand a call to one of the builtin functions __builtin_frame_address or
4967 __builtin_return_address. */
4968
4969 static rtx
4970 expand_builtin_frame_address (tree fndecl, tree exp)
4971 {
4972 /* The argument must be a nonnegative integer constant.
4973 It counts the number of frames to scan up the stack.
4974 The value is the return address saved in that frame. */
4975 if (call_expr_nargs (exp) == 0)
4976 /* Warning about missing arg was already issued. */
4977 return const0_rtx;
4978 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4979 {
4980 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4981 error ("invalid argument to %<__builtin_frame_address%>");
4982 else
4983 error ("invalid argument to %<__builtin_return_address%>");
4984 return const0_rtx;
4985 }
4986 else
4987 {
4988 rtx tem
4989 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4990 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4991
4992 /* Some ports cannot access arbitrary stack frames. */
4993 if (tem == NULL)
4994 {
4995 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4996 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4997 else
4998 warning (0, "unsupported argument to %<__builtin_return_address%>");
4999 return const0_rtx;
5000 }
5001
5002 /* For __builtin_frame_address, return what we've got. */
5003 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5004 return tem;
5005
5006 if (!REG_P (tem)
5007 && ! CONSTANT_P (tem))
5008 tem = copy_to_mode_reg (Pmode, tem);
5009 return tem;
5010 }
5011 }
5012
5013 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5014 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
5015 is the same as for allocate_dynamic_stack_space. */
5016
5017 static rtx
5018 expand_builtin_alloca (tree exp, bool cannot_accumulate)
5019 {
5020 rtx op0;
5021 rtx result;
5022
5023 /* Emit normal call if marked not-inlineable. */
5024 if (CALL_CANNOT_INLINE_P (exp))
5025 return NULL_RTX;
5026
5027 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5028 return NULL_RTX;
5029
5030 /* Compute the argument. */
5031 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5032
5033 /* Allocate the desired space. */
5034 result = allocate_dynamic_stack_space (op0, 0, BIGGEST_ALIGNMENT,
5035 cannot_accumulate);
5036 result = convert_memory_address (ptr_mode, result);
5037
5038 return result;
5039 }
5040
/* Expand EXP, a call to a bswap builtin.  If convenient, place the
   result in TARGET; SUBTARGET may be used when expanding the operand.
   Returns an rtx holding the byte-swapped value, or NULL_RTX if a
   normal call should be emitted.  */

static rtx
expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  target = expand_unop (mode, bswap_optab, op0, target, 1);

  /* expand_unop is expected to always succeed here.  */
  gcc_assert (target);

  return convert_to_mode (mode, target, 0);
}
5064
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.
   TARGET_MODE is the mode the caller wants the result in; OP_OPTAB is the
   optab implementing the unary operation.  */

static rtx
expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  Reuse SUBTARGET only when its mode matches
     the argument's own mode.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, 1);
  gcc_assert (target);

  /* Convert from the operand's mode to the mode the caller asked for.  */
  return convert_to_mode (target_mode, target, 0);
}
5093
5094 /* Expand a call to __builtin_expect. We just return our argument
5095 as the builtin_expect semantic should've been already executed by
5096 tree branch prediction pass. */
5097
5098 static rtx
5099 expand_builtin_expect (tree exp, rtx target)
5100 {
5101 tree arg;
5102
5103 if (call_expr_nargs (exp) < 2)
5104 return const0_rtx;
5105 arg = CALL_EXPR_ARG (exp, 0);
5106
5107 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5108 /* When guessing was done, the hints should be already stripped away. */
5109 gcc_assert (!flag_guess_branch_prob
5110 || optimize == 0 || seen_error ());
5111 return target;
5112 }
5113
/* Emit RTL for a trap: use the target's "trap" insn when it exists,
   otherwise fall back to calling abort via a library call.  A barrier
   is emitted because control never continues past the trap.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
  emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
5125
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  /* No code is generated; the barrier alone tells later passes that
     the instruction stream ends here.  */
  emit_barrier ();
}
5136
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Wrap the argument in a SAVE_EXPR and store it back into the call;
     the same tree is examined again by safe_from_p below.  NOTE(review):
     presumably this also keeps any later re-expansion of EXP from
     evaluating the argument twice — confirm against callers.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
5159
5160 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5161 Return NULL is a normal call should be emitted rather than expanding the
5162 function inline. If convenient, the result should be placed in TARGET.
5163 SUBTARGET may be used as the target for computing the operand. */
5164
5165 static rtx
5166 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5167 {
5168 rtx op0, op1;
5169 tree arg;
5170
5171 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5172 return NULL_RTX;
5173
5174 arg = CALL_EXPR_ARG (exp, 0);
5175 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5176
5177 arg = CALL_EXPR_ARG (exp, 1);
5178 op1 = expand_normal (arg);
5179
5180 return expand_copysign (op0, op1, target);
5181 }
5182
5183 /* Create a new constant string literal and return a char* pointer to it.
5184 The STRING_CST value is the LEN characters at STR. */
5185 tree
5186 build_string_literal (int len, const char *str)
5187 {
5188 tree t, elem, index, type;
5189
5190 t = build_string (len, str);
5191 elem = build_type_variant (char_type_node, 1, 0);
5192 index = build_index_type (size_int (len - 1));
5193 type = build_array_type (elem, index);
5194 TREE_TYPE (t) = type;
5195 TREE_CONSTANT (t) = 1;
5196 TREE_READONLY (t) = 1;
5197 TREE_STATIC (t) = 1;
5198
5199 type = build_pointer_type (elem);
5200 t = build1 (ADDR_EXPR, type,
5201 build4 (ARRAY_REF, elem,
5202 t, integer_zero_node, NULL_TREE, NULL_TREE));
5203 return t;
5204 }
5205
5206 /* Expand a call to either the entry or exit function profiler. */
5207
5208 static rtx
5209 expand_builtin_profile_func (bool exitp)
5210 {
5211 rtx this_rtx, which;
5212
5213 this_rtx = DECL_RTL (current_function_decl);
5214 gcc_assert (MEM_P (this_rtx));
5215 this_rtx = XEXP (this_rtx, 0);
5216
5217 if (exitp)
5218 which = profile_function_exit_libfunc;
5219 else
5220 which = profile_function_entry_libfunc;
5221
5222 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5223 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5224 0),
5225 Pmode);
5226
5227 return const0_rtx;
5228 }
5229
/* Expand a call to __builtin___clear_cache.  Returns NULL_RTX when a
   normal library call should be emitted instead, const0_rtx when the
   call is fully handled (or is a no-op) here.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;
  enum insn_code icode;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      icode = CODE_FOR_clear_cache;

      /* Expand both pointer arguments and force each into a form the
	 insn's operand predicate accepts.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
      begin_rtx = convert_memory_address (Pmode, begin_rtx);
      if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
	begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
      end_rtx = convert_memory_address (Pmode, end_rtx);
      if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
	end_rtx = copy_to_mode_reg (Pmode, end_rtx);

      emit_insn (gen_clear_cache (begin_rtx, end_rtx));
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
5282
5283 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5284
5285 static rtx
5286 round_trampoline_addr (rtx tramp)
5287 {
5288 rtx temp, addend, mask;
5289
5290 /* If we don't need too much alignment, we'll have been guaranteed
5291 proper alignment by get_trampoline_type. */
5292 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5293 return tramp;
5294
5295 /* Round address up to desired boundary. */
5296 temp = gen_reg_rtx (Pmode);
5297 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5298 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5299
5300 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5301 temp, 0, OPTAB_LIB_WIDEN);
5302 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5303 temp, 0, OPTAB_LIB_WIDEN);
5304
5305 return tramp;
5306 }
5307
/* Expand a call to __builtin_init_trampoline: fill in the trampoline at
   arg 0 so that calling it invokes the nested function at arg 1 with the
   static chain value at arg 2.  Returns const0_rtx, or NULL_RTX if the
   argument list is invalid and a normal call should be emitted.  */
static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* The TRAMP argument should be the address of a field within the
     local function's FRAME decl.  Let's see if we can fill in the
     MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
				     true, 0);

  /* If rounding changed the address, rebuild the MEM with the aligned
     address and the alignment/size we now know.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  trampolines_created = 1;

  warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
	      "trampoline generated for nested function %qD", t_func);

  return const0_rtx;
}
5359
5360 static rtx
5361 expand_builtin_adjust_trampoline (tree exp)
5362 {
5363 rtx tramp;
5364
5365 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5366 return NULL_RTX;
5367
5368 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5369 tramp = round_trampoline_addr (tramp);
5370 if (targetm.calls.trampoline_adjust_address)
5371 tramp = targetm.calls.trampoline_adjust_address (tramp);
5372
5373 return tramp;
5374 }
5375
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      /* If the insn fails to match, roll back any insns it emitted.  */
      rtx last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			 build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  /* Reinterpret the float bits as an integer: directly when the value
     fits in one word, otherwise pick out the word holding the sign.  */
  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implement with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      double_int mask = double_int_setbit (double_int_zero, bitpos);

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5487
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identificator of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  Returns NULL_RTX (emit a normal call) when
   not profiling, otherwise the rtx of the expanded wrapper call.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  /* Map each builtin onto its libgcov wrapper by name.  */
  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  /* Declare the wrapper as an external function with the same type as
     the original builtin, then call it in place of the builtin.  */
  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
5552
5553
5554 \f
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
5570
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  MODE is the machine mode the access
   should use.  */

static rtx
get_builtin_sync_mem (tree loc, enum machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc, BIGGEST_ALIGNMENT)));
  /* The barrier alias set plus volatility make the access ordered with
     respect to all other memory.  */
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
5595
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  IGNORE is true if we don't actually care about
   the result of the operation at all.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target, bool ignore)
{
  rtx val, mem;
  enum machine_mode old_mode;
  location_t loc = EXPR_LOCATION (exp);

  /* The NAND builtins changed meaning in GCC 4.4; warn once per
     direction (fetch-and-nand vs. nand-and-fetch) when requested.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_FETCH_AND_NAND_1:
	case BUILT_IN_FETCH_AND_NAND_2:
	case BUILT_IN_FETCH_AND_NAND_4:
	case BUILT_IN_FETCH_AND_NAND_8:
	case BUILT_IN_FETCH_AND_NAND_16:

	  if (warned_f_a_n)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_NAND_AND_FETCH_1:
	case BUILT_IN_NAND_AND_FETCH_2:
	case BUILT_IN_NAND_AND_FETCH_4:
	case BUILT_IN_NAND_AND_FETCH_8:
	case BUILT_IN_NAND_AND_FETCH_16:

	  if (warned_n_a_f)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  /* When the result is unused, emit the cheaper plain operation.  */
  if (ignore)
    return expand_sync_operation (mem, val, code);
  else
    return expand_sync_fetch_operation (mem, val, code, after, target);
}
5671
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  enum machine_mode old_mode;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);


  old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
			 mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (old_val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  old_val = convert_modes (mode, old_mode, old_val, 1);

  new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
			 mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (new_val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
  new_val = convert_modes (mode, old_mode, new_val, 1);

  /* The boolean form reports success/failure; the value form returns the
     prior memory contents.  */
  if (is_bool)
    return expand_bool_compare_and_swap (mem, old_val, new_val, target);
  else
    return expand_val_compare_and_swap (mem, old_val, new_val, target);
}
5711
5712 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5713 general form is actually an atomic exchange, and some targets only
5714 support a reduced form with the second argument being a constant 1.
5715 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5716 the results. */
5717
5718 static rtx
5719 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5720 rtx target)
5721 {
5722 rtx val, mem;
5723 enum machine_mode old_mode;
5724
5725 /* Expand the operands. */
5726 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5727 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5728 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5729 of CONST_INTs, where we know the old_mode only from the call argument. */
5730 old_mode = GET_MODE (val);
5731 if (old_mode == VOIDmode)
5732 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5733 val = convert_modes (mode, old_mode, val, 1);
5734
5735 return expand_sync_lock_test_and_set (mem, val, target);
5736 }
5737
/* Expand the __sync_synchronize intrinsic: a full memory barrier.
   Falls back through three strategies, from strongest to weakest.  */

static void
expand_builtin_synchronize (void)
{
  gimple x;
  VEC (tree, gc) *v_clobbers;

#ifdef HAVE_memory_barrier
  /* First choice: the target's dedicated memory barrier instruction.  */
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  /* Second choice: a barrier library function supplied by the target.  */
  if (synchronize_libfunc != NULL_RTX)
    {
      emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
      return;
    }

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  This emits no instruction;
     the volatile asm with a "memory" clobber only prevents the
     optimizers from moving memory accesses across this point.  */
  v_clobbers = VEC_alloc (tree, gc, 1);
  VEC_quick_push (tree, v_clobbers,
		  tree_cons (NULL, build_string (6, "memory"), NULL));
  x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
  gimple_asm_set_volatile (x, true);
  expand_asm_stmt (x);
}
5769
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR;
   MODE is the integer mode of the object being released.  Stores zero
   into the memory operand; the builtin's result is void.  */

static void
expand_builtin_lock_release (enum machine_mode mode, tree exp)
{
  enum insn_code icode;
  rtx mem, insn;
  rtx val = const0_rtx;  /* The released value is always zero.  */

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* If there is an explicit operation in the md file, use it.  */
  icode = direct_optab_handler (sync_lock_release_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      /* The insn's operand 1 predicate may reject a bare const0_rtx;
	 force it into a register in that case.  */
      if (!insn_data[icode].operand[1].predicate (val, mode))
	val = force_reg (mode, val);

      insn = GEN_FCN (icode) (mem, val);
      if (insn)
	{
	  emit_insn (insn);
	  return;
	}
    }

  /* Otherwise we can implement this operation by emitting a barrier
     followed by a store of zero.  */
  expand_builtin_synchronize ();
  emit_move_insn (mem, val);
}
5802 \f
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.

   This is the main RTL-expansion dispatcher for builtins: it handles
   the generic cases itself and hands machine-specific builtins to the
   target hook.  Builtins whose expander returns NULL_RTX (or that hit
   a 'break' below) fall through to an ordinary library call.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  /* Machine-specific builtins are expanded entirely by the target.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && DECL_ASSEMBLER_NAME_SET_P (fndecl)
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_FREE)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (! volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
    CASE_FLT_FN (BUILT_IN_EXP2):
    CASE_FLT_FN (BUILT_IN_EXPM1):
    CASE_FLT_FN (BUILT_IN_LOGB):
    CASE_FLT_FN (BUILT_IN_LOG):
    CASE_FLT_FN (BUILT_IN_LOG10):
    CASE_FLT_FN (BUILT_IN_LOG2):
    CASE_FLT_FN (BUILT_IN_LOG1P):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ACOS):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
	 because of possible accuracy problems.  */
      if (! flag_unsafe_math_optimizations)
	break;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_SQRT):
    CASE_FLT_FN (BUILT_IN_FLOOR):
    CASE_FLT_FN (BUILT_IN_CEIL):
    CASE_FLT_FN (BUILT_IN_TRUNC):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
	break;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POW):
      target = expand_builtin_pow (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
    CASE_FLT_FN (BUILT_IN_LDEXP):
    CASE_FLT_FN (BUILT_IN_SCALB):
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (! flag_unsafe_math_optimizations)
	break;
      /* FALLTHRU */

    CASE_FLT_FN (BUILT_IN_FMOD):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_sincos (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
	return const0_rtx;
      return expand_builtin_next_arg ();

    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      /* By expansion time nothing left is going to fold to a constant.  */
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
      /* If the allocation stems from the declaration of a variable-sized
	 object, it cannot accumulate.  */
      target = expand_builtin_alloca (exp, ALLOCA_FOR_VAR_P (exp));
      if (target)
	return target;
      break;

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;

    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (exp, target, subtarget);

      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
    case BUILT_IN_FFSIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
    case BUILT_IN_CLZIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
    case BUILT_IN_CTZIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
    case BUILT_IN_POPCOUNTIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
    case BUILT_IN_PARITYIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
	 and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx label_r = label_rtx (label);

	  /* This is copied from the handling of non-local gotos.  */
	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_EXPR_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_DISPATCHER:
       /* __builtin_setjmp_dispatcher is passed the dispatcher label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx label_r = label_rtx (label);

	  /* Remove the dispatcher label from the list of non-local labels
	     since the receiver labels have been added to it above.  */
	  remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
       /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  return const0_rtx;
	}
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
				CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
#ifdef EH_RETURN_DATA_REGNO
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
#endif
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);

    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_PROFILE_FUNC_ENTER:
      return expand_builtin_profile_func (false);
    case BUILT_IN_PROFILE_FUNC_EXIT:
      return expand_builtin_profile_func (true);

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;

      /* The __sync_* builtins below encode the operand width in the
	 _1.._16 suffix; the offset from the _1 variant selects the
	 machine mode via get_builtin_sync_mode.  */
    case BUILT_IN_FETCH_AND_ADD_1:
    case BUILT_IN_FETCH_AND_ADD_2:
    case BUILT_IN_FETCH_AND_ADD_4:
    case BUILT_IN_FETCH_AND_ADD_8:
    case BUILT_IN_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_SUB_1:
    case BUILT_IN_FETCH_AND_SUB_2:
    case BUILT_IN_FETCH_AND_SUB_4:
    case BUILT_IN_FETCH_AND_SUB_8:
    case BUILT_IN_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_OR_1:
    case BUILT_IN_FETCH_AND_OR_2:
    case BUILT_IN_FETCH_AND_OR_4:
    case BUILT_IN_FETCH_AND_OR_8:
    case BUILT_IN_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_AND_1:
    case BUILT_IN_FETCH_AND_AND_2:
    case BUILT_IN_FETCH_AND_AND_4:
    case BUILT_IN_FETCH_AND_AND_8:
    case BUILT_IN_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_XOR_1:
    case BUILT_IN_FETCH_AND_XOR_2:
    case BUILT_IN_FETCH_AND_XOR_4:
    case BUILT_IN_FETCH_AND_XOR_8:
    case BUILT_IN_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_NAND_1:
    case BUILT_IN_FETCH_AND_NAND_2:
    case BUILT_IN_FETCH_AND_NAND_4:
    case BUILT_IN_FETCH_AND_NAND_8:
    case BUILT_IN_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_ADD_AND_FETCH_1:
    case BUILT_IN_ADD_AND_FETCH_2:
    case BUILT_IN_ADD_AND_FETCH_4:
    case BUILT_IN_ADD_AND_FETCH_8:
    case BUILT_IN_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_SUB_AND_FETCH_1:
    case BUILT_IN_SUB_AND_FETCH_2:
    case BUILT_IN_SUB_AND_FETCH_4:
    case BUILT_IN_SUB_AND_FETCH_8:
    case BUILT_IN_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_OR_AND_FETCH_1:
    case BUILT_IN_OR_AND_FETCH_2:
    case BUILT_IN_OR_AND_FETCH_4:
    case BUILT_IN_OR_AND_FETCH_8:
    case BUILT_IN_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_AND_AND_FETCH_1:
    case BUILT_IN_AND_AND_FETCH_2:
    case BUILT_IN_AND_AND_FETCH_4:
    case BUILT_IN_AND_AND_FETCH_8:
    case BUILT_IN_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_XOR_AND_FETCH_1:
    case BUILT_IN_XOR_AND_FETCH_2:
    case BUILT_IN_XOR_AND_FETCH_4:
    case BUILT_IN_XOR_AND_FETCH_8:
    case BUILT_IN_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_NAND_AND_FETCH_1:
    case BUILT_IN_NAND_AND_FETCH_2:
    case BUILT_IN_NAND_AND_FETCH_4:
    case BUILT_IN_NAND_AND_FETCH_8:
    case BUILT_IN_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
      /* The boolean form requires a register target; make sure we
	 have one before MODE is reused for the sync operand mode.  */
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_LOCK_TEST_AND_SET_1:
    case BUILT_IN_LOCK_TEST_AND_SET_2:
    case BUILT_IN_LOCK_TEST_AND_SET_4:
    case BUILT_IN_LOCK_TEST_AND_SET_8:
    case BUILT_IN_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
      target = expand_builtin_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_LOCK_RELEASE_1:
    case BUILT_IN_LOCK_RELEASE_2:
    case BUILT_IN_LOCK_RELEASE_4:
    case BUILT_IN_LOCK_RELEASE_8:
    case BUILT_IN_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
      expand_builtin_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNCHRONIZE:
      expand_builtin_synchronize ();
      return const0_rtx;

    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      /* Only diagnose; the actual work is done by the library call.  */
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      maybe_emit_free_warning (exp);
      break;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
6624
6625 /* Determine whether a tree node represents a call to a built-in
6626 function. If the tree T is a call to a built-in function with
6627 the right number of arguments of the appropriate types, return
6628 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6629 Otherwise the return value is END_BUILTINS. */
6630
6631 enum built_in_function
6632 builtin_mathfn_code (const_tree t)
6633 {
6634 const_tree fndecl, arg, parmlist;
6635 const_tree argtype, parmtype;
6636 const_call_expr_arg_iterator iter;
6637
6638 if (TREE_CODE (t) != CALL_EXPR
6639 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6640 return END_BUILTINS;
6641
6642 fndecl = get_callee_fndecl (t);
6643 if (fndecl == NULL_TREE
6644 || TREE_CODE (fndecl) != FUNCTION_DECL
6645 || ! DECL_BUILT_IN (fndecl)
6646 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6647 return END_BUILTINS;
6648
6649 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6650 init_const_call_expr_arg_iterator (t, &iter);
6651 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6652 {
6653 /* If a function doesn't take a variable number of arguments,
6654 the last element in the list will have type `void'. */
6655 parmtype = TREE_VALUE (parmlist);
6656 if (VOID_TYPE_P (parmtype))
6657 {
6658 if (more_const_call_expr_args_p (&iter))
6659 return END_BUILTINS;
6660 return DECL_FUNCTION_CODE (fndecl);
6661 }
6662
6663 if (! more_const_call_expr_args_p (&iter))
6664 return END_BUILTINS;
6665
6666 arg = next_const_call_expr_arg (&iter);
6667 argtype = TREE_TYPE (arg);
6668
6669 if (SCALAR_FLOAT_TYPE_P (parmtype))
6670 {
6671 if (! SCALAR_FLOAT_TYPE_P (argtype))
6672 return END_BUILTINS;
6673 }
6674 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6675 {
6676 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6677 return END_BUILTINS;
6678 }
6679 else if (POINTER_TYPE_P (parmtype))
6680 {
6681 if (! POINTER_TYPE_P (argtype))
6682 return END_BUILTINS;
6683 }
6684 else if (INTEGRAL_TYPE_P (parmtype))
6685 {
6686 if (! INTEGRAL_TYPE_P (argtype))
6687 return END_BUILTINS;
6688 }
6689 else
6690 return END_BUILTINS;
6691 }
6692
6693 /* Variable-length argument list. */
6694 return DECL_FUNCTION_CODE (fndecl);
6695 }
6696
6697 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6698 evaluate to a constant. */
6699
6700 static tree
6701 fold_builtin_constant_p (tree arg)
6702 {
6703 /* We return 1 for a numeric type that's known to be a constant
6704 value at compile-time or for an aggregate type that's a
6705 literal constant. */
6706 STRIP_NOPS (arg);
6707
6708 /* If we know this is a constant, emit the constant of one. */
6709 if (CONSTANT_CLASS_P (arg)
6710 || (TREE_CODE (arg) == CONSTRUCTOR
6711 && TREE_CONSTANT (arg)))
6712 return integer_one_node;
6713 if (TREE_CODE (arg) == ADDR_EXPR)
6714 {
6715 tree op = TREE_OPERAND (arg, 0);
6716 if (TREE_CODE (op) == STRING_CST
6717 || (TREE_CODE (op) == ARRAY_REF
6718 && integer_zerop (TREE_OPERAND (op, 1))
6719 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6720 return integer_one_node;
6721 }
6722
6723 /* If this expression has side effects, show we don't know it to be a
6724 constant. Likewise if it's a pointer or aggregate type since in
6725 those case we only want literals, since those are only optimized
6726 when generating RTL, not later.
6727 And finally, if we are compiling an initializer, not code, we
6728 need to return a definite result now; there's not going to be any
6729 more optimization done. */
6730 if (TREE_SIDE_EFFECTS (arg)
6731 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6732 || POINTER_TYPE_P (TREE_TYPE (arg))
6733 || cfun == 0
6734 || folding_initializer)
6735 return integer_zero_node;
6736
6737 return NULL_TREE;
6738 }
6739
6740 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6741 return it as a truthvalue. */
6742
6743 static tree
6744 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6745 {
6746 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6747
6748 fn = built_in_decls[BUILT_IN_EXPECT];
6749 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6750 ret_type = TREE_TYPE (TREE_TYPE (fn));
6751 pred_type = TREE_VALUE (arg_types);
6752 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6753
6754 pred = fold_convert_loc (loc, pred_type, pred);
6755 expected = fold_convert_loc (loc, expected_type, expected);
6756 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6757
6758 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6759 build_int_cst (ret_type, 0));
6760 }
6761
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  ARG0 is the expression
   whose value is predicted, ARG1 the expected value.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl;
  enum tree_code code;

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = arg0;
  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner = arg0;
  while (TREE_CODE (inner) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
    inner = TREE_OPERAND (inner, 0);

  /* For a && or || condition, wrap each operand in its own
     __builtin_expect so branch prediction applies to both tests.  */
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Walk down to the underlying decl; a weak symbol's address is
	 not a compile-time constant (it may be null at link time).  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
           || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
6832
6833 /* Fold a call to __builtin_classify_type with argument ARG. */
6834
6835 static tree
6836 fold_builtin_classify_type (tree arg)
6837 {
6838 if (arg == 0)
6839 return build_int_cst (NULL_TREE, no_type_class);
6840
6841 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6842 }
6843
6844 /* Fold a call to __builtin_strlen with argument ARG. */
6845
6846 static tree
6847 fold_builtin_strlen (location_t loc, tree type, tree arg)
6848 {
6849 if (!validate_arg (arg, POINTER_TYPE))
6850 return NULL_TREE;
6851 else
6852 {
6853 tree len = c_strlen (arg, 0);
6854
6855 if (len)
6856 return fold_convert_loc (loc, type, len);
6857
6858 return NULL_TREE;
6859 }
6860 }
6861
6862 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6863
6864 static tree
6865 fold_builtin_inf (location_t loc, tree type, int warn)
6866 {
6867 REAL_VALUE_TYPE real;
6868
6869 /* __builtin_inff is intended to be usable to define INFINITY on all
6870 targets. If an infinity is not available, INFINITY expands "to a
6871 positive constant of type float that overflows at translation
6872 time", footnote "In this case, using INFINITY will violate the
6873 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6874 Thus we pedwarn to ensure this constraint violation is
6875 diagnosed. */
6876 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6877 pedwarn (loc, 0, "target format does not support infinity");
6878
6879 real_inf (&real);
6880 return build_real (type, real);
6881 }
6882
6883 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6884
6885 static tree
6886 fold_builtin_nan (tree arg, tree type, int quiet)
6887 {
6888 REAL_VALUE_TYPE real;
6889 const char *str;
6890
6891 if (!validate_arg (arg, POINTER_TYPE))
6892 return NULL_TREE;
6893 str = c_getstr (arg);
6894 if (!str)
6895 return NULL_TREE;
6896
6897 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6898 return NULL_TREE;
6899
6900 return build_real (type, real);
6901 }
6902
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.
   This is a conservative recursive analysis: false only means "not
   provably integer-valued".  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    /* A conversion from an integer type is integral by construction.  */
    case FLOAT_EXPR:
      return true;

    /* These preserve integrality of their (relevant) operand.  */
    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    /* The value of these is their second operand.  */
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    /* Arithmetic on integer values yields integer values (allowing
       overflow to +-Inf per the comment above).  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    /* A conditional is integral if both of its arms are.  */
    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    /* Look through conversions: from an integer type the result is
       integral; from a narrower real type, recurse.  */
    case NOP_EXPR:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    /* Rounding builtins produce integer values; fmin/fmax do when
       both arguments do.  */
    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
 	    && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
6974
6975 /* FNDECL is assumed to be a builtin where truncation can be propagated
6976 across (for instance floor((double)f) == (double)floorf (f).
6977 Do the transformation for a call with argument ARG. */
6978
6979 static tree
6980 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6981 {
6982 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6983
6984 if (!validate_arg (arg, REAL_TYPE))
6985 return NULL_TREE;
6986
6987 /* Integer rounding functions are idempotent. */
6988 if (fcode == builtin_mathfn_code (arg))
6989 return arg;
6990
6991 /* If argument is already integer valued, and we don't need to worry
6992 about setting errno, there's no need to perform rounding. */
6993 if (! flag_errno_math && integer_valued_real_p (arg))
6994 return arg;
6995
6996 if (optimize)
6997 {
6998 tree arg0 = strip_float_extensions (arg);
6999 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7000 tree newtype = TREE_TYPE (arg0);
7001 tree decl;
7002
7003 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7004 && (decl = mathfn_built_in (newtype, fcode)))
7005 return fold_convert_loc (loc, ftype,
7006 build_call_expr_loc (loc, decl, 1,
7007 fold_convert_loc (loc,
7008 newtype,
7009 arg0)));
7010 }
7011 return NULL_TREE;
7012 }
7013
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      /* If ARG was widened from a narrower float type, call the
	 variant of this builtin for the narrower type instead.  */
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* Convert the long result back to the original (long long)
	     return type of FNDECL.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
7083
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  FNDECL is the cabs declaration, used to rebuild the call
   when only the argument is simplified.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant:
     cabs(r+ii) == hypot(r,i), evaluated with MPFR.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
        {
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  STRIP_NOPS (real);
	  return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, real),
			      build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Expand cabs(z) as sqrt(r*r + i*i) under -funsafe-math-optimizations.
     Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Save ARG so the real and imaginary parts are each
	     evaluated only once.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				fold_build2_loc (loc, MULT_EXPR, type,
					     rpart, rpart),
				fold_build2_loc (loc, MULT_EXPR, type,
					     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7161
7162 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7163 complex tree type of the result. If NEG is true, the imaginary
7164 zero is negative. */
7165
7166 static tree
7167 build_complex_cproj (tree type, bool neg)
7168 {
7169 REAL_VALUE_TYPE rinf, rzero = dconst0;
7170
7171 real_inf (&rinf);
7172 rzero.sign = neg;
7173 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7174 build_real (TREE_TYPE (type), rzero));
7175 }
7176
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* cproj maps any infinity to (inf + copysign(0,imag)*i);
	 finite values project to themselves.  */
      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
7232
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{

  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			 CALL_EXPR_ARG (arg, 0),
			 build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: halve the exponent, i.e.
	     1/N becomes 1/(2*N).  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* Take |x| unless x is known nonnegative, since pow requires a
	 nonnegative base for fractional exponents.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			   build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7306
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      /* Halve 1/3 to get the combined exponent 1/6.  */
	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* Compute the exponent 1/9 as (1/3)*(1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
          || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					 build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7397
7398 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7399 TYPE is the type of the return value. Return NULL_TREE if no
7400 simplification can be made. */
7401
7402 static tree
7403 fold_builtin_cos (location_t loc,
7404 tree arg, tree type, tree fndecl)
7405 {
7406 tree res, narg;
7407
7408 if (!validate_arg (arg, REAL_TYPE))
7409 return NULL_TREE;
7410
7411 /* Calculate the result when the argument is a constant. */
7412 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7413 return res;
7414
7415 /* Optimize cos(-x) into cos (x). */
7416 if ((narg = fold_strip_sign_ops (arg)))
7417 return build_call_expr_loc (loc, fndecl, 1, narg);
7418
7419 return NULL_TREE;
7420 }
7421
7422 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7423 Return NULL_TREE if no simplification can be made. */
7424
7425 static tree
7426 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7427 {
7428 if (validate_arg (arg, REAL_TYPE))
7429 {
7430 tree res, narg;
7431
7432 /* Calculate the result when the argument is a constant. */
7433 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7434 return res;
7435
7436 /* Optimize cosh(-x) into cosh (x). */
7437 if ((narg = fold_strip_sign_ops (arg)))
7438 return build_call_expr_loc (loc, fndecl, 1, narg);
7439 }
7440
7441 return NULL_TREE;
7442 }
7443
7444 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7445 argument ARG. TYPE is the type of the return value. Return
7446 NULL_TREE if no simplification can be made. */
7447
7448 static tree
7449 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7450 bool hyper)
7451 {
7452 if (validate_arg (arg, COMPLEX_TYPE)
7453 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7454 {
7455 tree tmp;
7456
7457 /* Calculate the result when the argument is a constant. */
7458 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7459 return tmp;
7460
7461 /* Optimize fn(-x) into fn(x). */
7462 if ((tmp = fold_strip_sign_ops (arg)))
7463 return build_call_expr_loc (loc, fndecl, 1, tmp);
7464 }
7465
7466 return NULL_TREE;
7467 }
7468
7469 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7470 Return NULL_TREE if no simplification can be made. */
7471
7472 static tree
7473 fold_builtin_tan (tree arg, tree type)
7474 {
7475 enum built_in_function fcode;
7476 tree res;
7477
7478 if (!validate_arg (arg, REAL_TYPE))
7479 return NULL_TREE;
7480
7481 /* Calculate the result when the argument is a constant. */
7482 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7483 return res;
7484
7485 /* Optimize tan(atan(x)) = x. */
7486 fcode = builtin_mathfn_code (arg);
7487 if (flag_unsafe_math_optimizations
7488 && (fcode == BUILT_IN_ATAN
7489 || fcode == BUILT_IN_ATANF
7490 || fcode == BUILT_IN_ATANL))
7491 return CALL_EXPR_ARG (arg, 0);
7492
7493 return NULL_TREE;
7494 }
7495
/* Fold function call to builtin sincos, sincosf, or sincosl.  ARG0 is
   the angle, ARG1 and ARG2 are pointers to where the sine and cosine
   results are stored.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Save the cexpi call so it is evaluated only once, then store
     its imaginary part (sin) through ARG1 and its real part (cos)
     through ARG2.  */
  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
7535
/* Fold function call to builtin cexp, cexpf, or cexpl.  ARG0 is the
   complex argument, TYPE the (complex) return type.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  /* RTYPE is the scalar element type of the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp(0 + yi) == cexpi(y).  */
  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Save both calls so each is evaluated only once; the result is
	 exp(r)*cos(i) + exp(r)*sin(i)*I.  */
      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			  fold_build2_loc (loc, MULT_EXPR, rtype,
				       rcall,
			 	       fold_build1_loc (loc, REALPART_EXPR,
						    rtype, icall)),
			  fold_build2_loc (loc, MULT_EXPR, rtype,
				       rcall,
				       fold_build1_loc (loc, IMAGPART_EXPR,
						    rtype, icall)));
    }

  return NULL_TREE;
}
7603
7604 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7605 Return NULL_TREE if no simplification can be made. */
7606
7607 static tree
7608 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7609 {
7610 if (!validate_arg (arg, REAL_TYPE))
7611 return NULL_TREE;
7612
7613 /* Optimize trunc of constant value. */
7614 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7615 {
7616 REAL_VALUE_TYPE r, x;
7617 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7618
7619 x = TREE_REAL_CST (arg);
7620 real_trunc (&r, TYPE_MODE (type), &x);
7621 return build_real (type, r);
7622 }
7623
7624 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7625 }
7626
7627 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7628 Return NULL_TREE if no simplification can be made. */
7629
7630 static tree
7631 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7632 {
7633 if (!validate_arg (arg, REAL_TYPE))
7634 return NULL_TREE;
7635
7636 /* Optimize floor of constant value. */
7637 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7638 {
7639 REAL_VALUE_TYPE x;
7640
7641 x = TREE_REAL_CST (arg);
7642 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7643 {
7644 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7645 REAL_VALUE_TYPE r;
7646
7647 real_floor (&r, TYPE_MODE (type), &x);
7648 return build_real (type, r);
7649 }
7650 }
7651
7652 /* Fold floor (x) where x is nonnegative to trunc (x). */
7653 if (tree_expr_nonnegative_p (arg))
7654 {
7655 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7656 if (truncfn)
7657 return build_call_expr_loc (loc, truncfn, 1, arg);
7658 }
7659
7660 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7661 }
7662
7663 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7664 Return NULL_TREE if no simplification can be made. */
7665
7666 static tree
7667 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7668 {
7669 if (!validate_arg (arg, REAL_TYPE))
7670 return NULL_TREE;
7671
7672 /* Optimize ceil of constant value. */
7673 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7674 {
7675 REAL_VALUE_TYPE x;
7676
7677 x = TREE_REAL_CST (arg);
7678 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7679 {
7680 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7681 REAL_VALUE_TYPE r;
7682
7683 real_ceil (&r, TYPE_MODE (type), &x);
7684 return build_real (type, r);
7685 }
7686 }
7687
7688 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7689 }
7690
7691 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7692 Return NULL_TREE if no simplification can be made. */
7693
7694 static tree
7695 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7696 {
7697 if (!validate_arg (arg, REAL_TYPE))
7698 return NULL_TREE;
7699
7700 /* Optimize round of constant value. */
7701 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7702 {
7703 REAL_VALUE_TYPE x;
7704
7705 x = TREE_REAL_CST (arg);
7706 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7707 {
7708 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7709 REAL_VALUE_TYPE r;
7710
7711 real_round (&r, TYPE_MODE (type), &x);
7712 return build_real (type, r);
7713 }
7714 }
7715
7716 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7717 }
7718
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Only fold finite values; Inf/NaN have no integer result.  */
      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  double_int val;
	  REAL_VALUE_TYPE r;

	  /* Round in the float type first, per the builtin's rounding
	     direction, then convert to the integer return type.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Only fold when the value fits the integer return type.  */
	  real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
	  if (double_int_fits_to_tree_p (itype, val))
	    return double_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  /* Otherwise try the generic narrowing folds.  */
  return fold_fixed_mathfn (loc, fndecl, arg);
}
7783
7784 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7785 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7786 the argument to the call. Return NULL_TREE if no simplification can
7787 be made. */
7788
static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      /* The constant is kept as a two-word (LO, HI) pair; WIDTH is the
	 precision of the argument's type in bits.  */
      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < 2 * HOST_BITS_PER_WIDE_INT)
	    hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* ffs: 1-based index of the least significant set bit, 0 if none.  */
	  if (lo != 0)
	    result = ffs_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* If the value is zero, CLZ_DEFINED_VALUE_AT_ZERO may itself
	     store the target-defined result into RESULT; otherwise fall
	     back to the full width.  */
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* Same zero-input convention as CLZ above.  */
	  if (lo != 0)
	    result = ctz_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  /* Kernighan's trick: x &= x - 1 clears the lowest set bit, so
	     the loop iterates once per set bit.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  /* Parity is the popcount reduced modulo 2.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
7875
7876 /* Fold function call to builtin_bswap and the long and long long
7877 variants. Return NULL_TREE if no simplification can be made. */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      /* The constant is a two-word (LO, HI) pair; the result is built up
	 the same way in (R_LO, R_HI).  */
      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    int s;

	    /* Move the byte at bit offset S to the mirrored offset D,
	       picking source and destination word as needed.  */
	    for (s = 0; s < width; s += 8)
	      {
		int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;

		if (s < HOST_BITS_PER_WIDE_INT)
		  byte = (lo >> s) & 0xff;
		else
		  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		if (d < HOST_BITS_PER_WIDE_INT)
		  r_lo |= byte << d;
		else
		  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
	      }
	  }

	  break;

	default:
	  gcc_unreachable ();
	}

      /* A result narrower than one word needs only the low part.  */
      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
	return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
7934
7935 /* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
7937 FUNC is the corresponding MPFR logarithm function. */
7938
static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  The lower
	 bound dconst0 is exclusive: log of a non-positive value is not
	 folded.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  FUNC identifies which
	 logarithm this is, FCODE which builtin produced the argument.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  Both X and EXPONENT must have
	     been recognized above for the transformation to apply.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
8027
8028 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8029 NULL_TREE if no simplification can be made. */
8030
8031 static tree
8032 fold_builtin_hypot (location_t loc, tree fndecl,
8033 tree arg0, tree arg1, tree type)
8034 {
8035 tree res, narg0, narg1;
8036
8037 if (!validate_arg (arg0, REAL_TYPE)
8038 || !validate_arg (arg1, REAL_TYPE))
8039 return NULL_TREE;
8040
8041 /* Calculate the result when the argument is a constant. */
8042 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8043 return res;
8044
8045 /* If either argument to hypot has a negate or abs, strip that off.
8046 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8047 narg0 = fold_strip_sign_ops (arg0);
8048 narg1 = fold_strip_sign_ops (arg1);
8049 if (narg0 || narg1)
8050 {
8051 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8052 narg1 ? narg1 : arg1);
8053 }
8054
8055 /* If either argument is zero, hypot is fabs of the other. */
8056 if (real_zerop (arg0))
8057 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8058 else if (real_zerop (arg1))
8059 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8060
8061 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8062 if (flag_unsafe_math_optimizations
8063 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8064 {
8065 const REAL_VALUE_TYPE sqrt2_trunc
8066 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8067 return fold_build2_loc (loc, MULT_EXPR, type,
8068 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8069 build_real (type, sqrt2_trunc));
8070 }
8071
8072 return NULL_TREE;
8073 }
8074
8075
8076 /* Fold a builtin function call to pow, powf, or powl. Return
8077 NULL_TREE if no simplification can be made. */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent: round-trip C through an integer
	 and compare; an exact match means the exponent is integral.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  A negative power of zero must be left
	     for the runtime when traps or errno matter.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* An inexact compile-time result is only acceptable under
		 -funsafe-math-optimizations.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
8227
8228 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8229 Return NULL_TREE if no simplification can be made. */
8230 static tree
8231 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8232 tree arg0, tree arg1, tree type)
8233 {
8234 if (!validate_arg (arg0, REAL_TYPE)
8235 || !validate_arg (arg1, INTEGER_TYPE))
8236 return NULL_TREE;
8237
8238 /* Optimize pow(1.0,y) = 1.0. */
8239 if (real_onep (arg0))
8240 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8241
8242 if (host_integerp (arg1, 0))
8243 {
8244 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8245
8246 /* Evaluate powi at compile-time. */
8247 if (TREE_CODE (arg0) == REAL_CST
8248 && !TREE_OVERFLOW (arg0))
8249 {
8250 REAL_VALUE_TYPE x;
8251 x = TREE_REAL_CST (arg0);
8252 real_powi (&x, TYPE_MODE (type), &x, c);
8253 return build_real (type, x);
8254 }
8255
8256 /* Optimize pow(x,0) = 1.0. */
8257 if (c == 0)
8258 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8259 arg0);
8260
8261 /* Optimize pow(x,1) = x. */
8262 if (c == 1)
8263 return arg0;
8264
8265 /* Optimize pow(x,-1) = 1.0/x. */
8266 if (c == -1)
8267 return fold_build2_loc (loc, RDIV_EXPR, type,
8268 build_real (type, dconst1), arg0);
8269 }
8270
8271 return NULL_TREE;
8272 }
8273
8274 /* A subroutine of fold_builtin to fold the various exponent
8275 functions. Return NULL_TREE if no simplification can be made.
8276 FUNC is the corresponding MPFR exponent function. */
8277
8278 static tree
8279 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8280 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8281 {
8282 if (validate_arg (arg, REAL_TYPE))
8283 {
8284 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8285 tree res;
8286
8287 /* Calculate the result when the argument is a constant. */
8288 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8289 return res;
8290
8291 /* Optimize expN(logN(x)) = x. */
8292 if (flag_unsafe_math_optimizations)
8293 {
8294 const enum built_in_function fcode = builtin_mathfn_code (arg);
8295
8296 if ((func == mpfr_exp
8297 && (fcode == BUILT_IN_LOG
8298 || fcode == BUILT_IN_LOGF
8299 || fcode == BUILT_IN_LOGL))
8300 || (func == mpfr_exp2
8301 && (fcode == BUILT_IN_LOG2
8302 || fcode == BUILT_IN_LOG2F
8303 || fcode == BUILT_IN_LOG2L))
8304 || (func == mpfr_exp10
8305 && (fcode == BUILT_IN_LOG10
8306 || fcode == BUILT_IN_LOG10F
8307 || fcode == BUILT_IN_LOG10L)))
8308 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8309 }
8310 }
8311
8312 return NULL_TREE;
8313 }
8314
8315 /* Return true if VAR is a VAR_DECL or a component thereof. */
8316
8317 static bool
8318 var_decl_component_p (tree var)
8319 {
8320 tree inner = var;
8321 while (handled_component_p (inner))
8322 inner = TREE_OPERAND (inner, 0);
8323 return SSA_VAR_P (inner);
8324 }
8325
8326 /* Fold function call to builtin memset. Return
8327 NULL_TREE if no simplification can be made. */
8328
static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
		     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* The remaining transform rewrites the memset into a single scalar
     store, so DEST must be the address of a non-volatile variable.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The length must exactly cover one element of type ETYPE, and the
     destination must be sufficiently aligned for a single store.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
	 < length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the low byte of C across the whole word.  The final
	 (cval << 31) << 1 is a 32-bit shift split in two so it stays
	 well-defined when HOST_WIDE_INT is only 32 bits wide.  */
      cval = tree_low_cst (c, 1);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Emit *(etype *)dest = CVAL; keep DEST as the overall value unless the
     caller ignores the result.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
				     fold_convert_loc (loc,
						       build_pointer_type (etype),
						       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  return omit_one_operand_loc (loc, type, dest, ret);
}
8405
/* Fold function call to builtin bzero.  Return
8407 NULL_TREE if no simplification can be made. */
8408
8409 static tree
8410 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8411 {
8412 if (! validate_arg (dest, POINTER_TYPE)
8413 || ! validate_arg (size, INTEGER_TYPE))
8414 return NULL_TREE;
8415
8416 if (!ignore)
8417 return NULL_TREE;
8418
8419 /* New argument list transforming bzero(ptr x, int y) to
8420 memset(ptr x, int 0, size_t y). This is done this way
8421 so that if it isn't expanded inline, we fallback to
8422 calling bzero instead of memset. */
8423
8424 return fold_builtin_memset (loc, dest, integer_zero_node,
8425 fold_convert_loc (loc, sizetype, size),
8426 void_type_node, ignore);
8427 }
8428
8429 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8430 NULL_TREE if no simplification can be made.
8431 If ENDP is 0, return DEST (like memcpy).
8432 If ENDP is 1, return DEST+LEN (like mempcpy).
8433 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8434 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8435 (memmove). */
8436
static tree
fold_builtin_memory_op (location_t loc, tree dest, tree src,
			tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* ENDP == 3 is memmove; try to prove the regions cannot overlap so
	 that it can be rewritten as memcpy.  */
      if (endp == 3)
	{
	  src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
	  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return NULL_TREE;
	  if (readonly_data_expr (src)
	      || (host_integerp (len, 1)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
	    {
	      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      /* Prefer the actual copy length over the reference extent
		 when it is a known constant.  */
	      if (host_integerp (len, 1))
		maxsize = tree_low_cst (len, 1);
	      else
		maxsize = -1;
	      /* get_ref_base_and_extent reports offsets in bits.  */
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  /* Same variable: overlap only if the byte ranges meet.  */
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  /* Both accesses go through the same pointer: fold the
		     MEM_REF offsets into the byte offsets (guarding against
		     overflow) and test the ranges.  */
		  double_int off;
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return NULL_TREE;
		  off = double_int_add (mem_ref_offset (src_base),
					shwi_to_double_int (src_offset));
		  if (!double_int_fits_in_shwi_p (off))
		    return NULL_TREE;
		  src_offset = off.low;
		  off = double_int_add (mem_ref_offset (dest_base),
					shwi_to_double_int (dest_offset));
		  if (!double_int_fits_in_shwi_p (off))
		    return NULL_TREE;
		  dest_offset = off.low;
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else
		return NULL_TREE;

	      fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
		  if (!fn)
		    return NULL_TREE;
		  return build_call_expr_loc (loc, fn, 3, dest, src, len);
		}
	    }

	  return NULL_TREE;
	}

      /* From here on: try to turn the copy into a single scalar
	 assignment *destvar = *srcvar.  */
      if (!host_integerp (len, 0))
	return NULL_TREE;
      /* FIXME:
	 This logic lose for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      /* If SRC points to an array whose size differs from LEN, copy a
	 single element instead of the whole array.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (srctype
	  && TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      /* Likewise for DEST.  */
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (desttype
	  && TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      /* Both element types must have a usable constant size.  */
      if (!srctype || !desttype
	  || TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype)
	  || !TYPE_SIZE_UNIT (srctype)
	  || !TYPE_SIZE_UNIT (desttype)
	  || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
	  || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST)
	return NULL_TREE;

      src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return NULL_TREE;

      /* DEST is used both for the store and for the return value;
	 protect against double evaluation.  */
      if (!ignore)
	dest = builtin_save_expr (dest);

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  /* Prefer reading through the destination's type so the
	     assignment below has matching sides; an under-aligned source
	     may still be read with a less-aligned variant of that type on
	     targets without strict alignment.  */
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return NULL_TREE;

      /* If only one side became a direct variable access, materialize the
	 other side through a MEM_REF of the matching type.  */
      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return NULL_TREE;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return NULL_TREE;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
    }

  if (ignore)
    return expr;

  /* memcpy/memmove return DEST.  */
  if (endp == 0 || endp == 3)
    return omit_one_operand_loc (loc, type, dest, expr);

  /* EXPR == LEN marks the src == dest case above: there is no store to
     sequence before the returned pointer.  */
  if (expr == len)
    expr = NULL_TREE;

  /* stpcpy-style (ENDP == 2) returns DEST+LEN-1, mempcpy DEST+LEN.  */
  if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));

  len = fold_convert_loc (loc, sizetype, len);
  dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
  dest = fold_convert_loc (loc, type, dest);
  if (expr)
    dest = omit_one_operand_loc (loc, type, dest, expr);
  return dest;
}
8716
8717 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8718 If LEN is not NULL, it represents the length of the string to be
8719 copied. Return NULL_TREE if no simplification can be made. */
8720
8721 tree
8722 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8723 {
8724 tree fn;
8725
8726 if (!validate_arg (dest, POINTER_TYPE)
8727 || !validate_arg (src, POINTER_TYPE))
8728 return NULL_TREE;
8729
8730 /* If SRC and DEST are the same (and not volatile), return DEST. */
8731 if (operand_equal_p (src, dest, 0))
8732 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8733
8734 if (optimize_function_for_size_p (cfun))
8735 return NULL_TREE;
8736
8737 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8738 if (!fn)
8739 return NULL_TREE;
8740
8741 if (!len)
8742 {
8743 len = c_strlen (src, 1);
8744 if (! len || TREE_SIDE_EFFECTS (len))
8745 return NULL_TREE;
8746 }
8747
8748 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8749 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8750 build_call_expr_loc (loc, fn, 3, dest, src, len));
8751 }
8752
8753 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8754 Return NULL_TREE if no simplification can be made. */
8755
8756 static tree
8757 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8758 {
8759 tree fn, len, lenp1, call, type;
8760
8761 if (!validate_arg (dest, POINTER_TYPE)
8762 || !validate_arg (src, POINTER_TYPE))
8763 return NULL_TREE;
8764
8765 len = c_strlen (src, 1);
8766 if (!len
8767 || TREE_CODE (len) != INTEGER_CST)
8768 return NULL_TREE;
8769
8770 if (optimize_function_for_size_p (cfun)
8771 /* If length is zero it's small enough. */
8772 && !integer_zerop (len))
8773 return NULL_TREE;
8774
8775 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8776 if (!fn)
8777 return NULL_TREE;
8778
8779 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8780 /* We use dest twice in building our expression. Save it from
8781 multiple expansions. */
8782 dest = builtin_save_expr (dest);
8783 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8784
8785 type = TREE_TYPE (TREE_TYPE (fndecl));
8786 len = fold_convert_loc (loc, sizetype, len);
8787 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8788 dest = fold_convert_loc (loc, type, dest);
8789 dest = omit_one_operand_loc (loc, type, dest, call);
8790 return dest;
8791 }
8792
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   FNDECL is the strncpy declaration; its return type gives the type of
   the folded expression.  Return NULL_TREE if no simplification can be
   made.  */

tree
fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
		      tree src, tree len, tree slen)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (len == 0 || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (!slen)
    slen = c_strlen (src, 1);

  /* Now, we must be passed a constant src ptr parameter.  */
  if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
    return NULL_TREE;

  /* Account for the terminating NUL; strncpy reads at most SLEN + 1
     bytes from SRC before it starts zero-padding.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return NULL_TREE;

  /* OK transform into builtin memcpy.  Here SLEN >= LEN, so the copy
     covers the whole destination and no zero-padding is needed.  */
  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
			   build_call_expr_loc (loc, fn, 3, dest, src, len));
}
8839
8840 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8841 arguments to the call, and TYPE is its return type.
8842 Return NULL_TREE if no simplification can be made. */
8843
8844 static tree
8845 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8846 {
8847 if (!validate_arg (arg1, POINTER_TYPE)
8848 || !validate_arg (arg2, INTEGER_TYPE)
8849 || !validate_arg (len, INTEGER_TYPE))
8850 return NULL_TREE;
8851 else
8852 {
8853 const char *p1;
8854
8855 if (TREE_CODE (arg2) != INTEGER_CST
8856 || !host_integerp (len, 1))
8857 return NULL_TREE;
8858
8859 p1 = c_getstr (arg1);
8860 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8861 {
8862 char c;
8863 const char *r;
8864 tree tem;
8865
8866 if (target_char_cast (arg2, &c))
8867 return NULL_TREE;
8868
8869 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8870
8871 if (r == NULL)
8872 return build_int_cst (TREE_TYPE (arg1), 0);
8873
8874 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8875 size_int (r - p1));
8876 return fold_convert_loc (loc, type, tem);
8877 }
8878 return NULL_TREE;
8879 }
8880 }
8881
/* Fold function call to builtin memcmp with arguments ARG1, ARG2 and
   LEN.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      /* Normalize the host memcmp result to -1/0/1.  */
      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      /* Access the bytes through a const unsigned char pointer, as the
	 memcmp contract requires unsigned byte comparison.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8948
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings constant: evaluate on the host and normalize the
     result to -1/0/1.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      /* strcmp compares as unsigned chars, hence the const unsigned
	 char pointer access.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
9011
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* All three operands constant: evaluate on the host and normalize
     the result to -1/0/1.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      /* strncmp compares as unsigned chars, hence the const unsigned
	 char pointer access.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9106
9107 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9108 ARG. Return NULL_TREE if no simplification can be made. */
9109
9110 static tree
9111 fold_builtin_signbit (location_t loc, tree arg, tree type)
9112 {
9113 if (!validate_arg (arg, REAL_TYPE))
9114 return NULL_TREE;
9115
9116 /* If ARG is a compile-time constant, determine the result. */
9117 if (TREE_CODE (arg) == REAL_CST
9118 && !TREE_OVERFLOW (arg))
9119 {
9120 REAL_VALUE_TYPE c;
9121
9122 c = TREE_REAL_CST (arg);
9123 return (REAL_VALUE_NEGATIVE (c)
9124 ? build_one_cst (type)
9125 : build_zero_cst (type));
9126 }
9127
9128 /* If ARG is non-negative, the result is always zero. */
9129 if (tree_expr_nonnegative_p (arg))
9130 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9131
9132 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9133 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9134 return fold_build2_loc (loc, LT_EXPR, type, arg,
9135 build_real (TREE_TYPE (arg), dconst0));
9136
9137 return NULL_TREE;
9138 }
9139
9140 /* Fold function call to builtin copysign, copysignf or copysignl with
9141 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9142 be made. */
9143
9144 static tree
9145 fold_builtin_copysign (location_t loc, tree fndecl,
9146 tree arg1, tree arg2, tree type)
9147 {
9148 tree tem;
9149
9150 if (!validate_arg (arg1, REAL_TYPE)
9151 || !validate_arg (arg2, REAL_TYPE))
9152 return NULL_TREE;
9153
9154 /* copysign(X,X) is X. */
9155 if (operand_equal_p (arg1, arg2, 0))
9156 return fold_convert_loc (loc, type, arg1);
9157
9158 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9159 if (TREE_CODE (arg1) == REAL_CST
9160 && TREE_CODE (arg2) == REAL_CST
9161 && !TREE_OVERFLOW (arg1)
9162 && !TREE_OVERFLOW (arg2))
9163 {
9164 REAL_VALUE_TYPE c1, c2;
9165
9166 c1 = TREE_REAL_CST (arg1);
9167 c2 = TREE_REAL_CST (arg2);
9168 /* c1.sign := c2.sign. */
9169 real_copysign (&c1, &c2);
9170 return build_real (type, c1);
9171 }
9172
9173 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9174 Remember to evaluate Y for side-effects. */
9175 if (tree_expr_nonnegative_p (arg2))
9176 return omit_one_operand_loc (loc, type,
9177 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9178 arg2);
9179
9180 /* Strip sign changing operations for the first argument. */
9181 tem = fold_strip_sign_ops (arg1);
9182 if (tem)
9183 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9184
9185 return NULL_TREE;
9186 }
9187
9188 /* Fold a call to builtin isascii with argument ARG. */
9189
9190 static tree
9191 fold_builtin_isascii (location_t loc, tree arg)
9192 {
9193 if (!validate_arg (arg, INTEGER_TYPE))
9194 return NULL_TREE;
9195 else
9196 {
9197 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9198 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9199 build_int_cst (NULL_TREE,
9200 ~ (unsigned HOST_WIDE_INT) 0x7f));
9201 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9202 arg, integer_zero_node);
9203 }
9204 }
9205
9206 /* Fold a call to builtin toascii with argument ARG. */
9207
9208 static tree
9209 fold_builtin_toascii (location_t loc, tree arg)
9210 {
9211 if (!validate_arg (arg, INTEGER_TYPE))
9212 return NULL_TREE;
9213
9214 /* Transform toascii(c) -> (c & 0x7f). */
9215 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9216 build_int_cst (NULL_TREE, 0x7f));
9217 }
9218
9219 /* Fold a call to builtin isdigit with argument ARG. */
9220
9221 static tree
9222 fold_builtin_isdigit (location_t loc, tree arg)
9223 {
9224 if (!validate_arg (arg, INTEGER_TYPE))
9225 return NULL_TREE;
9226 else
9227 {
9228 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9229 /* According to the C standard, isdigit is unaffected by locale.
9230 However, it definitely is affected by the target character set. */
9231 unsigned HOST_WIDE_INT target_digit0
9232 = lang_hooks.to_target_charset ('0');
9233
9234 if (target_digit0 == 0)
9235 return NULL_TREE;
9236
9237 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9238 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9239 build_int_cst (unsigned_type_node, target_digit0));
9240 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9241 build_int_cst (unsigned_type_node, 9));
9242 }
9243 }
9244
9245 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9246
9247 static tree
9248 fold_builtin_fabs (location_t loc, tree arg, tree type)
9249 {
9250 if (!validate_arg (arg, REAL_TYPE))
9251 return NULL_TREE;
9252
9253 arg = fold_convert_loc (loc, type, arg);
9254 if (TREE_CODE (arg) == REAL_CST)
9255 return fold_abs_const (arg, type);
9256 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9257 }
9258
9259 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9260
9261 static tree
9262 fold_builtin_abs (location_t loc, tree arg, tree type)
9263 {
9264 if (!validate_arg (arg, INTEGER_TYPE))
9265 return NULL_TREE;
9266
9267 arg = fold_convert_loc (loc, type, arg);
9268 if (TREE_CODE (arg) == INTEGER_CST)
9269 return fold_abs_const (arg, type);
9270 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9271 }
9272
9273 /* Fold a fma operation with arguments ARG[012]. */
9274
9275 tree
9276 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9277 tree type, tree arg0, tree arg1, tree arg2)
9278 {
9279 if (TREE_CODE (arg0) == REAL_CST
9280 && TREE_CODE (arg1) == REAL_CST
9281 && TREE_CODE (arg2) == REAL_CST)
9282 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9283
9284 return NULL_TREE;
9285 }
9286
9287 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9288
9289 static tree
9290 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9291 {
9292 if (validate_arg (arg0, REAL_TYPE)
9293 && validate_arg(arg1, REAL_TYPE)
9294 && validate_arg(arg2, REAL_TYPE))
9295 {
9296 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9297 if (tem)
9298 return tem;
9299
9300 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9301 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9302 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9303 }
9304 return NULL_TREE;
9305 }
9306
/* Fold a call to builtin fmin or fmax with arguments ARG0 and ARG1.
   TYPE is the result type; MAX selects fmax when true and fmin when
   false.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
9351
9352 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9353
9354 static tree
9355 fold_builtin_carg (location_t loc, tree arg, tree type)
9356 {
9357 if (validate_arg (arg, COMPLEX_TYPE)
9358 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9359 {
9360 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9361
9362 if (atan2_fn)
9363 {
9364 tree new_arg = builtin_save_expr (arg);
9365 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9366 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9367 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9368 }
9369 }
9370
9371 return NULL_TREE;
9372 }
9373
/* Fold a call to builtin logb/ilogb with argument ARG.  RETTYPE is the
   result type: a real type for logb, an integer type for ilogb.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only constant, non-overflowing arguments can be folded.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert_loc (loc, rettype, arg);
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (NULL_TREE,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9415
/* Fold a call to builtin significand with argument ARG and result type
   RETTYPE, if radix == 2.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only constant, non-overflowing arguments can be folded.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9454
/* Fold a call to builtin frexp with arguments ARG0 (the value) and
   ARG1 (an int* receiving the exponent); we can assume the base is 2.
   RETTYPE is the real result type.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only a constant, non-overflowing value argument can be folded.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (NULL_TREE, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9510
/* Fold a call to builtin ldexp or scalbn/scalbln with arguments ARG0
   (the value) and ARG1 (the exponent adjustment); TYPE is the result
   type.  If LDEXP is true then we can assume the base is two.  If it's
   false, then we have to check the mode of the TYPE parameter in
   certain cases.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
	 For scalbn/scalbln this is only valid when the radix is 2.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9571
9572 /* Fold a call to builtin modf. */
9573
9574 static tree
9575 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9576 {
9577 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9578 return NULL_TREE;
9579
9580 STRIP_NOPS (arg0);
9581
9582 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9583 return NULL_TREE;
9584
9585 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9586
9587 /* Proceed if a valid pointer type was passed in. */
9588 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9589 {
9590 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9591 REAL_VALUE_TYPE trunc, frac;
9592
9593 switch (value->cl)
9594 {
9595 case rvc_nan:
9596 case rvc_zero:
9597 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9598 trunc = frac = *value;
9599 break;
9600 case rvc_inf:
9601 /* For +-Inf, return (*arg1 = arg0, +-0). */
9602 frac = dconst0;
9603 frac.sign = value->sign;
9604 trunc = *value;
9605 break;
9606 case rvc_normal:
9607 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9608 real_trunc (&trunc, VOIDmode, value);
9609 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9610 /* If the original number was negative and already
9611 integral, then the fractional part is -0.0. */
9612 if (value->sign && frac.cl == rvc_zero)
9613 frac.sign = value->sign;
9614 break;
9615 }
9616
9617 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9618 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9619 build_real (rettype, trunc));
9620 TREE_SIDE_EFFECTS (arg1) = 1;
9621 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9622 build_real (rettype, frac));
9623 }
9624
9625 return NULL_TREE;
9626 }
9627
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If the target has an instruction for this classification, prefer
     letting expansion use it rather than folding here.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	/* BUF receives the decimal spelling of the mode's largest
	   finite value.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* RMIN is the smallest positive normal value of the mode.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	/* fabs(x) is used twice; save it from double expansion.  */
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
9717
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call; FNDECL is the function declaration
   and BUILTIN_INDEX selects which classification to fold.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* Without infinities in the mode, isinf is always false.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
	tree tmp = NULL_TREE;

	/* ARG is used by both calls; save it from double expansion.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 booleans before combining.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
				      isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
			       integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			       isinf_call, tmp,
			       integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* Without NaNs or infinities in the mode, everything is finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* Without NaNs in the mode, isnan is always false.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* isnan(x) -> x != x, via an unordered comparison; save ARG
	 from double expansion.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9810
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  Returns the
   folded expression, or NULL_TREE if the argument list is malformed.  */

static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* Classify |ARG| so a single ordered compare covers both signs;
     builtin_save_expr wraps it so the argument is not evaluated more
     than once across the COND_EXPR chain built below.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).
     The COND_EXPR chain is built inside out, starting from the
     ZERO-vs-SUBNORMAL leaf.  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  /* 0x1p(emin-1) is the smallest positive normal value of MODE's
     floating-point format; anything at least that large (and not
     Inf/NaN, tested below) classifies as FP_NORMAL.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  /* Only emit the infinity test when the mode honors infinities.  */
  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  /* ORDERED_EXPR on (arg, arg) is false exactly when ARG is a NaN.  */
  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
9878
9879 /* Fold a call to an unordered comparison function such as
9880 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9881 being called and ARG0 and ARG1 are the arguments for the call.
9882 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9883 the opposite of the desired result. UNORDERED_CODE is used
9884 for modes that can hold NaNs and ORDERED_CODE is used for
9885 the rest. */
9886
9887 static tree
9888 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9889 enum tree_code unordered_code,
9890 enum tree_code ordered_code)
9891 {
9892 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9893 enum tree_code code;
9894 tree type0, type1;
9895 enum tree_code code0, code1;
9896 tree cmp_type = NULL_TREE;
9897
9898 type0 = TREE_TYPE (arg0);
9899 type1 = TREE_TYPE (arg1);
9900
9901 code0 = TREE_CODE (type0);
9902 code1 = TREE_CODE (type1);
9903
9904 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9905 /* Choose the wider of two real types. */
9906 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9907 ? type0 : type1;
9908 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9909 cmp_type = type0;
9910 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9911 cmp_type = type1;
9912
9913 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9914 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9915
9916 if (unordered_code == UNORDERED_EXPR)
9917 {
9918 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9919 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9920 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9921 }
9922
9923 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9924 : ordered_code;
9925 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9926 fold_build2_loc (loc, code, type, arg0, arg1));
9927 }
9928
9929 /* Fold a call to built-in function FNDECL with 0 arguments.
9930 IGNORE is true if the result of the function call is ignored. This
9931 function returns NULL_TREE if no simplification was possible. */
9932
9933 static tree
9934 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9935 {
9936 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9937 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9938 switch (fcode)
9939 {
9940 CASE_FLT_FN (BUILT_IN_INF):
9941 case BUILT_IN_INFD32:
9942 case BUILT_IN_INFD64:
9943 case BUILT_IN_INFD128:
9944 return fold_builtin_inf (loc, type, true);
9945
9946 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9947 return fold_builtin_inf (loc, type, false);
9948
9949 case BUILT_IN_CLASSIFY_TYPE:
9950 return fold_builtin_classify_type (NULL_TREE);
9951
9952 default:
9953 break;
9954 }
9955 return NULL_TREE;
9956 }
9957
9958 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9959 IGNORE is true if the result of the function call is ignored. This
9960 function returns NULL_TREE if no simplification was possible. */
9961
9962 static tree
9963 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9964 {
9965 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9966 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9967 switch (fcode)
9968 {
9969 case BUILT_IN_CONSTANT_P:
9970 {
9971 tree val = fold_builtin_constant_p (arg0);
9972
9973 /* Gimplification will pull the CALL_EXPR for the builtin out of
9974 an if condition. When not optimizing, we'll not CSE it back.
9975 To avoid link error types of regressions, return false now. */
9976 if (!val && !optimize)
9977 val = integer_zero_node;
9978
9979 return val;
9980 }
9981
9982 case BUILT_IN_CLASSIFY_TYPE:
9983 return fold_builtin_classify_type (arg0);
9984
9985 case BUILT_IN_STRLEN:
9986 return fold_builtin_strlen (loc, type, arg0);
9987
9988 CASE_FLT_FN (BUILT_IN_FABS):
9989 return fold_builtin_fabs (loc, arg0, type);
9990
9991 case BUILT_IN_ABS:
9992 case BUILT_IN_LABS:
9993 case BUILT_IN_LLABS:
9994 case BUILT_IN_IMAXABS:
9995 return fold_builtin_abs (loc, arg0, type);
9996
9997 CASE_FLT_FN (BUILT_IN_CONJ):
9998 if (validate_arg (arg0, COMPLEX_TYPE)
9999 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10000 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10001 break;
10002
10003 CASE_FLT_FN (BUILT_IN_CREAL):
10004 if (validate_arg (arg0, COMPLEX_TYPE)
10005 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10006 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10007 break;
10008
10009 CASE_FLT_FN (BUILT_IN_CIMAG):
10010 if (validate_arg (arg0, COMPLEX_TYPE)
10011 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10012 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10013 break;
10014
10015 CASE_FLT_FN (BUILT_IN_CCOS):
10016 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
10017
10018 CASE_FLT_FN (BUILT_IN_CCOSH):
10019 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
10020
10021 CASE_FLT_FN (BUILT_IN_CPROJ):
10022 return fold_builtin_cproj(loc, arg0, type);
10023
10024 CASE_FLT_FN (BUILT_IN_CSIN):
10025 if (validate_arg (arg0, COMPLEX_TYPE)
10026 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10027 return do_mpc_arg1 (arg0, type, mpc_sin);
10028 break;
10029
10030 CASE_FLT_FN (BUILT_IN_CSINH):
10031 if (validate_arg (arg0, COMPLEX_TYPE)
10032 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10033 return do_mpc_arg1 (arg0, type, mpc_sinh);
10034 break;
10035
10036 CASE_FLT_FN (BUILT_IN_CTAN):
10037 if (validate_arg (arg0, COMPLEX_TYPE)
10038 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10039 return do_mpc_arg1 (arg0, type, mpc_tan);
10040 break;
10041
10042 CASE_FLT_FN (BUILT_IN_CTANH):
10043 if (validate_arg (arg0, COMPLEX_TYPE)
10044 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10045 return do_mpc_arg1 (arg0, type, mpc_tanh);
10046 break;
10047
10048 CASE_FLT_FN (BUILT_IN_CLOG):
10049 if (validate_arg (arg0, COMPLEX_TYPE)
10050 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10051 return do_mpc_arg1 (arg0, type, mpc_log);
10052 break;
10053
10054 CASE_FLT_FN (BUILT_IN_CSQRT):
10055 if (validate_arg (arg0, COMPLEX_TYPE)
10056 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10057 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10058 break;
10059
10060 CASE_FLT_FN (BUILT_IN_CASIN):
10061 if (validate_arg (arg0, COMPLEX_TYPE)
10062 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10063 return do_mpc_arg1 (arg0, type, mpc_asin);
10064 break;
10065
10066 CASE_FLT_FN (BUILT_IN_CACOS):
10067 if (validate_arg (arg0, COMPLEX_TYPE)
10068 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10069 return do_mpc_arg1 (arg0, type, mpc_acos);
10070 break;
10071
10072 CASE_FLT_FN (BUILT_IN_CATAN):
10073 if (validate_arg (arg0, COMPLEX_TYPE)
10074 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10075 return do_mpc_arg1 (arg0, type, mpc_atan);
10076 break;
10077
10078 CASE_FLT_FN (BUILT_IN_CASINH):
10079 if (validate_arg (arg0, COMPLEX_TYPE)
10080 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10081 return do_mpc_arg1 (arg0, type, mpc_asinh);
10082 break;
10083
10084 CASE_FLT_FN (BUILT_IN_CACOSH):
10085 if (validate_arg (arg0, COMPLEX_TYPE)
10086 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10087 return do_mpc_arg1 (arg0, type, mpc_acosh);
10088 break;
10089
10090 CASE_FLT_FN (BUILT_IN_CATANH):
10091 if (validate_arg (arg0, COMPLEX_TYPE)
10092 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10093 return do_mpc_arg1 (arg0, type, mpc_atanh);
10094 break;
10095
10096 CASE_FLT_FN (BUILT_IN_CABS):
10097 return fold_builtin_cabs (loc, arg0, type, fndecl);
10098
10099 CASE_FLT_FN (BUILT_IN_CARG):
10100 return fold_builtin_carg (loc, arg0, type);
10101
10102 CASE_FLT_FN (BUILT_IN_SQRT):
10103 return fold_builtin_sqrt (loc, arg0, type);
10104
10105 CASE_FLT_FN (BUILT_IN_CBRT):
10106 return fold_builtin_cbrt (loc, arg0, type);
10107
10108 CASE_FLT_FN (BUILT_IN_ASIN):
10109 if (validate_arg (arg0, REAL_TYPE))
10110 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10111 &dconstm1, &dconst1, true);
10112 break;
10113
10114 CASE_FLT_FN (BUILT_IN_ACOS):
10115 if (validate_arg (arg0, REAL_TYPE))
10116 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10117 &dconstm1, &dconst1, true);
10118 break;
10119
10120 CASE_FLT_FN (BUILT_IN_ATAN):
10121 if (validate_arg (arg0, REAL_TYPE))
10122 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10123 break;
10124
10125 CASE_FLT_FN (BUILT_IN_ASINH):
10126 if (validate_arg (arg0, REAL_TYPE))
10127 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10128 break;
10129
10130 CASE_FLT_FN (BUILT_IN_ACOSH):
10131 if (validate_arg (arg0, REAL_TYPE))
10132 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10133 &dconst1, NULL, true);
10134 break;
10135
10136 CASE_FLT_FN (BUILT_IN_ATANH):
10137 if (validate_arg (arg0, REAL_TYPE))
10138 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10139 &dconstm1, &dconst1, false);
10140 break;
10141
10142 CASE_FLT_FN (BUILT_IN_SIN):
10143 if (validate_arg (arg0, REAL_TYPE))
10144 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10145 break;
10146
10147 CASE_FLT_FN (BUILT_IN_COS):
10148 return fold_builtin_cos (loc, arg0, type, fndecl);
10149
10150 CASE_FLT_FN (BUILT_IN_TAN):
10151 return fold_builtin_tan (arg0, type);
10152
10153 CASE_FLT_FN (BUILT_IN_CEXP):
10154 return fold_builtin_cexp (loc, arg0, type);
10155
10156 CASE_FLT_FN (BUILT_IN_CEXPI):
10157 if (validate_arg (arg0, REAL_TYPE))
10158 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10159 break;
10160
10161 CASE_FLT_FN (BUILT_IN_SINH):
10162 if (validate_arg (arg0, REAL_TYPE))
10163 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10164 break;
10165
10166 CASE_FLT_FN (BUILT_IN_COSH):
10167 return fold_builtin_cosh (loc, arg0, type, fndecl);
10168
10169 CASE_FLT_FN (BUILT_IN_TANH):
10170 if (validate_arg (arg0, REAL_TYPE))
10171 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10172 break;
10173
10174 CASE_FLT_FN (BUILT_IN_ERF):
10175 if (validate_arg (arg0, REAL_TYPE))
10176 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10177 break;
10178
10179 CASE_FLT_FN (BUILT_IN_ERFC):
10180 if (validate_arg (arg0, REAL_TYPE))
10181 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10182 break;
10183
10184 CASE_FLT_FN (BUILT_IN_TGAMMA):
10185 if (validate_arg (arg0, REAL_TYPE))
10186 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10187 break;
10188
10189 CASE_FLT_FN (BUILT_IN_EXP):
10190 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10191
10192 CASE_FLT_FN (BUILT_IN_EXP2):
10193 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10194
10195 CASE_FLT_FN (BUILT_IN_EXP10):
10196 CASE_FLT_FN (BUILT_IN_POW10):
10197 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10198
10199 CASE_FLT_FN (BUILT_IN_EXPM1):
10200 if (validate_arg (arg0, REAL_TYPE))
10201 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10202 break;
10203
10204 CASE_FLT_FN (BUILT_IN_LOG):
10205 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10206
10207 CASE_FLT_FN (BUILT_IN_LOG2):
10208 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10209
10210 CASE_FLT_FN (BUILT_IN_LOG10):
10211 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10212
10213 CASE_FLT_FN (BUILT_IN_LOG1P):
10214 if (validate_arg (arg0, REAL_TYPE))
10215 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10216 &dconstm1, NULL, false);
10217 break;
10218
10219 CASE_FLT_FN (BUILT_IN_J0):
10220 if (validate_arg (arg0, REAL_TYPE))
10221 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10222 NULL, NULL, 0);
10223 break;
10224
10225 CASE_FLT_FN (BUILT_IN_J1):
10226 if (validate_arg (arg0, REAL_TYPE))
10227 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10228 NULL, NULL, 0);
10229 break;
10230
10231 CASE_FLT_FN (BUILT_IN_Y0):
10232 if (validate_arg (arg0, REAL_TYPE))
10233 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10234 &dconst0, NULL, false);
10235 break;
10236
10237 CASE_FLT_FN (BUILT_IN_Y1):
10238 if (validate_arg (arg0, REAL_TYPE))
10239 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10240 &dconst0, NULL, false);
10241 break;
10242
10243 CASE_FLT_FN (BUILT_IN_NAN):
10244 case BUILT_IN_NAND32:
10245 case BUILT_IN_NAND64:
10246 case BUILT_IN_NAND128:
10247 return fold_builtin_nan (arg0, type, true);
10248
10249 CASE_FLT_FN (BUILT_IN_NANS):
10250 return fold_builtin_nan (arg0, type, false);
10251
10252 CASE_FLT_FN (BUILT_IN_FLOOR):
10253 return fold_builtin_floor (loc, fndecl, arg0);
10254
10255 CASE_FLT_FN (BUILT_IN_CEIL):
10256 return fold_builtin_ceil (loc, fndecl, arg0);
10257
10258 CASE_FLT_FN (BUILT_IN_TRUNC):
10259 return fold_builtin_trunc (loc, fndecl, arg0);
10260
10261 CASE_FLT_FN (BUILT_IN_ROUND):
10262 return fold_builtin_round (loc, fndecl, arg0);
10263
10264 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10265 CASE_FLT_FN (BUILT_IN_RINT):
10266 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10267
10268 CASE_FLT_FN (BUILT_IN_LCEIL):
10269 CASE_FLT_FN (BUILT_IN_LLCEIL):
10270 CASE_FLT_FN (BUILT_IN_LFLOOR):
10271 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10272 CASE_FLT_FN (BUILT_IN_LROUND):
10273 CASE_FLT_FN (BUILT_IN_LLROUND):
10274 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10275
10276 CASE_FLT_FN (BUILT_IN_LRINT):
10277 CASE_FLT_FN (BUILT_IN_LLRINT):
10278 return fold_fixed_mathfn (loc, fndecl, arg0);
10279
10280 case BUILT_IN_BSWAP32:
10281 case BUILT_IN_BSWAP64:
10282 return fold_builtin_bswap (fndecl, arg0);
10283
10284 CASE_INT_FN (BUILT_IN_FFS):
10285 CASE_INT_FN (BUILT_IN_CLZ):
10286 CASE_INT_FN (BUILT_IN_CTZ):
10287 CASE_INT_FN (BUILT_IN_POPCOUNT):
10288 CASE_INT_FN (BUILT_IN_PARITY):
10289 return fold_builtin_bitop (fndecl, arg0);
10290
10291 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10292 return fold_builtin_signbit (loc, arg0, type);
10293
10294 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10295 return fold_builtin_significand (loc, arg0, type);
10296
10297 CASE_FLT_FN (BUILT_IN_ILOGB):
10298 CASE_FLT_FN (BUILT_IN_LOGB):
10299 return fold_builtin_logb (loc, arg0, type);
10300
10301 case BUILT_IN_ISASCII:
10302 return fold_builtin_isascii (loc, arg0);
10303
10304 case BUILT_IN_TOASCII:
10305 return fold_builtin_toascii (loc, arg0);
10306
10307 case BUILT_IN_ISDIGIT:
10308 return fold_builtin_isdigit (loc, arg0);
10309
10310 CASE_FLT_FN (BUILT_IN_FINITE):
10311 case BUILT_IN_FINITED32:
10312 case BUILT_IN_FINITED64:
10313 case BUILT_IN_FINITED128:
10314 case BUILT_IN_ISFINITE:
10315 {
10316 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10317 if (ret)
10318 return ret;
10319 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10320 }
10321
10322 CASE_FLT_FN (BUILT_IN_ISINF):
10323 case BUILT_IN_ISINFD32:
10324 case BUILT_IN_ISINFD64:
10325 case BUILT_IN_ISINFD128:
10326 {
10327 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10328 if (ret)
10329 return ret;
10330 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10331 }
10332
10333 case BUILT_IN_ISNORMAL:
10334 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10335
10336 case BUILT_IN_ISINF_SIGN:
10337 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10338
10339 CASE_FLT_FN (BUILT_IN_ISNAN):
10340 case BUILT_IN_ISNAND32:
10341 case BUILT_IN_ISNAND64:
10342 case BUILT_IN_ISNAND128:
10343 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10344
10345 case BUILT_IN_PRINTF:
10346 case BUILT_IN_PRINTF_UNLOCKED:
10347 case BUILT_IN_VPRINTF:
10348 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10349
10350 case BUILT_IN_FREE:
10351 if (integer_zerop (arg0))
10352 return build_empty_stmt (loc);
10353 break;
10354
10355 default:
10356 break;
10357 }
10358
10359 return NULL_TREE;
10360
10361 }
10362
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* Two-argument math builtins: the do_mpfr_ and do_mpc_ helpers
       only fold constant arguments and return NULL_TREE otherwise,
       in which case we fall through to the final NULL_TREE return.  */
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
      break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
      break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
      break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
      break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
	  && validate_arg (arg1, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
      break;

    /* ldexp and scalbn/scalbln share one folder; the flag records
       which family is being folded.  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
					 type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_BZERO:
      return fold_builtin_bzero (loc, arg0, arg1, ignore);

    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (loc, arg0, arg1);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);

    case BUILT_IN_STPCPY:
      /* When the result is unused, stpcpy's only difference from
	 strcpy (its return value) is irrelevant, so emit a call to
	 strcpy instead.  */
      if (ignore)
	{
	  tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
	  if (!fn)
	    break;

	  return build_call_expr_loc (loc, fn, 2, arg0, arg1);
	}
      else
	return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    /* The unordered comparisons are folded through their negated
       counterparts; see fold_builtin_unordered_cmp.  For isunordered
       the ordered code is unused, hence the NOP_EXPR placeholder.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* ARG0 is presumably the fortify flag (TODO confirm against the
	 builtin's declaration); it is validated and dropped, and the
	 call folded as the unchecked printf.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (loc, fndecl,
				    arg1, NULL_TREE, ignore, fcode);
      break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
				   ignore, fcode);

    default:
      break;
    }
  return NULL_TREE;
}
10563
10564 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10565 and ARG2. IGNORE is true if the result of the function call is ignored.
10566 This function returns NULL_TREE if no simplification was possible. */
10567
10568 static tree
10569 fold_builtin_3 (location_t loc, tree fndecl,
10570 tree arg0, tree arg1, tree arg2, bool ignore)
10571 {
10572 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10573 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10574 switch (fcode)
10575 {
10576
10577 CASE_FLT_FN (BUILT_IN_SINCOS):
10578 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10579
10580 CASE_FLT_FN (BUILT_IN_FMA):
10581 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10582 break;
10583
10584 CASE_FLT_FN (BUILT_IN_REMQUO):
10585 if (validate_arg (arg0, REAL_TYPE)
10586 && validate_arg(arg1, REAL_TYPE)
10587 && validate_arg(arg2, POINTER_TYPE))
10588 return do_mpfr_remquo (arg0, arg1, arg2);
10589 break;
10590
10591 case BUILT_IN_MEMSET:
10592 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10593
10594 case BUILT_IN_BCOPY:
10595 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10596 void_type_node, true, /*endp=*/3);
10597
10598 case BUILT_IN_MEMCPY:
10599 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10600 type, ignore, /*endp=*/0);
10601
10602 case BUILT_IN_MEMPCPY:
10603 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10604 type, ignore, /*endp=*/1);
10605
10606 case BUILT_IN_MEMMOVE:
10607 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10608 type, ignore, /*endp=*/3);
10609
10610 case BUILT_IN_STRNCAT:
10611 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10612
10613 case BUILT_IN_STRNCPY:
10614 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10615
10616 case BUILT_IN_STRNCMP:
10617 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10618
10619 case BUILT_IN_MEMCHR:
10620 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10621
10622 case BUILT_IN_BCMP:
10623 case BUILT_IN_MEMCMP:
10624 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10625
10626 case BUILT_IN_SPRINTF:
10627 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10628
10629 case BUILT_IN_STRCPY_CHK:
10630 case BUILT_IN_STPCPY_CHK:
10631 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10632 ignore, fcode);
10633
10634 case BUILT_IN_STRCAT_CHK:
10635 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10636
10637 case BUILT_IN_PRINTF_CHK:
10638 case BUILT_IN_VPRINTF_CHK:
10639 if (!validate_arg (arg0, INTEGER_TYPE)
10640 || TREE_SIDE_EFFECTS (arg0))
10641 return NULL_TREE;
10642 else
10643 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10644 break;
10645
10646 case BUILT_IN_FPRINTF:
10647 case BUILT_IN_FPRINTF_UNLOCKED:
10648 case BUILT_IN_VFPRINTF:
10649 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10650 ignore, fcode);
10651
10652 case BUILT_IN_FPRINTF_CHK:
10653 case BUILT_IN_VFPRINTF_CHK:
10654 if (!validate_arg (arg1, INTEGER_TYPE)
10655 || TREE_SIDE_EFFECTS (arg1))
10656 return NULL_TREE;
10657 else
10658 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10659 ignore, fcode);
10660
10661 default:
10662 break;
10663 }
10664 return NULL_TREE;
10665 }
10666
/* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
   ARG2, and ARG3.  IGNORE is true if the result of the function call is
   ignored.  This function returns NULL_TREE if no simplification was
   possible.  */

static tree
fold_builtin_4 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* The object-size-checking memory builtins share one folder; the
       function code tells it which primitive is being folded.  */
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
				      NULL_TREE, ignore,
				      DECL_FUNCTION_CODE (fndecl));

    case BUILT_IN_STRNCPY_CHK:
      return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);

    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* ARG1 is presumably the fortify flag (TODO confirm against the
	 builtin's declaration); it must be a side-effect-free integer
	 before it can be dropped and the call folded as plain
	 fprintf.  */
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
				     ignore, fcode);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
10709
10710 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10711 arguments, where NARGS <= 4. IGNORE is true if the result of the
10712 function call is ignored. This function returns NULL_TREE if no
10713 simplification was possible. Note that this only folds builtins with
10714 fixed argument patterns. Foldings that do varargs-to-varargs
10715 transformations, or that match calls with more than 4 arguments,
10716 need to be handled with fold_builtin_varargs instead. */
10717
10718 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10719
10720 static tree
10721 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10722 {
10723 tree ret = NULL_TREE;
10724
10725 switch (nargs)
10726 {
10727 case 0:
10728 ret = fold_builtin_0 (loc, fndecl, ignore);
10729 break;
10730 case 1:
10731 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10732 break;
10733 case 2:
10734 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10735 break;
10736 case 3:
10737 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10738 break;
10739 case 4:
10740 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10741 ignore);
10742 break;
10743 default:
10744 break;
10745 }
10746 if (ret)
10747 {
10748 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10749 SET_EXPR_LOCATION (ret, loc);
10750 TREE_NO_WARNING (ret) = 1;
10751 return ret;
10752 }
10753 return NULL_TREE;
10754 }
10755
10756 /* Builtins with folding operations that operate on "..." arguments
10757 need special handling; we need to store the arguments in a convenient
10758 data structure before attempting any folding. Fortunately there are
10759 only a few builtins that fall into this category. FNDECL is the
10760 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10761 result of the function call is ignored. */
10762
10763 static tree
10764 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10765 bool ignore ATTRIBUTE_UNUSED)
10766 {
10767 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10768 tree ret = NULL_TREE;
10769
10770 switch (fcode)
10771 {
10772 case BUILT_IN_SPRINTF_CHK:
10773 case BUILT_IN_VSPRINTF_CHK:
10774 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10775 break;
10776
10777 case BUILT_IN_SNPRINTF_CHK:
10778 case BUILT_IN_VSNPRINTF_CHK:
10779 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10780 break;
10781
10782 case BUILT_IN_FPCLASSIFY:
10783 ret = fold_builtin_fpclassify (loc, exp);
10784 break;
10785
10786 default:
10787 break;
10788 }
10789 if (ret)
10790 {
10791 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10792 SET_EXPR_LOCATION (ret, loc);
10793 TREE_NO_WARNING (ret) = 1;
10794 return ret;
10795 }
10796 return NULL_TREE;
10797 }
10798
10799 /* Return true if FNDECL shouldn't be folded right now.
10800 If a built-in function has an inline attribute always_inline
10801 wrapper, defer folding it after always_inline functions have
10802 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10803 might not be performed. */
10804
10805 static bool
10806 avoid_folding_inline_builtin (tree fndecl)
10807 {
10808 return (DECL_DECLARED_INLINE_P (fndecl)
10809 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10810 && cfun
10811 && !cfun->always_inline_functions_inlined
10812 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10813 }
10814
10815 /* A wrapper function for builtin folding that prevents warnings for
10816 "statement without effect" and the like, caused by removing the
10817 call node earlier than the warning is generated. */
10818
tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* Folding an always_inline wrapper too early could defeat e.g.
	 the -D_FORTIFY_SOURCE checks done in the wrapper body.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* Machine-dependent builtins are folded entirely by the target
	 hook; normal builtins go through the generic folders below.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  /* First the fixed-arity folders, then, if those fail, the
	     folders that handle "..." argument lists.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
10868
10869 /* Conveniently construct a function call expression. FNDECL names the
10870 function to be called and N arguments are passed in the array
10871 ARGARRAY. */
10872
10873 tree
10874 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10875 {
10876 tree fntype = TREE_TYPE (fndecl);
10877 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10878
10879 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10880 }
10881
10882 /* Conveniently construct a function call expression. FNDECL names the
10883 function to be called and the arguments are passed in the vector
10884 VEC. */
10885
10886 tree
10887 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10888 {
10889 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10890 VEC_address (tree, vec));
10891 }
10892
10893
10894 /* Conveniently construct a function call expression. FNDECL names the
10895 function to be called, N is the number of arguments, and the "..."
10896 parameters are the argument expressions. */
10897
10898 tree
10899 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10900 {
10901 va_list ap;
10902 tree *argarray = XALLOCAVEC (tree, n);
10903 int i;
10904
10905 va_start (ap, n);
10906 for (i = 0; i < n; i++)
10907 argarray[i] = va_arg (ap, tree);
10908 va_end (ap);
10909 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10910 }
10911
10912 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10913 varargs macros aren't supported by all bootstrap compilers. */
10914
10915 tree
10916 build_call_expr (tree fndecl, int n, ...)
10917 {
10918 va_list ap;
10919 tree *argarray = XALLOCAVEC (tree, n);
10920 int i;
10921
10922 va_start (ap, n);
10923 for (i = 0; i < n; i++)
10924 argarray[i] = va_arg (ap, tree);
10925 va_end (ap);
10926 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10927 }
10928
10929 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10930 N arguments are passed in the array ARGARRAY. */
10931
tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
          && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  /* Folding an always_inline wrapper too early could defeat
	     e.g. -D_FORTIFY_SOURCE checking; emit a plain call instead.  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* Machine-dependent builtins fold via the target hook
		 only; fall back to an unfolded call on failure.  */
	      ret = targetm.fold_builtin (fndecl, n, argarray, false);
	      if (ret)
		return ret;

	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}
10986
10987 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10988 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10989 of arguments in ARGS to be omitted. OLDNARGS is the number of
10990 elements in ARGS. */
10991
10992 static tree
10993 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10994 int skip, tree fndecl, int n, va_list newargs)
10995 {
10996 int nargs = oldnargs - skip + n;
10997 tree *buffer;
10998
10999 if (n > 0)
11000 {
11001 int i, j;
11002
11003 buffer = XALLOCAVEC (tree, nargs);
11004 for (i = 0; i < n; i++)
11005 buffer[i] = va_arg (newargs, tree);
11006 for (j = skip; j < oldnargs; j++, i++)
11007 buffer[i] = args[j];
11008 }
11009 else
11010 buffer = args + skip;
11011
11012 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11013 }
11014
11015 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11016 list ARGS along with N new arguments specified as the "..."
11017 parameters. SKIP is the number of arguments in ARGS to be omitted.
11018 OLDNARGS is the number of elements in ARGS. */
11019
11020 static tree
11021 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11022 int skip, tree fndecl, int n, ...)
11023 {
11024 va_list ap;
11025 tree t;
11026
11027 va_start (ap, n);
11028 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11029 va_end (ap);
11030
11031 return t;
11032 }
11033
11034 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11035 along with N new arguments specified as the "..." parameters. SKIP
11036 is the number of arguments in EXP to be omitted. This function is used
11037 to do varargs-to-varargs transformations. */
11038
11039 static tree
11040 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11041 {
11042 va_list ap;
11043 tree t;
11044
11045 va_start (ap, n);
11046 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11047 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11048 va_end (ap);
11049
11050 return t;
11051 }
11052
11053 /* Validate a single argument ARG against a tree code CODE representing
11054 a type. */
11055
11056 static bool
11057 validate_arg (const_tree arg, enum tree_code code)
11058 {
11059 if (!arg)
11060 return false;
11061 else if (code == POINTER_TYPE)
11062 return POINTER_TYPE_P (TREE_TYPE (arg));
11063 else if (code == INTEGER_TYPE)
11064 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11065 return code == TREE_CODE (TREE_TYPE (arg));
11066 }
11067
11068 /* This function validates the types of a function call argument list
11069 against a specified list of tree_codes. If the last specifier is a 0,
11070 that represents an ellipses, otherwise the last specifier must be a
11071 VOID_TYPE.
11072
11073 This is the GIMPLE version of validate_arglist. Eventually we want to
11074 completely convert builtins.c to work from GIMPLEs and the tree based
11075 validate_arglist will then be removed. */
11076
11077 bool
11078 validate_gimple_arglist (const_gimple call, ...)
11079 {
11080 enum tree_code code;
11081 bool res = 0;
11082 va_list ap;
11083 const_tree arg;
11084 size_t i;
11085
11086 va_start (ap, call);
11087 i = 0;
11088
11089 do
11090 {
11091 code = (enum tree_code) va_arg (ap, int);
11092 switch (code)
11093 {
11094 case 0:
11095 /* This signifies an ellipses, any further arguments are all ok. */
11096 res = true;
11097 goto end;
11098 case VOID_TYPE:
11099 /* This signifies an endlink, if no arguments remain, return
11100 true, otherwise return false. */
11101 res = (i == gimple_call_num_args (call));
11102 goto end;
11103 default:
11104 /* If no parameters remain or the parameter's code does not
11105 match the specified code, return false. Otherwise continue
11106 checking any remaining arguments. */
11107 arg = gimple_call_arg (call, i++);
11108 if (!validate_arg (arg, code))
11109 goto end;
11110 break;
11111 }
11112 }
11113 while (1);
11114
11115 /* We need gotos here since we can only have one VA_CLOSE in a
11116 function. */
11117 end: ;
11118 va_end (ap);
11119
11120 return res;
11121 }
11122
11123 /* This function validates the types of a function call argument list
11124 against a specified list of tree_codes. If the last specifier is a 0,
11125 that represents an ellipses, otherwise the last specifier must be a
11126 VOID_TYPE. */
11127
11128 bool
11129 validate_arglist (const_tree callexpr, ...)
11130 {
11131 enum tree_code code;
11132 bool res = 0;
11133 va_list ap;
11134 const_call_expr_arg_iterator iter;
11135 const_tree arg;
11136
11137 va_start (ap, callexpr);
11138 init_const_call_expr_arg_iterator (callexpr, &iter);
11139
11140 do
11141 {
11142 code = (enum tree_code) va_arg (ap, int);
11143 switch (code)
11144 {
11145 case 0:
11146 /* This signifies an ellipses, any further arguments are all ok. */
11147 res = true;
11148 goto end;
11149 case VOID_TYPE:
11150 /* This signifies an endlink, if no arguments remain, return
11151 true, otherwise return false. */
11152 res = !more_const_call_expr_args_p (&iter);
11153 goto end;
11154 default:
11155 /* If no parameters remain or the parameter's code does not
11156 match the specified code, return false. Otherwise continue
11157 checking any remaining arguments. */
11158 arg = next_const_call_expr_arg (&iter);
11159 if (!validate_arg (arg, code))
11160 goto end;
11161 break;
11162 }
11163 }
11164 while (1);
11165
11166 /* We need gotos here since we can only have one VA_CLOSE in a
11167 function. */
11168 end: ;
11169 va_end (ap);
11170
11171 return res;
11172 }
11173
11174 /* Default target-specific builtin expander that does nothing. */
11175
rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  /* Returning NULL_RTX tells the caller no target-specific expansion
     was performed, so the call is expanded the generic way.  */
  return NULL_RTX;
}
11185
11186 /* Returns true is EXP represents data that would potentially reside
11187 in a readonly section. */
11188
11189 static bool
11190 readonly_data_expr (tree exp)
11191 {
11192 STRIP_NOPS (exp);
11193
11194 if (TREE_CODE (exp) != ADDR_EXPR)
11195 return false;
11196
11197 exp = get_base_address (TREE_OPERAND (exp, 0));
11198 if (!exp)
11199 return false;
11200
11201 /* Make sure we call decl_readonly_section only for trees it
11202 can handle (since it returns true for everything it doesn't
11203 understand). */
11204 if (TREE_CODE (exp) == STRING_CST
11205 || TREE_CODE (exp) == CONSTRUCTOR
11206 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11207 return decl_readonly_section (exp, 0);
11208 else
11209 return false;
11210 }
11211
11212 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11213 to the call, and TYPE is its return type.
11214
11215 Return NULL_TREE if no simplification was possible, otherwise return the
11216 simplified form of the call as a tree.
11217
11218 The simplified form may be a constant or other expression which
11219 computes the same value, but in a more efficient manner (including
11220 calls to other builtin functions).
11221
11222 The call may contain arguments which need to be evaluated, but
11223 which are not useful to determine the result of the call. In
11224 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11225 COMPOUND_EXPR will be an argument which must be evaluated.
11226 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11227 COMPOUND_EXPR in the chain will contain the tree for the simplified
11228 form of the builtin function call. */
11229
11230 static tree
11231 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11232 {
11233 if (!validate_arg (s1, POINTER_TYPE)
11234 || !validate_arg (s2, POINTER_TYPE))
11235 return NULL_TREE;
11236 else
11237 {
11238 tree fn;
11239 const char *p1, *p2;
11240
11241 p2 = c_getstr (s2);
11242 if (p2 == NULL)
11243 return NULL_TREE;
11244
11245 p1 = c_getstr (s1);
11246 if (p1 != NULL)
11247 {
11248 const char *r = strstr (p1, p2);
11249 tree tem;
11250
11251 if (r == NULL)
11252 return build_int_cst (TREE_TYPE (s1), 0);
11253
11254 /* Return an offset into the constant string argument. */
11255 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11256 s1, size_int (r - p1));
11257 return fold_convert_loc (loc, type, tem);
11258 }
11259
11260 /* The argument is const char *, and the result is char *, so we need
11261 a type conversion here to avoid a warning. */
11262 if (p2[0] == '\0')
11263 return fold_convert_loc (loc, type, s1);
11264
11265 if (p2[1] != '\0')
11266 return NULL_TREE;
11267
11268 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11269 if (!fn)
11270 return NULL_TREE;
11271
11272 /* New argument list transforming strstr(s1, s2) to
11273 strchr(s1, s2[0]). */
11274 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11275 }
11276 }
11277
11278 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11279 the call, and TYPE is its return type.
11280
11281 Return NULL_TREE if no simplification was possible, otherwise return the
11282 simplified form of the call as a tree.
11283
11284 The simplified form may be a constant or other expression which
11285 computes the same value, but in a more efficient manner (including
11286 calls to other builtin functions).
11287
11288 The call may contain arguments which need to be evaluated, but
11289 which are not useful to determine the result of the call. In
11290 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11291 COMPOUND_EXPR will be an argument which must be evaluated.
11292 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11293 COMPOUND_EXPR in the chain will contain the tree for the simplified
11294 form of the builtin function call. */
11295
11296 static tree
11297 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11298 {
11299 if (!validate_arg (s1, POINTER_TYPE)
11300 || !validate_arg (s2, INTEGER_TYPE))
11301 return NULL_TREE;
11302 else
11303 {
11304 const char *p1;
11305
11306 if (TREE_CODE (s2) != INTEGER_CST)
11307 return NULL_TREE;
11308
11309 p1 = c_getstr (s1);
11310 if (p1 != NULL)
11311 {
11312 char c;
11313 const char *r;
11314 tree tem;
11315
11316 if (target_char_cast (s2, &c))
11317 return NULL_TREE;
11318
11319 r = strchr (p1, c);
11320
11321 if (r == NULL)
11322 return build_int_cst (TREE_TYPE (s1), 0);
11323
11324 /* Return an offset into the constant string argument. */
11325 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11326 s1, size_int (r - p1));
11327 return fold_convert_loc (loc, type, tem);
11328 }
11329 return NULL_TREE;
11330 }
11331 }
11332
11333 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11334 the call, and TYPE is its return type.
11335
11336 Return NULL_TREE if no simplification was possible, otherwise return the
11337 simplified form of the call as a tree.
11338
11339 The simplified form may be a constant or other expression which
11340 computes the same value, but in a more efficient manner (including
11341 calls to other builtin functions).
11342
11343 The call may contain arguments which need to be evaluated, but
11344 which are not useful to determine the result of the call. In
11345 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11346 COMPOUND_EXPR will be an argument which must be evaluated.
11347 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11348 COMPOUND_EXPR in the chain will contain the tree for the simplified
11349 form of the builtin function call. */
11350
11351 static tree
11352 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11353 {
11354 if (!validate_arg (s1, POINTER_TYPE)
11355 || !validate_arg (s2, INTEGER_TYPE))
11356 return NULL_TREE;
11357 else
11358 {
11359 tree fn;
11360 const char *p1;
11361
11362 if (TREE_CODE (s2) != INTEGER_CST)
11363 return NULL_TREE;
11364
11365 p1 = c_getstr (s1);
11366 if (p1 != NULL)
11367 {
11368 char c;
11369 const char *r;
11370 tree tem;
11371
11372 if (target_char_cast (s2, &c))
11373 return NULL_TREE;
11374
11375 r = strrchr (p1, c);
11376
11377 if (r == NULL)
11378 return build_int_cst (TREE_TYPE (s1), 0);
11379
11380 /* Return an offset into the constant string argument. */
11381 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11382 s1, size_int (r - p1));
11383 return fold_convert_loc (loc, type, tem);
11384 }
11385
11386 if (! integer_zerop (s2))
11387 return NULL_TREE;
11388
11389 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11390 if (!fn)
11391 return NULL_TREE;
11392
11393 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11394 return build_call_expr_loc (loc, fn, 2, s1, s2);
11395 }
11396 }
11397
11398 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11399 to the call, and TYPE is its return type.
11400
11401 Return NULL_TREE if no simplification was possible, otherwise return the
11402 simplified form of the call as a tree.
11403
11404 The simplified form may be a constant or other expression which
11405 computes the same value, but in a more efficient manner (including
11406 calls to other builtin functions).
11407
11408 The call may contain arguments which need to be evaluated, but
11409 which are not useful to determine the result of the call. In
11410 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11411 COMPOUND_EXPR will be an argument which must be evaluated.
11412 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11413 COMPOUND_EXPR in the chain will contain the tree for the simplified
11414 form of the builtin function call. */
11415
11416 static tree
11417 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11418 {
11419 if (!validate_arg (s1, POINTER_TYPE)
11420 || !validate_arg (s2, POINTER_TYPE))
11421 return NULL_TREE;
11422 else
11423 {
11424 tree fn;
11425 const char *p1, *p2;
11426
11427 p2 = c_getstr (s2);
11428 if (p2 == NULL)
11429 return NULL_TREE;
11430
11431 p1 = c_getstr (s1);
11432 if (p1 != NULL)
11433 {
11434 const char *r = strpbrk (p1, p2);
11435 tree tem;
11436
11437 if (r == NULL)
11438 return build_int_cst (TREE_TYPE (s1), 0);
11439
11440 /* Return an offset into the constant string argument. */
11441 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11442 s1, size_int (r - p1));
11443 return fold_convert_loc (loc, type, tem);
11444 }
11445
11446 if (p2[0] == '\0')
11447 /* strpbrk(x, "") == NULL.
11448 Evaluate and ignore s1 in case it had side-effects. */
11449 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11450
11451 if (p2[1] != '\0')
11452 return NULL_TREE; /* Really call strpbrk. */
11453
11454 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11455 if (!fn)
11456 return NULL_TREE;
11457
11458 /* New argument list transforming strpbrk(s1, s2) to
11459 strchr(s1, s2[0]). */
11460 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11461 }
11462 }
11463
11464 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11465 to the call.
11466
11467 Return NULL_TREE if no simplification was possible, otherwise return the
11468 simplified form of the call as a tree.
11469
11470 The simplified form may be a constant or other expression which
11471 computes the same value, but in a more efficient manner (including
11472 calls to other builtin functions).
11473
11474 The call may contain arguments which need to be evaluated, but
11475 which are not useful to determine the result of the call. In
11476 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11477 COMPOUND_EXPR will be an argument which must be evaluated.
11478 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11479 COMPOUND_EXPR in the chain will contain the tree for the simplified
11480 form of the builtin function call. */
11481
static tree
fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return dst;

      /* The strlen + strcpy expansion below trades size for speed, so
	 only do it when optimizing this insn for speed.  */
      if (optimize_insn_for_speed_p ())
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newdst, call;
	  tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
	  tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];

	  if (!strlen_fn || !strcpy_fn)
	    return NULL_TREE;

	  /* If we don't have a movstr we don't want to emit an strcpy
	     call.  We have to do that if the length of the source string
	     isn't computable (in that case we can use memcpy probably
	     later expanding to a sequence of mov instructions).  If we
	     have movstr instructions we can emit strcpy calls.  */
	  if (!HAVE_movstr)
	    {
	      tree len = c_strlen (src, 1);
	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;
	    }

	  /* Stabilize the argument list.  */
	  dst = builtin_save_expr (dst);

	  /* Create strlen (dst).  */
	  newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dst), dst, newdst);
	  newdst = builtin_save_expr (newdst);

	  /* Evaluate the strcpy call for effect, then yield DST, which
	     is strcat's defined return value.  */
	  call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
	  return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
	}
      return NULL_TREE;
    }
}
11535
11536 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11537 arguments to the call.
11538
11539 Return NULL_TREE if no simplification was possible, otherwise return the
11540 simplified form of the call as a tree.
11541
11542 The simplified form may be a constant or other expression which
11543 computes the same value, but in a more efficient manner (including
11544 calls to other builtin functions).
11545
11546 The call may contain arguments which need to be evaluated, but
11547 which are not useful to determine the result of the call. In
11548 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11549 COMPOUND_EXPR will be an argument which must be evaluated.
11550 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11551 COMPOUND_EXPR in the chain will contain the tree for the simplified
11552 form of the builtin function call. */
11553
11554 static tree
11555 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11556 {
11557 if (!validate_arg (dst, POINTER_TYPE)
11558 || !validate_arg (src, POINTER_TYPE)
11559 || !validate_arg (len, INTEGER_TYPE))
11560 return NULL_TREE;
11561 else
11562 {
11563 const char *p = c_getstr (src);
11564
11565 /* If the requested length is zero, or the src parameter string
11566 length is zero, return the dst parameter. */
11567 if (integer_zerop (len) || (p && *p == '\0'))
11568 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11569
11570 /* If the requested len is greater than or equal to the string
11571 length, call strcat. */
11572 if (TREE_CODE (len) == INTEGER_CST && p
11573 && compare_tree_int (len, strlen (p)) >= 0)
11574 {
11575 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11576
11577 /* If the replacement _DECL isn't initialized, don't do the
11578 transformation. */
11579 if (!fn)
11580 return NULL_TREE;
11581
11582 return build_call_expr_loc (loc, fn, 2, dst, src);
11583 }
11584 return NULL_TREE;
11585 }
11586 }
11587
11588 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11589 to the call.
11590
11591 Return NULL_TREE if no simplification was possible, otherwise return the
11592 simplified form of the call as a tree.
11593
11594 The simplified form may be a constant or other expression which
11595 computes the same value, but in a more efficient manner (including
11596 calls to other builtin functions).
11597
11598 The call may contain arguments which need to be evaluated, but
11599 which are not useful to determine the result of the call. In
11600 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11601 COMPOUND_EXPR will be an argument which must be evaluated.
11602 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11603 COMPOUND_EXPR in the chain will contain the tree for the simplified
11604 form of the builtin function call. */
11605
11606 static tree
11607 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11608 {
11609 if (!validate_arg (s1, POINTER_TYPE)
11610 || !validate_arg (s2, POINTER_TYPE))
11611 return NULL_TREE;
11612 else
11613 {
11614 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11615
11616 /* If both arguments are constants, evaluate at compile-time. */
11617 if (p1 && p2)
11618 {
11619 const size_t r = strspn (p1, p2);
11620 return size_int (r);
11621 }
11622
11623 /* If either argument is "", return NULL_TREE. */
11624 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11625 /* Evaluate and ignore both arguments in case either one has
11626 side-effects. */
11627 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11628 s1, s2);
11629 return NULL_TREE;
11630 }
11631 }
11632
11633 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11634 to the call.
11635
11636 Return NULL_TREE if no simplification was possible, otherwise return the
11637 simplified form of the call as a tree.
11638
11639 The simplified form may be a constant or other expression which
11640 computes the same value, but in a more efficient manner (including
11641 calls to other builtin functions).
11642
11643 The call may contain arguments which need to be evaluated, but
11644 which are not useful to determine the result of the call. In
11645 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11646 COMPOUND_EXPR will be an argument which must be evaluated.
11647 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11648 COMPOUND_EXPR in the chain will contain the tree for the simplified
11649 form of the builtin function call. */
11650
11651 static tree
11652 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11653 {
11654 if (!validate_arg (s1, POINTER_TYPE)
11655 || !validate_arg (s2, POINTER_TYPE))
11656 return NULL_TREE;
11657 else
11658 {
11659 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11660
11661 /* If both arguments are constants, evaluate at compile-time. */
11662 if (p1 && p2)
11663 {
11664 const size_t r = strcspn (p1, p2);
11665 return size_int (r);
11666 }
11667
11668 /* If the first argument is "", return NULL_TREE. */
11669 if (p1 && *p1 == '\0')
11670 {
11671 /* Evaluate and ignore argument s2 in case it has
11672 side-effects. */
11673 return omit_one_operand_loc (loc, size_type_node,
11674 size_zero_node, s2);
11675 }
11676
11677 /* If the second argument is "", return __builtin_strlen(s1). */
11678 if (p2 && *p2 == '\0')
11679 {
11680 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11681
11682 /* If the replacement _DECL isn't initialized, don't do the
11683 transformation. */
11684 if (!fn)
11685 return NULL_TREE;
11686
11687 return build_call_expr_loc (loc, fn, 1, s1);
11688 }
11689 return NULL_TREE;
11690 }
11691 }
11692
11693 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11694 to the call. IGNORE is true if the value returned
11695 by the builtin will be ignored. UNLOCKED is true is true if this
11696 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11697 the known length of the string. Return NULL_TREE if no simplification
11698 was possible. */
11699
11700 tree
11701 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11702 bool ignore, bool unlocked, tree len)
11703 {
11704 /* If we're using an unlocked function, assume the other unlocked
11705 functions exist explicitly. */
11706 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11707 : implicit_built_in_decls[BUILT_IN_FPUTC];
11708 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11709 : implicit_built_in_decls[BUILT_IN_FWRITE];
11710
11711 /* If the return value is used, don't do the transformation. */
11712 if (!ignore)
11713 return NULL_TREE;
11714
11715 /* Verify the arguments in the original call. */
11716 if (!validate_arg (arg0, POINTER_TYPE)
11717 || !validate_arg (arg1, POINTER_TYPE))
11718 return NULL_TREE;
11719
11720 if (! len)
11721 len = c_strlen (arg0, 0);
11722
11723 /* Get the length of the string passed to fputs. If the length
11724 can't be determined, punt. */
11725 if (!len
11726 || TREE_CODE (len) != INTEGER_CST)
11727 return NULL_TREE;
11728
11729 switch (compare_tree_int (len, 1))
11730 {
11731 case -1: /* length is 0, delete the call entirely . */
11732 return omit_one_operand_loc (loc, integer_type_node,
11733 integer_zero_node, arg1);;
11734
11735 case 0: /* length is 1, call fputc. */
11736 {
11737 const char *p = c_getstr (arg0);
11738
11739 if (p != NULL)
11740 {
11741 if (fn_fputc)
11742 return build_call_expr_loc (loc, fn_fputc, 2,
11743 build_int_cst (NULL_TREE, p[0]), arg1);
11744 else
11745 return NULL_TREE;
11746 }
11747 }
11748 /* FALLTHROUGH */
11749 case 1: /* length is greater than 1, call fwrite. */
11750 {
11751 /* If optimizing for size keep fputs. */
11752 if (optimize_function_for_size_p (cfun))
11753 return NULL_TREE;
11754 /* New argument list transforming fputs(string, stream) to
11755 fwrite(string, 1, len, stream). */
11756 if (fn_fwrite)
11757 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11758 size_one_node, len, arg1);
11759 else
11760 return NULL_TREE;
11761 }
11762 default:
11763 gcc_unreachable ();
11764 }
11765 return NULL_TREE;
11766 }
11767
11768 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11769 produced. False otherwise. This is done so that we don't output the error
11770 or warning twice or three times. */
11771
11772 bool
11773 fold_builtin_next_arg (tree exp, bool va_start_p)
11774 {
11775 tree fntype = TREE_TYPE (current_function_decl);
11776 int nargs = call_expr_nargs (exp);
11777 tree arg;
11778
11779 if (!stdarg_p (fntype))
11780 {
11781 error ("%<va_start%> used in function with fixed args");
11782 return true;
11783 }
11784
11785 if (va_start_p)
11786 {
11787 if (va_start_p && (nargs != 2))
11788 {
11789 error ("wrong number of arguments to function %<va_start%>");
11790 return true;
11791 }
11792 arg = CALL_EXPR_ARG (exp, 1);
11793 }
11794 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11795 when we checked the arguments and if needed issued a warning. */
11796 else
11797 {
11798 if (nargs == 0)
11799 {
11800 /* Evidently an out of date version of <stdarg.h>; can't validate
11801 va_start's second argument, but can still work as intended. */
11802 warning (0, "%<__builtin_next_arg%> called without an argument");
11803 return true;
11804 }
11805 else if (nargs > 1)
11806 {
11807 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11808 return true;
11809 }
11810 arg = CALL_EXPR_ARG (exp, 0);
11811 }
11812
11813 if (TREE_CODE (arg) == SSA_NAME)
11814 arg = SSA_NAME_VAR (arg);
11815
11816 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11817 or __builtin_next_arg (0) the first time we see it, after checking
11818 the arguments and if needed issuing a warning. */
11819 if (!integer_zerop (arg))
11820 {
11821 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11822
11823 /* Strip off all nops for the sake of the comparison. This
11824 is not quite the same as STRIP_NOPS. It does more.
11825 We must also strip off INDIRECT_EXPR for C++ reference
11826 parameters. */
11827 while (CONVERT_EXPR_P (arg)
11828 || TREE_CODE (arg) == INDIRECT_REF)
11829 arg = TREE_OPERAND (arg, 0);
11830 if (arg != last_parm)
11831 {
11832 /* FIXME: Sometimes with the tree optimizers we can get the
11833 not the last argument even though the user used the last
11834 argument. We just warn and set the arg to be the last
11835 argument so that we will get wrong-code because of
11836 it. */
11837 warning (0, "second parameter of %<va_start%> not last named argument");
11838 }
11839
11840 /* Undefined by C99 7.15.1.4p4 (va_start):
11841 "If the parameter parmN is declared with the register storage
11842 class, with a function or array type, or with a type that is
11843 not compatible with the type that results after application of
11844 the default argument promotions, the behavior is undefined."
11845 */
11846 else if (DECL_REGISTER (arg))
11847 warning (0, "undefined behaviour when second parameter of "
11848 "%<va_start%> is declared with %<register%> storage");
11849
11850 /* We want to verify the second parameter just once before the tree
11851 optimizers are run and then avoid keeping it in the tree,
11852 as otherwise we could warn even for correct code like:
11853 void foo (int i, ...)
11854 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11855 if (va_start_p)
11856 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11857 else
11858 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11859 }
11860 return false;
11861 }
11862
11863
11864 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11865 ORIG may be null if this is a 2-argument call. We don't attempt to
11866 simplify calls with more than 3 arguments.
11867
11868 Return NULL_TREE if no simplification was possible, otherwise return the
11869 simplified form of the call as a tree. If IGNORED is true, it means that
11870 the caller does not use the returned value of the function. */
11871
11872 static tree
11873 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11874 tree orig, int ignored)
11875 {
11876 tree call, retval;
11877 const char *fmt_str = NULL;
11878
11879 /* Verify the required arguments in the original call. We deal with two
11880 types of sprintf() calls: 'sprintf (str, fmt)' and
11881 'sprintf (dest, "%s", orig)'. */
11882 if (!validate_arg (dest, POINTER_TYPE)
11883 || !validate_arg (fmt, POINTER_TYPE))
11884 return NULL_TREE;
11885 if (orig && !validate_arg (orig, POINTER_TYPE))
11886 return NULL_TREE;
11887
11888 /* Check whether the format is a literal string constant. */
11889 fmt_str = c_getstr (fmt);
11890 if (fmt_str == NULL)
11891 return NULL_TREE;
11892
11893 call = NULL_TREE;
11894 retval = NULL_TREE;
11895
11896 if (!init_target_chars ())
11897 return NULL_TREE;
11898
11899 /* If the format doesn't contain % args or %%, use strcpy. */
11900 if (strchr (fmt_str, target_percent) == NULL)
11901 {
11902 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11903
11904 if (!fn)
11905 return NULL_TREE;
11906
11907 /* Don't optimize sprintf (buf, "abc", ptr++). */
11908 if (orig)
11909 return NULL_TREE;
11910
11911 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11912 'format' is known to contain no % formats. */
11913 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11914 if (!ignored)
11915 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11916 }
11917
11918 /* If the format is "%s", use strcpy if the result isn't used. */
11919 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11920 {
11921 tree fn;
11922 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11923
11924 if (!fn)
11925 return NULL_TREE;
11926
11927 /* Don't crash on sprintf (str1, "%s"). */
11928 if (!orig)
11929 return NULL_TREE;
11930
11931 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11932 if (!ignored)
11933 {
11934 retval = c_strlen (orig, 1);
11935 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11936 return NULL_TREE;
11937 }
11938 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11939 }
11940
11941 if (call && retval)
11942 {
11943 retval = fold_convert_loc
11944 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11945 retval);
11946 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11947 }
11948 else
11949 return call;
11950 }
11951
11952 /* Expand a call EXP to __builtin_object_size. */
11953
11954 rtx
11955 expand_builtin_object_size (tree exp)
11956 {
11957 tree ost;
11958 int object_size_type;
11959 tree fndecl = get_callee_fndecl (exp);
11960
11961 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11962 {
11963 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11964 exp, fndecl);
11965 expand_builtin_trap ();
11966 return const0_rtx;
11967 }
11968
11969 ost = CALL_EXPR_ARG (exp, 1);
11970 STRIP_NOPS (ost);
11971
11972 if (TREE_CODE (ost) != INTEGER_CST
11973 || tree_int_cst_sgn (ost) < 0
11974 || compare_tree_int (ost, 3) > 0)
11975 {
11976 error ("%Klast argument of %D is not integer constant between 0 and 3",
11977 exp, fndecl);
11978 expand_builtin_trap ();
11979 return const0_rtx;
11980 }
11981
11982 object_size_type = tree_low_cst (ost, 0);
11983
11984 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11985 }
11986
11987 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11988 FCODE is the BUILT_IN_* to use.
11989 Return NULL_RTX if we failed; the caller should emit a normal call,
11990 otherwise try to get the result in TARGET, if convenient (and in
11991 mode MODE if that's convenient). */
11992
static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* Expect (ptr, ptr-or-int, len, objsize); for memset the second
     argument is the fill value rather than a source pointer.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* The object size must be a compile-time constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  /* If LEN is constant, or SIZE is the all-ones "unknown" marker,
     we can drop the check and call the plain function.  */
  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* A known length larger than a known object size is a certain
	 overflow: warn and leave the checked call in place.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMCPY];
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMPCPY];
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = built_in_decls[BUILT_IN_MEMMOVE];
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = built_in_decls[BUILT_IN_MEMSET];
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Build the unchecked call, preserving the tail-call flag of
	 the original, and expand it.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN rather than DEST.  */
	  expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
12108
12109 /* Emit warning if a buffer overflow is detected at compile time. */
12110
static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  /* Pick out the length-like and object-size arguments; their
     positions differ per builtin.  IS_STRLEN marks the builtins
     whose "length" is the source string, not a byte count.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* A non-constant or all-ones ("unknown") object size never warns.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* LEN is a string; warn only when its constant length
	 (including terminator) reaches or exceeds SIZE.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* Source length unknown: overflow is possible but not
	     certain, so use the weaker "might overflow" wording.  */
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
12176
12177 /* Emit warning if a buffer overflow is detected at compile time
12178 in __sprintf_chk/__vsprintf_chk calls. */
12179
12180 static void
12181 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12182 {
12183 tree size, len, fmt;
12184 const char *fmt_str;
12185 int nargs = call_expr_nargs (exp);
12186
12187 /* Verify the required arguments in the original call. */
12188
12189 if (nargs < 4)
12190 return;
12191 size = CALL_EXPR_ARG (exp, 2);
12192 fmt = CALL_EXPR_ARG (exp, 3);
12193
12194 if (! host_integerp (size, 1) || integer_all_onesp (size))
12195 return;
12196
12197 /* Check whether the format is a literal string constant. */
12198 fmt_str = c_getstr (fmt);
12199 if (fmt_str == NULL)
12200 return;
12201
12202 if (!init_target_chars ())
12203 return;
12204
12205 /* If the format doesn't contain % args or %%, we know its size. */
12206 if (strchr (fmt_str, target_percent) == 0)
12207 len = build_int_cstu (size_type_node, strlen (fmt_str));
12208 /* If the format is "%s" and first ... argument is a string literal,
12209 we know it too. */
12210 else if (fcode == BUILT_IN_SPRINTF_CHK
12211 && strcmp (fmt_str, target_percent_s) == 0)
12212 {
12213 tree arg;
12214
12215 if (nargs < 5)
12216 return;
12217 arg = CALL_EXPR_ARG (exp, 4);
12218 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12219 return;
12220
12221 len = c_strlen (arg, 1);
12222 if (!len || ! host_integerp (len, 1))
12223 return;
12224 }
12225 else
12226 return;
12227
12228 if (! tree_int_cst_lt (len, size))
12229 warning_at (tree_nonartificial_location (exp),
12230 0, "%Kcall to %D will always overflow destination buffer",
12231 exp, get_callee_fndecl (exp));
12232 }
12233
12234 /* Emit warning if a free is called with address of a variable. */
12235
12236 static void
12237 maybe_emit_free_warning (tree exp)
12238 {
12239 tree arg = CALL_EXPR_ARG (exp, 0);
12240
12241 STRIP_NOPS (arg);
12242 if (TREE_CODE (arg) != ADDR_EXPR)
12243 return;
12244
12245 arg = get_base_address (TREE_OPERAND (arg, 0));
12246 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12247 return;
12248
12249 if (SSA_VAR_P (arg))
12250 warning_at (tree_nonartificial_location (exp),
12251 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12252 else
12253 warning_at (tree_nonartificial_location (exp),
12254 0, "%Kattempt to free a non-heap object", exp);
12255 }
12256
12257 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12258 if possible. */
12259
tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The second argument must be a constant in the range 0..3.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      /* Only fold when the computed size is representable in size_t.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (double_int_fits_to_tree_p (size_type_node,
				     uhwi_to_double_int (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
	  && double_int_fits_to_tree_p (size_type_node,
					uhwi_to_double_int (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
12306
12307 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12308 DEST, SRC, LEN, and SIZE are the arguments to the call.
12309 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12310 code of the builtin. If MAXLEN is not NULL, it is maximum length
12311 passed as third argument. */
12312
tree
fold_builtin_memory_chk (location_t loc, tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* For memset the second argument is the fill byte, not a pointer.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				     dest, len);
      else
	{
	  tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
				       dest, len);
	  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  /* Without a constant object size, nothing further can be proven.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* Unless SIZE is the all-ones "unknown" marker, prove LEN (or its
     bound MAXLEN) fits in SIZE before dropping the check.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMCPY];
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMPCPY];
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = built_in_decls[BUILT_IN_MEMMOVE];
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = built_in_decls[BUILT_IN_MEMSET];
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
12402
12403 /* Fold a call to the __st[rp]cpy_chk builtin.
12404 DEST, SRC, and SIZE are the arguments to the call.
12405 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12406 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12407 strings passed as second argument. */
12408
tree
fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
			 tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* Without a constant object size, nothing can be proven.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* Unless SIZE is the all-ones "unknown" marker, prove the copied
     string (or its bound MAXLEN) fits before dropping the check.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = built_in_decls[BUILT_IN_STRCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 3, dest, src, size);
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_TREE;

	      /* Copy LEN + 1 bytes to include the NUL terminator.  */
	      len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
	      return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				       build_call_expr_loc (loc, fn, 4,
							    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
		      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}
12483
12484 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12485 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12486 length passed as third argument. */
12487
12488 tree
12489 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12490 tree len, tree size, tree maxlen)
12491 {
12492 tree fn;
12493
12494 if (!validate_arg (dest, POINTER_TYPE)
12495 || !validate_arg (src, POINTER_TYPE)
12496 || !validate_arg (len, INTEGER_TYPE)
12497 || !validate_arg (size, INTEGER_TYPE))
12498 return NULL_TREE;
12499
12500 if (! host_integerp (size, 1))
12501 return NULL_TREE;
12502
12503 if (! integer_all_onesp (size))
12504 {
12505 if (! host_integerp (len, 1))
12506 {
12507 /* If LEN is not constant, try MAXLEN too.
12508 For MAXLEN only allow optimizing into non-_ocs function
12509 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12510 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12511 return NULL_TREE;
12512 }
12513 else
12514 maxlen = len;
12515
12516 if (tree_int_cst_lt (size, maxlen))
12517 return NULL_TREE;
12518 }
12519
12520 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12521 fn = built_in_decls[BUILT_IN_STRNCPY];
12522 if (!fn)
12523 return NULL_TREE;
12524
12525 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12526 }
12527
12528 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12529 are the arguments to the call. */
12530
12531 static tree
12532 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12533 tree src, tree size)
12534 {
12535 tree fn;
12536 const char *p;
12537
12538 if (!validate_arg (dest, POINTER_TYPE)
12539 || !validate_arg (src, POINTER_TYPE)
12540 || !validate_arg (size, INTEGER_TYPE))
12541 return NULL_TREE;
12542
12543 p = c_getstr (src);
12544 /* If the SRC parameter is "", return DEST. */
12545 if (p && *p == '\0')
12546 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12547
12548 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12549 return NULL_TREE;
12550
12551 /* If __builtin_strcat_chk is used, assume strcat is available. */
12552 fn = built_in_decls[BUILT_IN_STRCAT];
12553 if (!fn)
12554 return NULL_TREE;
12555
12556 return build_call_expr_loc (loc, fn, 2, dest, src);
12557 }
12558
12559 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12560 LEN, and SIZE. */
12561
12562 static tree
12563 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12564 tree dest, tree src, tree len, tree size)
12565 {
12566 tree fn;
12567 const char *p;
12568
12569 if (!validate_arg (dest, POINTER_TYPE)
12570 || !validate_arg (src, POINTER_TYPE)
12571 || !validate_arg (size, INTEGER_TYPE)
12572 || !validate_arg (size, INTEGER_TYPE))
12573 return NULL_TREE;
12574
12575 p = c_getstr (src);
12576 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12577 if (p && *p == '\0')
12578 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12579 else if (integer_zerop (len))
12580 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12581
12582 if (! host_integerp (size, 1))
12583 return NULL_TREE;
12584
12585 if (! integer_all_onesp (size))
12586 {
12587 tree src_len = c_strlen (src, 1);
12588 if (src_len
12589 && host_integerp (src_len, 1)
12590 && host_integerp (len, 1)
12591 && ! tree_int_cst_lt (len, src_len))
12592 {
12593 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12594 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12595 if (!fn)
12596 return NULL_TREE;
12597
12598 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12599 }
12600 return NULL_TREE;
12601 }
12602
12603 /* If __builtin_strncat_chk is used, assume strncat is available. */
12604 fn = built_in_decls[BUILT_IN_STRNCAT];
12605 if (!fn)
12606 return NULL_TREE;
12607
12608 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12609 }
12610
12611 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12612 Return NULL_TREE if a normal call should be emitted rather than
12613 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12614 or BUILT_IN_VSPRINTF_CHK. */
12615
static tree
fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
			    enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     (dest, flag, objsize, format, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = args[0];
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = args[1];
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = args[2];
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = args[3];
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Without a constant object size, nothing can be proven.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = args[4];
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless SIZE is the all-ones "unknown" marker, the known output
     length must fit strictly inside it.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments, keeping dest, fmt and the
     variadic tail.  */
  return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
}
12702
12703 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12704 a normal call should be emitted rather than expanding the function
12705 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12706
12707 static tree
12708 fold_builtin_sprintf_chk (location_t loc, tree exp,
12709 enum built_in_function fcode)
12710 {
12711 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
12712 CALL_EXPR_ARGP (exp), fcode);
12713 }
12714
12715 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
12716 NULL_TREE if a normal call should be emitted rather than expanding
12717 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12718 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12719 passed as second argument. */
12720
static tree
fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
			     tree maxlen, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     (dest, len, flag, objsize, format, ...).  */
  if (nargs < 5)
    return NULL_TREE;
  dest = args[0];
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = args[1];
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = args[2];
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = args[3];
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = args[4];
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Without a constant object size, nothing can be proven.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* Unless SIZE is the all-ones "unknown" marker, prove LEN (or its
     bound MAXLEN) fits in SIZE before dropping the check.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments, keeping dest, len, fmt and the
     variadic tail.  */
  return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
}
12791
12792 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12793 a normal call should be emitted rather than expanding the function
12794 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12795 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12796 passed as second argument. */
12797
12798 tree
12799 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12800 enum built_in_function fcode)
12801 {
12802 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
12803 CALL_EXPR_ARGP (exp), maxlen, fcode);
12804 }
12805
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
		     tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
      fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
    }
  else
    {
      /* For the locked variants only use the replacements when the
	 implicit (library) declarations are available.  */
      fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
      fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Handle printf ("%s", str) and formats with no '%' at all.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* "%s" needs a real (non-va_list) string argument whose
	     contents are known at compile time.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (NULL_TREE, str[0]);
	  if (fn_putchar)
	    call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		call = build_call_expr_loc (loc, fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  if (!call)
    return NULL_TREE;

  /* The replacement may return int where the original returned
     something else; convert to the original return type.  */
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12954
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
		      tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
      fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
    }
  else
    {
      /* For the locked variants only use the replacements when the
	 implicit (library) declarations are available.  */
      fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
      fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* With no conversions in the format, a non-va_list call must
	 not have a trailing argument.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  if (!call)
    return NULL_TREE;
  /* Convert the replacement's result to the original return type.  */
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
13053
13054 /* Initialize format string characters in the target charset. */
13055
13056 static bool
13057 init_target_chars (void)
13058 {
13059 static bool init;
13060 if (!init)
13061 {
13062 target_newline = lang_hooks.to_target_charset ('\n');
13063 target_percent = lang_hooks.to_target_charset ('%');
13064 target_c = lang_hooks.to_target_charset ('c');
13065 target_s = lang_hooks.to_target_charset ('s');
13066 if (target_newline == 0 || target_percent == 0 || target_c == 0
13067 || target_s == 0)
13068 return false;
13069
13070 target_percent_c[0] = target_percent;
13071 target_percent_c[1] = target_c;
13072 target_percent_c[2] = '\0';
13073
13074 target_percent_s[0] = target_percent;
13075 target_percent_s[1] = target_s;
13076 target_percent_s[2] = '\0';
13077
13078 target_percent_s_newline[0] = target_percent;
13079 target_percent_s_newline[1] = target_s;
13080 target_percent_s_newline[2] = target_newline;
13081 target_percent_s_newline[3] = '\0';
13082
13083 init = true;
13084 }
13085 return true;
13086 }
13087
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      /* Round the MPFR value into GCC's internal representation.  */
      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  /* Narrow to TYPE's machine mode and only accept the result
	     if no precision was lost in that final rounding step.  */
	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
13124
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* Convert each component independently; TREE_TYPE (type) is the
	 scalar element type of the complex TYPE.  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  /* Final rounding into the machine mode; only accept if both
	     components survive without losing precision.  */
	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
13171
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* Require a finite argument that lies within the caller's
	 (optional) open or closed domain bounds.  */
      if (real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  /* Evaluate FUNC at TYPE's precision, with the MPFR flags
	     cleared immediately beforehand so do_mpfr_ckconv can
	     detect overflow/underflow raised by the call.  */
	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
13218
13219 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13220 FUNC on it and return the resulting value as a tree with type TYPE.
13221 The mpfr precision is set to the precision of TYPE. We assume that
13222 function FUNC returns zero if the result could be calculated
13223 exactly within the requested precision. */
13224
13225 static tree
13226 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13227 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13228 {
13229 tree result = NULL_TREE;
13230
13231 STRIP_NOPS (arg1);
13232 STRIP_NOPS (arg2);
13233
13234 /* To proceed, MPFR must exactly represent the target floating point
13235 format, which only happens when the target base equals two. */
13236 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13237 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13238 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13239 {
13240 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13241 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13242
13243 if (real_isfinite (ra1) && real_isfinite (ra2))
13244 {
13245 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13246 const int prec = fmt->p;
13247 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13248 int inexact;
13249 mpfr_t m1, m2;
13250
13251 mpfr_inits2 (prec, m1, m2, NULL);
13252 mpfr_from_real (m1, ra1, GMP_RNDN);
13253 mpfr_from_real (m2, ra2, GMP_RNDN);
13254 mpfr_clear_flags ();
13255 inexact = func (m1, m1, m2, rnd);
13256 result = do_mpfr_ckconv (m1, type, inexact);
13257 mpfr_clears (m1, m2, NULL);
13258 }
13259 }
13260
13261 return result;
13262 }
13263
13264 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13265 FUNC on it and return the resulting value as a tree with type TYPE.
13266 The mpfr precision is set to the precision of TYPE. We assume that
13267 function FUNC returns zero if the result could be calculated
13268 exactly within the requested precision. */
13269
13270 static tree
13271 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13272 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13273 {
13274 tree result = NULL_TREE;
13275
13276 STRIP_NOPS (arg1);
13277 STRIP_NOPS (arg2);
13278 STRIP_NOPS (arg3);
13279
13280 /* To proceed, MPFR must exactly represent the target floating point
13281 format, which only happens when the target base equals two. */
13282 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13283 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13284 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13285 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13286 {
13287 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13288 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13289 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13290
13291 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13292 {
13293 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13294 const int prec = fmt->p;
13295 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13296 int inexact;
13297 mpfr_t m1, m2, m3;
13298
13299 mpfr_inits2 (prec, m1, m2, m3, NULL);
13300 mpfr_from_real (m1, ra1, GMP_RNDN);
13301 mpfr_from_real (m2, ra2, GMP_RNDN);
13302 mpfr_from_real (m3, ra3, GMP_RNDN);
13303 mpfr_clear_flags ();
13304 inexact = func (m1, m1, m2, m3, rnd);
13305 result = do_mpfr_ckconv (m1, type, inexact);
13306 mpfr_clears (m1, m2, m3, NULL);
13307 }
13308 }
13309
13310 return result;
13311 }
13312
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  /* Compute sine and cosine in one call; INEXACT covers both
	     results, so both conversions below use the same flag.  */
	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  /* Fold only when both values converted cleanly.  */
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
13382
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  If MIN is not NULL, ARG2 must additionally
   satisfy the bound (inclusively iff INCLUSIVE is true).  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* FUNC takes the order as a host long, so the order must also
	 fit in a long; check the optional domain bound as well.  */
      if (n == (long)n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  /* Evaluate FUNC with the MPFR flags cleared immediately
	     beforehand so do_mpfr_ckconv can inspect them.  */
	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
13429
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  /* Compute the remainder into M0 and the low quotient bits
	     into INTEGER_QUO.  */
	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo = fold_build2 (MODIFY_EXPR,
						 TREE_TYPE (arg_quo), arg_quo,
						 build_int_cst (NULL, integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
13502
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  /* mpfr_lgamma computes log|Gamma(arg)| and stores the sign
	     of Gamma(arg) in SG.  */
	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (NULL, sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
13567
13568 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13569 function FUNC on it and return the resulting value as a tree with
13570 type TYPE. The mpfr precision is set to the precision of TYPE. We
13571 assume that function FUNC returns zero if the result could be
13572 calculated exactly within the requested precision. */
13573
13574 static tree
13575 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13576 {
13577 tree result = NULL_TREE;
13578
13579 STRIP_NOPS (arg);
13580
13581 /* To proceed, MPFR must exactly represent the target floating point
13582 format, which only happens when the target base equals two. */
13583 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13584 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13585 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13586 {
13587 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13588 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13589
13590 if (real_isfinite (re) && real_isfinite (im))
13591 {
13592 const struct real_format *const fmt =
13593 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13594 const int prec = fmt->p;
13595 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13596 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13597 int inexact;
13598 mpc_t m;
13599
13600 mpc_init2 (m, prec);
13601 mpfr_from_real (mpc_realref(m), re, rnd);
13602 mpfr_from_real (mpc_imagref(m), im, rnd);
13603 mpfr_clear_flags ();
13604 inexact = func (m, m, crnd);
13605 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13606 mpc_clear (m);
13607 }
13608 }
13609
13610 return result;
13611 }
13612
13613 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13614 mpc function FUNC on it and return the resulting value as a tree
13615 with type TYPE. The mpfr precision is set to the precision of
13616 TYPE. We assume that function FUNC returns zero if the result
13617 could be calculated exactly within the requested precision. If
13618 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13619 in the arguments and/or results. */
13620
13621 tree
13622 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13623 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13624 {
13625 tree result = NULL_TREE;
13626
13627 STRIP_NOPS (arg0);
13628 STRIP_NOPS (arg1);
13629
13630 /* To proceed, MPFR must exactly represent the target floating point
13631 format, which only happens when the target base equals two. */
13632 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13633 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13634 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13635 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13636 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13637 {
13638 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13639 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13640 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13641 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13642
13643 if (do_nonfinite
13644 || (real_isfinite (re0) && real_isfinite (im0)
13645 && real_isfinite (re1) && real_isfinite (im1)))
13646 {
13647 const struct real_format *const fmt =
13648 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13649 const int prec = fmt->p;
13650 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13651 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13652 int inexact;
13653 mpc_t m0, m1;
13654
13655 mpc_init2 (m0, prec);
13656 mpc_init2 (m1, prec);
13657 mpfr_from_real (mpc_realref(m0), re0, rnd);
13658 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13659 mpfr_from_real (mpc_realref(m1), re1, rnd);
13660 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13661 mpfr_clear_flags ();
13662 inexact = func (m0, m0, m1, crnd);
13663 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13664 mpc_clear (m0);
13665 mpc_clear (m1);
13666 }
13667 }
13668
13669 return result;
13670 }
13671
13672 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13673 a normal call should be emitted rather than expanding the function
13674 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13675
13676 static tree
13677 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13678 {
13679 int nargs = gimple_call_num_args (stmt);
13680
13681 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
13682 (nargs > 0
13683 ? gimple_call_arg_ptr (stmt, 0)
13684 : &error_mark_node), fcode);
13685 }
13686
13687 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13688 a normal call should be emitted rather than expanding the function
13689 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13690 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13691 passed as second argument. */
13692
13693 tree
13694 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13695 enum built_in_function fcode)
13696 {
13697 int nargs = gimple_call_num_args (stmt);
13698
13699 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
13700 (nargs > 0
13701 ? gimple_call_arg_ptr (stmt, 0)
13702 : &error_mark_node), maxlen, fcode);
13703 }
13704
13705 /* Builtins with folding operations that operate on "..." arguments
13706 need special handling; we need to store the arguments in a convenient
13707 data structure before attempting any folding. Fortunately there are
13708 only a few builtins that fall into this category. FNDECL is the
13709 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13710 result of the function call is ignored. */
13711
13712 static tree
13713 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13714 bool ignore ATTRIBUTE_UNUSED)
13715 {
13716 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13717 tree ret = NULL_TREE;
13718
13719 switch (fcode)
13720 {
13721 case BUILT_IN_SPRINTF_CHK:
13722 case BUILT_IN_VSPRINTF_CHK:
13723 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13724 break;
13725
13726 case BUILT_IN_SNPRINTF_CHK:
13727 case BUILT_IN_VSNPRINTF_CHK:
13728 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13729
13730 default:
13731 break;
13732 }
13733 if (ret)
13734 {
13735 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13736 TREE_NO_WARNING (ret) = 1;
13737 return ret;
13738 }
13739 return NULL_TREE;
13740 }
13741
13742 /* A wrapper function for builtin folding that prevents warnings for
13743 "statement without effect" and the like, caused by removing the
13744 call node earlier than the warning is generated. */
13745
13746 tree
13747 fold_call_stmt (gimple stmt, bool ignore)
13748 {
13749 tree ret = NULL_TREE;
13750 tree fndecl = gimple_call_fndecl (stmt);
13751 location_t loc = gimple_location (stmt);
13752 if (fndecl
13753 && TREE_CODE (fndecl) == FUNCTION_DECL
13754 && DECL_BUILT_IN (fndecl)
13755 && !gimple_call_va_arg_pack_p (stmt))
13756 {
13757 int nargs = gimple_call_num_args (stmt);
13758 tree *args = (nargs > 0
13759 ? gimple_call_arg_ptr (stmt, 0)
13760 : &error_mark_node);
13761
13762 if (avoid_folding_inline_builtin (fndecl))
13763 return NULL_TREE;
13764 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13765 {
13766 return targetm.fold_builtin (fndecl, nargs, args, ignore);
13767 }
13768 else
13769 {
13770 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13771 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13772 if (!ret)
13773 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13774 if (ret)
13775 {
13776 /* Propagate location information from original call to
13777 expansion of builtin. Otherwise things like
13778 maybe_emit_chk_warning, that operate on the expansion
13779 of a builtin, will use the wrong location information. */
13780 if (gimple_has_location (stmt))
13781 {
13782 tree realret = ret;
13783 if (TREE_CODE (ret) == NOP_EXPR)
13784 realret = TREE_OPERAND (ret, 0);
13785 if (CAN_HAVE_LOCATION_P (realret)
13786 && !EXPR_HAS_LOCATION (realret))
13787 SET_EXPR_LOCATION (realret, loc);
13788 return realret;
13789 }
13790 return ret;
13791 }
13792 }
13793 }
13794 return NULL_TREE;
13795 }
13796
13797 /* Look up the function in built_in_decls that corresponds to DECL
13798 and set ASMSPEC as its user assembler name. DECL must be a
13799 function decl that declares a builtin. */
13800
13801 void
13802 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13803 {
13804 tree builtin;
13805 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13806 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13807 && asmspec != 0);
13808
13809 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13810 set_user_assembler_name (builtin, asmspec);
13811 switch (DECL_FUNCTION_CODE (decl))
13812 {
13813 case BUILT_IN_MEMCPY:
13814 init_block_move_fn (asmspec);
13815 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13816 break;
13817 case BUILT_IN_MEMSET:
13818 init_block_clear_fn (asmspec);
13819 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13820 break;
13821 case BUILT_IN_MEMMOVE:
13822 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13823 break;
13824 case BUILT_IN_MEMCMP:
13825 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13826 break;
13827 case BUILT_IN_ABORT:
13828 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13829 break;
13830 case BUILT_IN_FFS:
13831 if (INT_TYPE_SIZE < BITS_PER_WORD)
13832 {
13833 set_user_assembler_libfunc ("ffs", asmspec);
13834 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13835 MODE_INT, 0), "ffs");
13836 }
13837 break;
13838 default:
13839 break;
13840 }
13841 }
13842
13843 /* Return true if DECL is a builtin that expands to a constant or similarly
13844 simple code. */
13845 bool
13846 is_simple_builtin (tree decl)
13847 {
13848 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13849 switch (DECL_FUNCTION_CODE (decl))
13850 {
13851 /* Builtins that expand to constants. */
13852 case BUILT_IN_CONSTANT_P:
13853 case BUILT_IN_EXPECT:
13854 case BUILT_IN_OBJECT_SIZE:
13855 case BUILT_IN_UNREACHABLE:
13856 /* Simple register moves or loads from stack. */
13857 case BUILT_IN_RETURN_ADDRESS:
13858 case BUILT_IN_EXTRACT_RETURN_ADDR:
13859 case BUILT_IN_FROB_RETURN_ADDR:
13860 case BUILT_IN_RETURN:
13861 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13862 case BUILT_IN_FRAME_ADDRESS:
13863 case BUILT_IN_VA_END:
13864 case BUILT_IN_STACK_SAVE:
13865 case BUILT_IN_STACK_RESTORE:
13866 /* Exception state returns or moves registers around. */
13867 case BUILT_IN_EH_FILTER:
13868 case BUILT_IN_EH_POINTER:
13869 case BUILT_IN_EH_COPY_VALUES:
13870 return true;
13871
13872 default:
13873 return false;
13874 }
13875
13876 return false;
13877 }
13878
13879 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13880 most probably expanded inline into reasonably simple code. This is a
13881 superset of is_simple_builtin. */
13882 bool
13883 is_inexpensive_builtin (tree decl)
13884 {
13885 if (!decl)
13886 return false;
13887 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13888 return true;
13889 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13890 switch (DECL_FUNCTION_CODE (decl))
13891 {
13892 case BUILT_IN_ABS:
13893 case BUILT_IN_ALLOCA:
13894 case BUILT_IN_BSWAP32:
13895 case BUILT_IN_BSWAP64:
13896 case BUILT_IN_CLZ:
13897 case BUILT_IN_CLZIMAX:
13898 case BUILT_IN_CLZL:
13899 case BUILT_IN_CLZLL:
13900 case BUILT_IN_CTZ:
13901 case BUILT_IN_CTZIMAX:
13902 case BUILT_IN_CTZL:
13903 case BUILT_IN_CTZLL:
13904 case BUILT_IN_FFS:
13905 case BUILT_IN_FFSIMAX:
13906 case BUILT_IN_FFSL:
13907 case BUILT_IN_FFSLL:
13908 case BUILT_IN_IMAXABS:
13909 case BUILT_IN_FINITE:
13910 case BUILT_IN_FINITEF:
13911 case BUILT_IN_FINITEL:
13912 case BUILT_IN_FINITED32:
13913 case BUILT_IN_FINITED64:
13914 case BUILT_IN_FINITED128:
13915 case BUILT_IN_FPCLASSIFY:
13916 case BUILT_IN_ISFINITE:
13917 case BUILT_IN_ISINF_SIGN:
13918 case BUILT_IN_ISINF:
13919 case BUILT_IN_ISINFF:
13920 case BUILT_IN_ISINFL:
13921 case BUILT_IN_ISINFD32:
13922 case BUILT_IN_ISINFD64:
13923 case BUILT_IN_ISINFD128:
13924 case BUILT_IN_ISNAN:
13925 case BUILT_IN_ISNANF:
13926 case BUILT_IN_ISNANL:
13927 case BUILT_IN_ISNAND32:
13928 case BUILT_IN_ISNAND64:
13929 case BUILT_IN_ISNAND128:
13930 case BUILT_IN_ISNORMAL:
13931 case BUILT_IN_ISGREATER:
13932 case BUILT_IN_ISGREATEREQUAL:
13933 case BUILT_IN_ISLESS:
13934 case BUILT_IN_ISLESSEQUAL:
13935 case BUILT_IN_ISLESSGREATER:
13936 case BUILT_IN_ISUNORDERED:
13937 case BUILT_IN_VA_ARG_PACK:
13938 case BUILT_IN_VA_ARG_PACK_LEN:
13939 case BUILT_IN_VA_COPY:
13940 case BUILT_IN_TRAP:
13941 case BUILT_IN_SAVEREGS:
13942 case BUILT_IN_POPCOUNTL:
13943 case BUILT_IN_POPCOUNTLL:
13944 case BUILT_IN_POPCOUNTIMAX:
13945 case BUILT_IN_POPCOUNT:
13946 case BUILT_IN_PARITYL:
13947 case BUILT_IN_PARITYLL:
13948 case BUILT_IN_PARITYIMAX:
13949 case BUILT_IN_PARITY:
13950 case BUILT_IN_LABS:
13951 case BUILT_IN_LLABS:
13952 case BUILT_IN_PREFETCH:
13953 return true;
13954
13955 default:
13956 return is_simple_builtin (decl);
13957 }
13958
13959 return false;
13960 }