re PR tree-optimization/44485 (ICE in get_expr_operands, at tree-ssa-operands.c:1020)
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic-core.h"
53 #include "builtins.h"
54
55 #ifndef SLOW_UNALIGNED_ACCESS
56 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
57 #endif
58
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 #endif
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63
64 struct target_builtins default_target_builtins;
65 #if SWITCHABLE_TARGET
66 struct target_builtins *this_target_builtins = &default_target_builtins;
67 #endif
68
69 /* Define the names of the builtin function types and codes. */
70 const char *const built_in_class_names[4]
71 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
72
73 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
74 const char * built_in_names[(int) END_BUILTINS] =
75 {
76 #include "builtins.def"
77 };
78 #undef DEF_BUILTIN
79
80 /* Setup an array of _DECL trees, make sure each element is
81 initialized to NULL_TREE. */
82 tree built_in_decls[(int) END_BUILTINS];
83 /* Declarations used when constructing the builtin implicitly in the compiler.
84 It may be NULL_TREE when this is invalid (for instance runtime is not
85 required to implement the function call in all cases). */
86 tree implicit_built_in_decls[(int) END_BUILTINS];
87
88 static const char *c_getstr (tree);
89 static rtx c_readstr (const char *, enum machine_mode);
90 static int target_char_cast (tree, char *);
91 static rtx get_memory_rtx (tree, tree);
92 static int apply_args_size (void);
93 static int apply_result_size (void);
94 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
95 static rtx result_vector (int, rtx);
96 #endif
97 static void expand_builtin_update_setjmp_buf (rtx);
98 static void expand_builtin_prefetch (tree);
99 static rtx expand_builtin_apply_args (void);
100 static rtx expand_builtin_apply_args_1 (void);
101 static rtx expand_builtin_apply (rtx, rtx, rtx);
102 static void expand_builtin_return (rtx);
103 static enum type_class type_to_class (tree);
104 static rtx expand_builtin_classify_type (tree);
105 static void expand_errno_check (tree, rtx);
106 static rtx expand_builtin_mathfn (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
109 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
110 static rtx expand_builtin_sincos (tree);
111 static rtx expand_builtin_cexpi (tree, rtx, rtx);
112 static rtx expand_builtin_int_roundingfn (tree, rtx);
113 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_strcmp (tree, rtx);
120 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
121 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
122 static rtx expand_builtin_memcpy (tree, rtx);
123 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
125 enum machine_mode, int);
126 static rtx expand_builtin_strcpy (tree, rtx);
127 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
128 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_strncpy (tree, rtx);
130 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
131 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
133 static rtx expand_builtin_bzero (tree);
134 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_alloca (tree, rtx);
136 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
137 static rtx expand_builtin_frame_address (tree, tree);
138 static tree stabilize_va_list_loc (location_t, tree, int);
139 static rtx expand_builtin_expect (tree, rtx);
140 static tree fold_builtin_constant_p (tree);
141 static tree fold_builtin_expect (location_t, tree, tree);
142 static tree fold_builtin_classify_type (tree);
143 static tree fold_builtin_strlen (location_t, tree, tree);
144 static tree fold_builtin_inf (location_t, tree, int);
145 static tree fold_builtin_nan (tree, tree, int);
146 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
147 static bool validate_arg (const_tree, enum tree_code code);
148 static bool integer_valued_real_p (tree);
149 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
150 static bool readonly_data_expr (tree);
151 static rtx expand_builtin_fabs (tree, rtx, rtx);
152 static rtx expand_builtin_signbit (tree, rtx);
153 static tree fold_builtin_sqrt (location_t, tree, tree);
154 static tree fold_builtin_cbrt (location_t, tree, tree);
155 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
156 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
157 static tree fold_builtin_cos (location_t, tree, tree, tree);
158 static tree fold_builtin_cosh (location_t, tree, tree, tree);
159 static tree fold_builtin_tan (tree, tree);
160 static tree fold_builtin_trunc (location_t, tree, tree);
161 static tree fold_builtin_floor (location_t, tree, tree);
162 static tree fold_builtin_ceil (location_t, tree, tree);
163 static tree fold_builtin_round (location_t, tree, tree);
164 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
165 static tree fold_builtin_bitop (tree, tree);
166 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
167 static tree fold_builtin_strchr (location_t, tree, tree, tree);
168 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
170 static tree fold_builtin_strcmp (location_t, tree, tree);
171 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
172 static tree fold_builtin_signbit (location_t, tree, tree);
173 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
174 static tree fold_builtin_isascii (location_t, tree);
175 static tree fold_builtin_toascii (location_t, tree);
176 static tree fold_builtin_isdigit (location_t, tree);
177 static tree fold_builtin_fabs (location_t, tree, tree);
178 static tree fold_builtin_abs (location_t, tree, tree);
179 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
180 enum tree_code);
181 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
182 static tree fold_builtin_0 (location_t, tree, bool);
183 static tree fold_builtin_1 (location_t, tree, tree, bool);
184 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
185 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
186 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
187 static tree fold_builtin_varargs (location_t, tree, tree, bool);
188
189 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
190 static tree fold_builtin_strstr (location_t, tree, tree, tree);
191 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
192 static tree fold_builtin_strcat (location_t, tree, tree);
193 static tree fold_builtin_strncat (location_t, tree, tree, tree);
194 static tree fold_builtin_strspn (location_t, tree, tree);
195 static tree fold_builtin_strcspn (location_t, tree, tree);
196 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
197
198 static rtx expand_builtin_object_size (tree);
199 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
200 enum built_in_function);
201 static void maybe_emit_chk_warning (tree, enum built_in_function);
202 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
203 static void maybe_emit_free_warning (tree);
204 static tree fold_builtin_object_size (tree, tree);
205 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
206 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
207 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
208 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
209 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
210 enum built_in_function);
211 static bool init_target_chars (void);
212
213 static unsigned HOST_WIDE_INT target_newline;
214 static unsigned HOST_WIDE_INT target_percent;
215 static unsigned HOST_WIDE_INT target_c;
216 static unsigned HOST_WIDE_INT target_s;
217 static char target_percent_c[3];
218 static char target_percent_s[3];
219 static char target_percent_s_newline[4];
220 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
221 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
222 static tree do_mpfr_arg2 (tree, tree, tree,
223 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
224 static tree do_mpfr_arg3 (tree, tree, tree, tree,
225 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
226 static tree do_mpfr_sincos (tree, tree, tree);
227 static tree do_mpfr_bessel_n (tree, tree, tree,
228 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
229 const REAL_VALUE_TYPE *, bool);
230 static tree do_mpfr_remquo (tree, tree, tree);
231 static tree do_mpfr_lgamma_r (tree, tree, tree);
232
/* Return true if NAME starts with __builtin_ or __sync_.  */

bool
is_builtin_name (const char *name)
{
  /* The two reserved prefixes under which built-ins are exposed.  */
  static const char *const prefixes[] = { "__builtin_", "__sync_" };
  size_t i;

  for (i = 0; i < sizeof prefixes / sizeof prefixes[0]; i++)
    if (strncmp (name, prefixes[i], strlen (prefixes[i])) == 0)
      return true;

  return false;
}
244
245
246 /* Return true if DECL is a function symbol representing a built-in. */
247
248 bool
249 is_builtin_fn (tree decl)
250 {
251 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
252 }
253
254
255 /* Return true if NODE should be considered for inline expansion regardless
256 of the optimization level. This means whenever a function is invoked with
257 its "internal" name, which normally contains the prefix "__builtin". */
258
259 static bool
260 called_as_built_in (tree node)
261 {
262 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
263 we want the name used to call the function, not the name it
264 will have. */
265 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
266 return is_builtin_name (name);
267 }
268
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what.

   The result combines three sources of knowledge: the declared/typed
   alignment of the innermost referenced object, SSA pointer-alignment
   info where available, and the low bits of any constant or variable
   offset applied on top of that object.  */

unsigned int
get_object_alignment (tree exp, unsigned int max_align)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align, inner;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    align = DECL_ALIGN (exp);
  else if (CONSTANT_CLASS_P (exp))
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      /* Targets may over-align constants (e.g. string literals).  */
      align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
#endif
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    align = TYPE_ALIGN (TREE_TYPE (exp));
  else if (TREE_CODE (exp) == INDIRECT_REF)
    align = TYPE_ALIGN (TREE_TYPE (exp));
  else if (TREE_CODE (exp) == MISALIGNED_INDIRECT_REF)
    {
      /* Operand 1 carries the alignment guarantee; zero means no
	 guarantee beyond a single byte.  */
      tree op1 = TREE_OPERAND (exp, 1);
      align = integer_zerop (op1) ? BITS_PER_UNIT : TREE_INT_CST_LOW (op1);
    }
  else if (TREE_CODE (exp) == MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      struct ptr_info_def *pi;
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  /* For ADDR & CST the lowest set bit of CST bounds the
	     alignment of the masked pointer (x & -x idiom).  */
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}
      else
	align = BITS_PER_UNIT;
      if (TREE_CODE (addr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (addr)))
	{
	  /* Fold in the SSA pointer-info alignment/misalignment.  */
	  bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
	  align = MAX (pi->align * BITS_PER_UNIT, align);
	}
      else if (TREE_CODE (addr) == ADDR_EXPR)
	align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
						  max_align));
      bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == TARGET_MEM_REF
	   && TMR_BASE (exp)
	   && POINTER_TYPE_P (TREE_TYPE (TMR_BASE (exp))))
    {
      /* TARGET_MEM_REF with a pointer base: same analysis as MEM_REF,
	 plus the TMR's constant offset and scaled index parts.  */
      struct ptr_info_def *pi;
      tree addr = TMR_BASE (exp);
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}
      else
	align = BITS_PER_UNIT;
      if (TREE_CODE (addr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (addr)))
	{
	  bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
	  align = MAX (pi->align * BITS_PER_UNIT, align);
	}
      else if (TREE_CODE (addr) == ADDR_EXPR)
	align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
						  max_align));
      if (TMR_OFFSET (exp))
	bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
      if (TMR_INDEX (exp) && TMR_STEP (exp))
	{
	  /* A variable index scaled by STEP can only preserve the
	     alignment implied by STEP's lowest set bit.  */
	  unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
	  align = MIN (align, (step & -step) * BITS_PER_UNIT);
	}
      else if (TMR_INDEX (exp))
	align = BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == TARGET_MEM_REF
	   && TMR_SYMBOL (exp))
    {
      /* TARGET_MEM_REF based on a symbol: recurse on the symbol, then
	 account for offset and index exactly as above.  */
      align = get_object_alignment (TMR_SYMBOL (exp), max_align);
      if (TMR_OFFSET (exp))
	bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
      if (TMR_INDEX (exp) && TMR_STEP (exp))
	{
	  unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
	  align = MIN (align, (step & -step) * BITS_PER_UNIT);
	}
      else if (TMR_INDEX (exp))
	align = BITS_PER_UNIT;
    }
  else
    align = BITS_PER_UNIT;

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  inner = max_align;
  while (offset)
    {
      tree next_offset;

      if (TREE_CODE (offset) == PLUS_EXPR)
	{
	  next_offset = TREE_OPERAND (offset, 0);
	  offset = TREE_OPERAND (offset, 1);
	}
      else
	next_offset = NULL;
      if (host_integerp (offset, 1))
	{
	  /* Any overflow in calculating offset_bits won't change
	     the alignment.  */
	  unsigned offset_bits
	    = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	  if (offset_bits)
	    inner = MIN (inner, (offset_bits & -offset_bits));
	}
      else if (TREE_CODE (offset) == MULT_EXPR
	       && host_integerp (TREE_OPERAND (offset, 1), 1))
	{
	  /* Any overflow in calculating offset_factor won't change
	     the alignment.  */
	  unsigned offset_factor
	    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
	       * BITS_PER_UNIT);

	  if (offset_factor)
	    inner = MIN (inner, (offset_factor & -offset_factor));
	}
      else
	{
	  /* Offset of unknown shape: only byte alignment survives.  */
	  inner = MIN (inner, BITS_PER_UNIT);
	  break;
	}
      offset = next_offset;
    }

  /* Alignment is innermost object alignment adjusted by the constant
     and non-constant offset parts.  */
  align = MIN (align, inner);
  bitpos = bitpos & (align - 1);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return MIN (align, max_align);
}
443
444 /* Returns true iff we can trust that alignment information has been
445 calculated properly. */
446
447 bool
448 can_trust_pointer_alignment (void)
449 {
450 /* We rely on TER to compute accurate alignment information. */
451 return (optimize && flag_tree_ter);
452 }
453
454 /* Return the alignment in bits of EXP, a pointer valued expression.
455 But don't return more than MAX_ALIGN no matter what.
456 The alignment returned is, by default, the alignment of the thing that
457 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
458
459 Otherwise, look at the expression to see if we can do better, i.e., if the
460 expression is actually pointing at an object whose alignment is tighter. */
461
462 unsigned int
463 get_pointer_alignment (tree exp, unsigned int max_align)
464 {
465 STRIP_NOPS (exp);
466
467 if (TREE_CODE (exp) == ADDR_EXPR)
468 return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
469 else if (TREE_CODE (exp) == SSA_NAME
470 && POINTER_TYPE_P (TREE_TYPE (exp)))
471 {
472 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
473 unsigned align;
474 if (!pi)
475 return BITS_PER_UNIT;
476 if (pi->misalign != 0)
477 align = (pi->misalign & -pi->misalign);
478 else
479 align = pi->align;
480 return MIN (max_align, align * BITS_PER_UNIT);
481 }
482
483 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
484 }
485
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* For COND_EXPR, try both arms; if they yield the same length the
     condition's value is irrelevant (but we must not discard its
     side-effects unless ONLY_VALUE says we may).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For COMPOUND_EXPR the value comes from the second operand.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  if (EXPR_HAS_LOCATION (src))
    loc = EXPR_LOCATION (src);
  else
    loc = input_location;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
590
591 /* Return a char pointer for a C string if it is a string constant
592 or sum of string constant and integer constant. */
593
594 static const char *
595 c_getstr (tree src)
596 {
597 tree offset_node;
598
599 src = string_constant (src, &offset_node);
600 if (src == 0)
601 return 0;
602
603 if (offset_node == 0)
604 return TREE_STRING_POINTER (src);
605 else if (!host_integerp (offset_node, 1)
606 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
607 return 0;
608
609 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
610 }
611
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* C[0]/C[1] hold the low/high host words of the result.  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map host byte index I to target bit offset J, honoring both
	 target word order and within-word byte order.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);

      /* CH starts as a nonzero sentinel; once a NUL terminator has
	 been read it stays zero, so trailing bytes pad with zeros.  */
      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
644
645 /* Cast a target constant CST to target CHAR and if that value fits into
646 host char type, return zero and put that value into variable pointed to by
647 P. */
648
649 static int
650 target_char_cast (tree cst, char *p)
651 {
652 unsigned HOST_WIDE_INT val, hostval;
653
654 if (!host_integerp (cst, 1)
655 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
656 return 1;
657
658 val = tree_low_cst (cst, 1);
659 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
660 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
661
662 hostval = val;
663 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
664 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
665
666 if (val != hostval)
667 return 1;
668
669 *p = hostval;
670 return 0;
671 }
672
673 /* Similar to save_expr, but assumes that arbitrary code is not executed
674 in between the multiple evaluations. In particular, we assume that a
675 non-addressable local variable will not be modified. */
676
677 static tree
678 builtin_save_expr (tree exp)
679 {
680 if (TREE_ADDRESSABLE (exp) == 0
681 && (TREE_CODE (exp) == PARM_DECL
682 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
683 return exp;
684
685 return save_expr (exp);
686 }
687
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

  /* Choose the starting frame pointer; targets may override it.  */
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      /* Load the saved chain pointer from the frame into a register.  */
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address sits one word above the frame address.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
771
772 /* Alias set used for setjmp buffer. */
773 static alias_set_type setjmp_alias_set = -1;
774
775 /* Construct the leading half of a __builtin_setjmp call. Control will
776 return to RECEIVER_LABEL. This is also called directly by the SJLJ
777 exception handling code. */
778
779 void
780 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
781 {
782 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
783 rtx stack_save;
784 rtx mem;
785
786 if (setjmp_alias_set == -1)
787 setjmp_alias_set = new_alias_set ();
788
789 buf_addr = convert_memory_address (Pmode, buf_addr);
790
791 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
792
793 /* We store the frame pointer and the address of receiver_label in
794 the buffer and use the rest of it for the stack save area, which
795 is machine-dependent. */
796
797 mem = gen_rtx_MEM (Pmode, buf_addr);
798 set_mem_alias_set (mem, setjmp_alias_set);
799 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
800
801 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
802 set_mem_alias_set (mem, setjmp_alias_set);
803
804 emit_move_insn (validize_mem (mem),
805 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
806
807 stack_save = gen_rtx_MEM (sa_mode,
808 plus_constant (buf_addr,
809 2 * GET_MODE_SIZE (Pmode)));
810 set_mem_alias_set (stack_save, setjmp_alias_set);
811 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
812
813 /* If there is further processing to do, do it. */
814 #ifdef HAVE_builtin_setjmp_setup
815 if (HAVE_builtin_setjmp_setup)
816 emit_insn (gen_builtin_setjmp_setup (buf_addr));
817 #endif
818
819 /* Tell optimize_save_area_alloca that extra work is going to
820 need to go on during alloca. */
821 cfun->calls_setjmp = 1;
822
823 /* We have a nonlocal label. */
824 cfun->has_nonlocal_label = 1;
825 }
826
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Prefer a target-specific receiver pattern; fall back to the
     nonlocal-goto receiver, and finally to nothing at all.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
897
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.
   BUF_ADDR is the address of the jump buffer; VALUE must be const1_rtx
   (enforced below), since that is what builtin_setjmp returns.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  /* Remember the last insn before expansion; the backward search at
     the bottom must stop before reaching it (see the assert there).  */
  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* Buffer layout: word 0 holds the saved frame pointer, word 1
	 the receiver label, word 2 the saved stack pointer.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
985
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  Returns const0_rtx (the call's value
   is not meaningful), or NULL_RTX if the argument list fails validation.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  /* Save-area layout: the frame pointer at offset 0, followed by the
     stack save slot one pointer-word later.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1070
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  BUF_ADDR is the (already expanded) buffer address.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;


#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  /* NOTE(review): when STACK_SAVEAREA_MODE is defined this assignment
     unconditionally overrides the mode taken from save_stack_nonlocal
     above — confirm that priority is intentional.  */
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  /* The stack save slot lives two pointer-words into the buffer,
     matching the offset used by expand_builtin_longjmp.  */
  stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}
1104
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  EXP is the CALL_EXPR; invalid optional arguments are diagnosed
   and replaced with safe defaults rather than aborting expansion.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      /* Coerce the address operand into a form the prefetch pattern
	 accepts: a Pmode register if the predicate or mode rejects it.  */
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
	   (op0,
	    insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
	  || (GET_MODE (op0) != Pmode))
	{
	  op0 = convert_memory_address (Pmode, op0);
	  op0 = force_reg (Pmode, op0);
	}
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1186
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  The returned MEM is BLKmode with alias set 0 and
   no recorded size (string builtins may touch anything).  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;
  HOST_WIDE_INT off;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Look through a constant positive offset (&obj + off) so the
     attributes are taken from the underlying object; OFF is applied
     to the MEM afterwards.  */
  off = 0;
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && host_integerp (TREE_OPERAND (exp, 1), 0)
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  else if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      if (off)
	mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  /* Strip wrappers down to the innermost COMPONENT_REF.  */
	  while (TREE_CODE (inner) == ARRAY_REF
		 || CONVERT_EXPR_P (inner)
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  if (MEM_OFFSET (mem)
	      && CONST_INT_P (MEM_OFFSET (mem)))
	    offset = INTVAL (MEM_OFFSET (mem));

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  /* Walk outward through nested COMPONENT_REFs; OFFSET/LENGTH of
	     -1 mean "unknown" and force the reference to be dropped.  */
	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      /* Bitfields are generally not byte-addressable.  */
	      gcc_assert (!DECL_BIT_FIELD (field)
			  || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			       % BITS_PER_UNIT) == 0
			      && host_integerp (DECL_SIZE (field), 0)
			      && (TREE_INT_CST_LOW (DECL_SIZE (field))
				  % BITS_PER_UNIT) == 0));

	      /* If we can prove that the memory starting at XEXP (mem, 0) and
		 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
		 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
		 fields without DECL_SIZE_UNIT like flexible array members.  */
	      if (length >= 0
		  && DECL_SIZE_UNIT (field)
		  && host_integerp (DECL_SIZE_UNIT (field), 0))
		{
		  HOST_WIDE_INT size
		    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
	    }
	}
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
1322 \f
/* Built-in functions to perform an untyped call and return.  */

/* apply_args_mode and apply_result_mode are fields of
   this_target_builtins; these macros preserve the historical
   unqualified names used throughout this file.  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
1329
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  The result is computed once and cached
   in a function-local static.  */

static int
apply_args_size (void)
{
  static int size = -1;	/* Cached result; -1 means not yet computed.  */
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      /* Account for each register that can carry function arguments,
	 aligning SIZE to each register's natural alignment, and record
	 its mode (VOIDmode for non-argument registers).  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = reg_raw_mode[regno];

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
1372
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  Like apply_args_size, the result
   is computed once and cached in a function-local static.  */

static int
apply_result_size (void)
{
  static int size = -1;	/* Cached result; -1 means not yet computed.  */
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      /* Account for every register that can hold a function return
	 value, recording its mode (VOIDmode for the rest).  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = reg_raw_mode[regno];

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
1412
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  Returns a PARALLEL of SETs, one per return
   register recorded in apply_result_mode.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	/* When saving, the destination is the block and the source the
	   hard register; when restoring, the other way round (and the
	   register is mapped through INCOMING_REGNO).  */
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1443
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns the
   address of the stack block holding the saved state; the layout must
   agree with the one apply_args_size computes.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1504
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Expand into a detached sequence so the insns can be relocated.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1549
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the callee address, ARGUMENTS the block built by
   __builtin_apply_args, and ARGSIZE the number of bytes of arguments
   to copy.  Returns the address (in ptr_mode) of the block holding the
   callee's return registers.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  (apply_args_size also ensures
     apply_args_mode is initialized.)  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1713
/* Perform an untyped return.  RESULT is the address of the block of
   saved return registers produced by expand_builtin_apply; its layout
   must agree with apply_result_mode/apply_result_size.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* apply_result_size also ensures apply_result_mode is initialized.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Collect a USE of each restored register so they stay live
	   up to the return.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1763
1764 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1765
1766 static enum type_class
1767 type_to_class (tree type)
1768 {
1769 switch (TREE_CODE (type))
1770 {
1771 case VOID_TYPE: return void_type_class;
1772 case INTEGER_TYPE: return integer_type_class;
1773 case ENUMERAL_TYPE: return enumeral_type_class;
1774 case BOOLEAN_TYPE: return boolean_type_class;
1775 case POINTER_TYPE: return pointer_type_class;
1776 case REFERENCE_TYPE: return reference_type_class;
1777 case OFFSET_TYPE: return offset_type_class;
1778 case REAL_TYPE: return real_type_class;
1779 case COMPLEX_TYPE: return complex_type_class;
1780 case FUNCTION_TYPE: return function_type_class;
1781 case METHOD_TYPE: return method_type_class;
1782 case RECORD_TYPE: return record_type_class;
1783 case UNION_TYPE:
1784 case QUAL_UNION_TYPE: return union_type_class;
1785 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1786 ? string_type_class : array_type_class);
1787 case LANG_TYPE: return lang_type_class;
1788 default: return no_type_class;
1789 }
1790 }
1791
1792 /* Expand a call EXP to __builtin_classify_type. */
1793
1794 static rtx
1795 expand_builtin_classify_type (tree exp)
1796 {
1797 if (call_expr_nargs (exp))
1798 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1799 return GEN_INT (no_type_class);
1800 }
1801
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  Both macros expand
   to switch cases that assign the local variables fcode, fcodef and
   fcodel of the enclosing function.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1815
/* Return mathematic function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true find the function in
   implicit_built_in_decls[], otherwise use built_in_decls[].  If we
   can't do the conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
{
  tree const *const fn_arr
    = implicit ? implicit_built_in_decls : built_in_decls;
  enum built_in_function fcode, fcodef, fcodel;

  /* For each known math builtin, record its double, float and long
     double function codes; the variant matching TYPE is selected
     after the switch.  */
  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

    default:
      /* FN is not a recognized math builtin.  */
      return NULL_TREE;
    }

  /* Pick the variant whose operand type matches TYPE's main variant.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fn_arr[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fn_arr[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fn_arr[fcodel];
  else
    return NULL_TREE;
}
1927
1928 /* Like mathfn_built_in_1(), but always use the implicit array. */
1929
1930 tree
1931 mathfn_built_in (tree type, enum built_in_function fn)
1932 {
1933 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1934 }
1935
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  Since NaN != NaN, an EQ
     self-comparison of TARGET fails exactly when it is NaN.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1978
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Map the builtin to its expansion optab, noting which functions
     may need errno set to EDOM on a domain error.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt of a provably nonnegative argument cannot set errno.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* errno only matters when math errno handling is enabled and the
     mode has NaNs to signal the domain error with.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2098
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* The ldexp family takes an integer second argument; all other
     handled builtins take two reals.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* FALLTHRU */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb only matches ldexp semantics when the radix is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2204
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Both sin and cos first try the combined sincos insn.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* sincos produces two values; request only the one this
	     builtin needs and discard the other.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2308
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = 0;
  enum machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
2352
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      /* Remember where we are so the insns can be deleted if
	 expansion fails below.  */
      rtx last = get_last_insn ();
      tree orig_arg = arg;
      /* Make a suitable register to place result in.  */
      if (!target
	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
	  || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
		  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
	return target;
      /* Expansion failed: discard the partial insns and restore the
	 unstabilized argument before falling back to a library call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2410
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Expand the dereferenced sin and cos output pointers as the
     memory destinations.  */
  op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
  op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  /* sincos returns void; const0_rtx stands in for "no value".  */
  return const0_rtx;
}
2459
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Select the sincos variant matching this cexpi variant.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Stack temporaries receive the sin and cos results; build
	 tree-level pointers to them for the call's arguments.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) is computed as cexp(0 + x*i).  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos result in the real part,
     sin result in the imaginary part.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2569
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
2588
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Pick the conversion optab and the plain rounding builtin to fall
     back on if the optab isn't supported.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  /* Build the unfoldable call floor(arg)/ceil(arg) and expand it.  */
  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2716
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2785
2786 /* To evaluate powi(x,n), the floating point value x raised to the
2787 constant integer exponent n, we use a hybrid algorithm that
2788 combines the "window method" with look-up tables. For an
2789 introduction to exponentiation algorithms and "addition chains",
2790 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2791 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2792 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2793 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2794
2795 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2796 multiplications to inline before calling the system library's pow
2797 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2798 so this default never requires calling pow, powf or powl. */
2799
2800 #ifndef POWI_MAX_MULTS
2801 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2802 #endif
2803
2804 /* The size of the "optimal power tree" lookup table. All
2805 exponents less than this value are simply looked up in the
2806 powi_table below. This threshold is also used to size the
2807 cache of pseudo registers that hold intermediate results. */
2808 #define POWI_TABLE_SIZE 256
2809
2810 /* The size, in bits of the window, used in the "window method"
2811 exponentiation algorithm. This is equivalent to a radix of
2812 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2813 #define POWI_WINDOW_SIZE 3
2814
2815 /* The following table is an efficient representation of an
2816 "optimal power tree". For each value, i, the corresponding
   value, j, in the table states that an optimal evaluation
2818 sequence for calculating pow(x,i) can be found by evaluating
2819 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2820 100 integers is given in Knuth's "Seminumerical algorithms". */
2821
/* NOTE(review): the recursions in powi_lookup_cost and expand_powi_1
   appear to rely on each entry j = powi_table[i] satisfying
   0 < j <= i for every i >= 1 (so i - j and j are both strictly
   smaller subproblems, or i == 1 which callers pre-cache) -- verify
   this invariant if the table is ever regenerated.  */
static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2857
2858
2859 /* Return the number of multiplications required to calculate
2860 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2861 subroutine of powi_cost. CACHE is an array indicating
2862 which exponents have already been calculated. */
2863
2864 static int
2865 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2866 {
2867 /* If we've already calculated this exponent, then this evaluation
2868 doesn't require any additional multiplications. */
2869 if (cache[n])
2870 return 0;
2871
2872 cache[n] = true;
2873 return powi_lookup_cost (n - powi_table[n], cache)
2874 + powi_lookup_cost (powi_table[n], cache) + 1;
2875 }
2876
2877 /* Return the number of multiplications required to calculate
2878 powi(x,n) for an arbitrary x, given the exponent N. This
2879 function needs to be kept in sync with expand_powi below. */
2880
2881 static int
2882 powi_cost (HOST_WIDE_INT n)
2883 {
2884 bool cache[POWI_TABLE_SIZE];
2885 unsigned HOST_WIDE_INT digit;
2886 unsigned HOST_WIDE_INT val;
2887 int result;
2888
2889 if (n == 0)
2890 return 0;
2891
2892 /* Ignore the reciprocal when calculating the cost. */
2893 val = (n < 0) ? -n : n;
2894
2895 /* Initialize the exponent cache. */
2896 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2897 cache[1] = true;
2898
2899 result = 0;
2900
2901 while (val >= POWI_TABLE_SIZE)
2902 {
2903 if (val & 1)
2904 {
2905 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2906 result += powi_lookup_cost (digit, cache)
2907 + POWI_WINDOW_SIZE + 1;
2908 val >>= POWI_WINDOW_SIZE;
2909 }
2910 else
2911 {
2912 val >>= 1;
2913 result++;
2914 }
2915 }
2916
2917 return result + powi_lookup_cost (val, cache);
2918 }
2919
/* Recursive subroutine of expand_powi.  This function takes the array,
   CACHE, of already calculated exponents and an exponent N and returns
   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.  */

static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      /* Small exponents are memoized: reuse a previously computed
	 power when available, otherwise split N according to the
	 optimal power tree in powi_table.  */
      if (cache[n])
	return cache[n];

      /* Record the register before recursing so shared subexpressions
	 hit the cache.  */
      target = gen_reg_rtx (mode);
      cache[n] = target;

      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      /* Odd large exponent: peel off the low POWI_WINDOW_SIZE bits
	 (the "window") and recurse on the remaining part.  */
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      /* Even large exponent: square x**(n/2).  */
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  /* Emit the multiply, making sure the value lands in TARGET since
     TARGET may already be recorded in CACHE above.  */
  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
2961
2962 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2963 floating point operand in mode MODE, and N is the exponent. This
2964 function needs to be kept in sync with powi_cost above. */
2965
2966 static rtx
2967 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2968 {
2969 rtx cache[POWI_TABLE_SIZE];
2970 rtx result;
2971
2972 if (n == 0)
2973 return CONST1_RTX (mode);
2974
2975 memset (cache, 0, sizeof (cache));
2976 cache[1] = x;
2977
2978 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2979
2980 /* If the original exponent was negative, reciprocate the result. */
2981 if (n < 0)
2982 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2983 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2984
2985 return result;
2986 }
2987
2988 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
2989 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
2990 if we can simplify it. */
2991 static rtx
2992 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
2993 rtx subtarget)
2994 {
2995 if (TREE_CODE (arg1) == REAL_CST
2996 && !TREE_OVERFLOW (arg1)
2997 && flag_unsafe_math_optimizations)
2998 {
2999 enum machine_mode mode = TYPE_MODE (type);
3000 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
3001 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
3002 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
3003 tree op = NULL_TREE;
3004
3005 if (sqrtfn)
3006 {
3007 /* Optimize pow (x, 0.5) into sqrt. */
3008 if (REAL_VALUES_EQUAL (c, dconsthalf))
3009 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3010
3011 else
3012 {
3013 REAL_VALUE_TYPE dconst1_4 = dconst1;
3014 REAL_VALUE_TYPE dconst3_4;
3015 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
3016
3017 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
3018 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
3019
3020 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
3021 machines that a builtin sqrt instruction is smaller than a
3022 call to pow with 0.25, so do this optimization even if
3023 -Os. */
3024 if (REAL_VALUES_EQUAL (c, dconst1_4))
3025 {
3026 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3027 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
3028 }
3029
3030 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
3031 are optimizing for space. */
3032 else if (optimize_insn_for_speed_p ()
3033 && !TREE_SIDE_EFFECTS (arg0)
3034 && REAL_VALUES_EQUAL (c, dconst3_4))
3035 {
3036 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
3037 tree sqrt2 = builtin_save_expr (sqrt1);
3038 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
3039 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
3040 }
3041 }
3042 }
3043
3044 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
3045 cbrt/sqrts instead of pow (x, 1./6.). */
3046 if (cbrtfn && ! op
3047 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
3048 {
3049 /* First try 1/3. */
3050 REAL_VALUE_TYPE dconst1_3
3051 = real_value_truncate (mode, dconst_third ());
3052
3053 if (REAL_VALUES_EQUAL (c, dconst1_3))
3054 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
3055
3056 /* Now try 1/6. */
3057 else if (optimize_insn_for_speed_p ())
3058 {
3059 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
3060 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
3061
3062 if (REAL_VALUES_EQUAL (c, dconst1_6))
3063 {
3064 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3065 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3066 }
3067 }
3068 }
3069
3070 if (op)
3071 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3072 }
3073
3074 return NULL_RTX;
3075 }
3076
/* Expand a call to the pow built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* Only constant exponents are treated specially below; everything
     else goes through the generic two-argument math expander.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  /* real_identical confirms the exponent is exactly the integer N.
     Exponents -1..2 are always cheap enough; other values require
     unsafe math, a speed preference, and a bounded multiply count.  */
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && optimize_insn_for_speed_p ()
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* ARG0 may be expanded more than once below; stabilize it.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      /* N becomes 2*c; real_identical below then verifies c == N/2
	 exactly.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && optimize_insn_for_speed_p ()
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      /* Even the c == 0.5 case cannot be done unconditionally
		 when we need to preserve signed zeros, as
		 pow (-0, 0.5) is +0, while sqrt(-0) is -0.  */
	      || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
	      /* For c == 1.5 we can assume that x * sqrt (x) is always
		 smaller than pow (x, 1.5) if sqrt will not be expanded
		 as a call.  */
	      || (n == 3
		  && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)))
	{
	  tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
						  narg0);
	  /* Use expand_expr in case the newly built call expression
	     was folded to a non-call.  */
	  op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
	  if (n != 1)
	    {
	      /* Multiply sqrt (x) by x**|n/2|.
		 NOTE(review): abs takes int while N is HOST_WIDE_INT;
		 this presumably relies on the guards above keeping N
		 within int range -- confirm.  */
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Check whether we can do a series of sqrt or cbrt's instead of the pow
     call.  */
  op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
				subtarget);
  if (op)
    return op;

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      REAL_VALUE_TYPE dconst3;
      real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
      /* N is round (3*c); the division and conversion below then
	 verify that c is exactly N/3 in MODE, so the transformation
	 introduces no extra rounding error.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((optimize_insn_for_speed_p ()
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
						  narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* When |n| % 3 == 2 a second factor of cbrt(x) is needed:
	     square the cbrt result.  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
3228
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
	 Otherwise, check the number of multiplications required.
	 The TREE_INT_CST_HIGH checks ensure the constant fits in a
	 HOST_WIDE_INT (high word is just the sign extension of N).  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
	   || TREE_INT_CST_HIGH (arg1) == -1)
	  && ((n >= -1 && n <= 2)
	      || (optimize_insn_for_speed_p ()
		  && powi_cost (n) <= POWI_MAX_MULTS)))
	{
	  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
	  op0 = force_reg (mode, op0);
	  return expand_powi (op0, mode, n);
	}
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Convert both operands to the modes the libcall expects.  */
  op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
3291
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.
	 Try successively wider modes until one has a strlen pattern.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* Operand 2 of the strlen pattern is the character to search
	 for; strlen always searches for the NUL terminator.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in before the strlen
	 insn emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3400
3401 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3402 bytes from constant string DATA + OFFSET and return it as target
3403 constant. */
3404
3405 static rtx
3406 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3407 enum machine_mode mode)
3408 {
3409 const char *str = (const char *) data;
3410
3411 gcc_assert (offset >= 0
3412 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3413 <= strlen (str) + 1));
3414
3415 return c_readstr (str + offset, mode);
3416 }
3417
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Use profile feedback, when available, as a hint to the
	 block-move expander.  */
      if (currently_expanding_gimple_stmt)
	stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and need only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* emit_block_move_hints may not hand back the destination
	 address; recompute it from DEST_MEM in that case.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3498
3499 /* Expand a call EXP to the mempcpy builtin.
3500 Return NULL_RTX if we failed; the caller should emit a normal call,
3501 otherwise try to get the result in TARGET, if convenient (and in
3502 mode MODE if that's convenient). If ENDP is 0 return the
3503 destination pointer, if ENDP is 1 return the end pointer ala
3504 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3505 stpcpy. */
3506
3507 static rtx
3508 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3509 {
3510 if (!validate_arglist (exp,
3511 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3512 return NULL_RTX;
3513 else
3514 {
3515 tree dest = CALL_EXPR_ARG (exp, 0);
3516 tree src = CALL_EXPR_ARG (exp, 1);
3517 tree len = CALL_EXPR_ARG (exp, 2);
3518 return expand_builtin_mempcpy_args (dest, src, len,
3519 target, mode, /*endp=*/ 1);
3520 }
3521 }
3522
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and need only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  /* ENDP selects which address store_by_pieces returns: start,
	     end, or end minus one.  */
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise try an open-coded piecewise move for small constant
	 lengths.  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3600
3601 #ifndef HAVE_movstr
3602 # define HAVE_movstr 0
3603 # define CODE_FOR_movstr CODE_FOR_nothing
3604 #endif
3605
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  rtx end;
  rtx dest_mem;
  rtx src_mem;
  rtx insn;
  const struct insn_data_d * data;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  data = insn_data + CODE_FOR_movstr;
  if (!endp)
    {
      /* The caller wants the destination pointer: keep it in TARGET
	 and let the pattern write the end pointer into a scratch
	 register.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
      end = gen_reg_rtx (Pmode);
    }
  else
    {
      /* The caller wants the end pointer: reuse TARGET for it if the
	 pattern's operand predicate allows, otherwise use a fresh
	 register.  */
      if (target == 0
	  || target == const0_rtx
	  || ! (*data->operand[0].predicate) (target, Pmode))
	{
	  end = gen_reg_rtx (Pmode);
	  if (target != const0_rtx)
	    target = end;
	}
      else
	end = target;
    }

  /* Adapt END to the mode the pattern's first operand requires.  */
  if (data->operand[0].mode != VOIDmode)
    end = gen_lowpart (data->operand[0].mode, end);

  insn = data->genfun (end, dest_mem, src_mem);

  gcc_assert (insn);

  emit_insn (insn);

  /* movstr is supposed to set end to the address of the NUL
     terminator.  If the caller requested a mempcpy-like return value,
     adjust it.  */
  if (endp == 1 && target != const0_rtx)
    {
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
    }

  return target;
}
3668
3669 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3670 NULL_RTX if we failed the caller should emit a normal call, otherwise
3671 try to get the result in TARGET, if convenient (and in mode MODE if that's
3672 convenient). */
3673
3674 static rtx
3675 expand_builtin_strcpy (tree exp, rtx target)
3676 {
3677 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3678 {
3679 tree dest = CALL_EXPR_ARG (exp, 0);
3680 tree src = CALL_EXPR_ARG (exp, 1);
3681 return expand_builtin_strcpy_args (dest, src, target);
3682 }
3683 return NULL_RTX;
3684 }
3685
3686 /* Helper function to do the actual work for expand_builtin_strcpy. The
3687 arguments to the builtin_strcpy call DEST and SRC are broken out
3688 so that this can also be called without constructing an actual CALL_EXPR.
3689 The other arguments and return value are the same as for
3690 expand_builtin_strcpy. */
3691
3692 static rtx
3693 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3694 {
3695 return expand_movstr (dest, src, target, /*endp=*/0);
3696 }
3697
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* Known length: stpcpy (d, s) is mempcpy (d, s, len + 1) - 1,
	 which is exactly what endp == 2 requests.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      /* Fall back to strcpy and compute the result pointer as
		 DST + LEN ourselves.  */
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3773
3774 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3775 bytes from constant string DATA + OFFSET and return it as target
3776 constant. */
3777
3778 rtx
3779 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3780 enum machine_mode mode)
3781 {
3782 const char *str = (const char *) data;
3783
3784 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3785 return const0_rtx;
3786
3787 return c_readstr (str + offset, mode);
3788 }
3789
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* SLEN becomes strlen (SRC) + 1, the number of bytes the copy
	 of the string proper (including its NUL) occupies.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  /* dest_align == 0 means DEST is not known to be a pointer;
	     without the literal string P we cannot feed the
	     read_str callback.  */
	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  /* strncpy returns DEST; hand back its address in ptr_mode.  */
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3840
3841 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3842 bytes from constant string DATA + OFFSET and return it as target
3843 constant. */
3844
3845 rtx
3846 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3847 enum machine_mode mode)
3848 {
3849 const char *c = (const char *) data;
3850 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3851
3852 memset (p, *c, GET_MODE_SIZE (mode));
3853
3854 return c_readstr (p, mode);
3855 }
3856
3857 /* Callback routine for store_by_pieces. Return the RTL of a register
3858 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3859 char value given in the RTL register data. For example, if mode is
3860 4 bytes wide, return the RTL for 0x01010101*data. */
3861
3862 static rtx
3863 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3864 enum machine_mode mode)
3865 {
3866 rtx target, coeff;
3867 size_t size;
3868 char *p;
3869
3870 size = GET_MODE_SIZE (mode);
3871 if (size == 1)
3872 return (rtx) data;
3873
3874 p = XALLOCAVEC (char, size);
3875 memset (p, 1, size);
3876 coeff = c_readstr (p, mode);
3877
3878 target = convert_to_mode (mode, (rtx) data, 1);
3879 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3880 return force_reg (mode, target);
3881 }
3882
3883 /* Expand expression EXP, which is a call to the memset builtin. Return
3884 NULL_RTX if we failed the caller should emit a normal call, otherwise
3885 try to get the result in TARGET, if convenient (and in mode MODE if that's
3886 convenient). */
3887
3888 static rtx
3889 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3890 {
3891 if (!validate_arglist (exp,
3892 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3893 return NULL_RTX;
3894 else
3895 {
3896 tree dest = CALL_EXPR_ARG (exp, 0);
3897 tree val = CALL_EXPR_ARG (exp, 1);
3898 tree len = CALL_EXPR_ARG (exp, 2);
3899 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3900 }
3901 }
3902
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  ORIG_EXP is the original call, used to pick
   the fallback library function (memset vs. bzero) and its location.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Pick up block-size/alignment hints recorded by profiling, if any.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      /* memset returns DEST; produce its address in ptr_mode.  */
      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Non-zero constant byte: try store_by_pieces, then the setmem
     pattern.  */
  if (c)
    {
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* VAL is a constant zero byte: clear the block.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed; emit a call to the original builtin
     (memset or bzero) using the stabilized arguments so they are not
     evaluated twice.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
4033
4034 /* Expand expression EXP, which is a call to the bzero builtin. Return
4035 NULL_RTX if we failed the caller should emit a normal call. */
4036
4037 static rtx
4038 expand_builtin_bzero (tree exp)
4039 {
4040 tree dest, size;
4041 location_t loc = EXPR_LOCATION (exp);
4042
4043 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4044 return NULL_RTX;
4045
4046 dest = CALL_EXPR_ARG (exp, 0);
4047 size = CALL_EXPR_ARG (exp, 1);
4048
4049 /* New argument list transforming bzero(ptr x, int y) to
4050 memset(ptr x, int 0, size_t y). This is done this way
4051 so that if it isn't expanded inline, we fallback to
4052 calling bzero instead of memset. */
4053
4054 return expand_builtin_memset_args (dest, integer_zero_node,
4055 fold_convert_loc (loc, sizetype, size),
4056 const0_rtx, VOIDmode, exp);
4057 }
4058
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient (and in mode MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    unsigned int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Prefer the cmpmem pattern; otherwise fall back to cmpstrn, which
       takes the same operands.  If the target has neither, punt.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    if (insn)
      emit_insn (insn);
    else
      /* The pattern's expander declined at expansion time; emit a call
	 to the memcmp library function instead.  */
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
4164
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      unsigned int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  /* Lengths here are strlen + 1 so the comparison covers the
	     terminating NUL.  */
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4307
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      unsigned int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Bump the known lengths to include the terminating NUL.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4430
/* Expand a call to __builtin_saveregs.  The expansion is cached in
   saveregs_value, so this is done at most once per function; the insns
   produced by the target hook are moved to the start of the function.
   Returns the rtx holding the saved-register area.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4467
4468 /* Expand a call to __builtin_next_arg. */
4469
4470 static rtx
4471 expand_builtin_next_arg (void)
4472 {
4473 /* Checking arguments is already done in fold_builtin_next_arg
4474 that must be called before this function. */
4475 return expand_binop (ptr_mode, add_optab,
4476 crtl->args.internal_arg_pointer,
4477 crtl->args.arg_offset_rtx,
4478 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4479 }
4480
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  If NEEDS_LVALUE is nonzero, the returned
   tree must be usable as an lvalue (the backend will assign to it);
   otherwise a plain rvalue suffices.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  /* Evaluate VALIST's address once and mark the side effects
	     so the save_expr below takes hold.  */
	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Re-dereference through the (now stabilized) address.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4530
/* The "standard" definition of va_list is void* (ptr_type_node).  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
4538
/* The "standard" abi va_list is va_list_type_node; FNDECL is ignored
   since the default ABI does not vary per function.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
4546
/* The "standard" type of va_list is va_list_type_node.  Return it if
   TYPE (possibly wrapped in a pointer/indirection, or decayed from an
   array) matches va_list_type_node; otherwise return NULL_TREE.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection: the caller may hand us the type
     of a va_list object rather than the va_list type itself.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4581
4582 /* The "standard" implementation of va_start: just assign `nextarg' to
4583 the variable. */
4584
4585 void
4586 std_expand_builtin_va_start (tree valist, rtx nextarg)
4587 {
4588 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4589 convert_move (va_r, nextarg, 0);
4590 }
4591
/* Expand EXP, a call to __builtin_va_start.  va_start produces no
   value, so this always returns const0_rtx (also after diagnosing
   a malformed call).  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses a bad second argument; a nonzero
     return means an error was reported, so just give up.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  /* Stabilize the va_list argument; 1 = an lvalue is required since
     the backend assigns to it.  */
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
4620
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.
   VALIST is the (stabilized) va_list expression, TYPE the requested
   argument type; setup statements go to PRE_P and statements that must
   follow the read to POST_P.  Returns a tree dereferencing the
   argument slot.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference are fetched as a pointer and
     dereferenced once more at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (POINTER_PLUS_EXPR,
			       TREE_TYPE (valist),
			       valist_tmp, size_int (boundary - 1)));
      gimplify_and_add (t, pre_p);

      t = fold_convert (sizetype, valist_tmp);
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_convert (TREE_TYPE (valist),
				fold_build2 (BIT_AND_EXPR, sizetype, t,
					     size_int (-boundary))));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build2 (POINTER_PLUS_EXPR,
			  TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For by-reference arguments, a second dereference unwraps the
     pointer that was fetched above.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4721
4722 /* Build an indirect-ref expression over the given TREE, which represents a
4723 piece of a va_arg() expansion. */
4724 tree
4725 build_va_arg_indirect_ref (tree addr)
4726 {
4727 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4728
4729 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4730 mf_mark (addr);
4731
4732 return addr;
4733 }
4734
4735 /* Return a dummy expression of type TYPE in order to keep going after an
4736 error. */
4737
4738 static tree
4739 dummy_object (tree type)
4740 {
4741 tree t = build_int_cst (build_pointer_type (type), 0);
4742 return build1 (INDIRECT_REF, type, t);
4743 }
4744
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   EXPR_P points at the VA_ARG_EXPR; gimplified statements are appended
   to PRE_P / POST_P.  Returns a gimplify status code (GS_ERROR on a
   malformed va_list argument).  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  /* NULL_TREE here means the argument is not any flavor of va_list.  */
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* The help note is emitted at most once per compilation.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       implicit_built_in_decls[BUILT_IN_TRAP], 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      /* Let the target expand the va_arg access itself.  */
      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4837
4838 /* Expand EXP, a call to __builtin_va_end. */
4839
4840 static rtx
4841 expand_builtin_va_end (tree exp)
4842 {
4843 tree valist = CALL_EXPR_ARG (exp, 0);
4844
4845 /* Evaluate for side effects, if needed. I hate macros that don't
4846 do that. */
4847 if (TREE_SIDE_EFFECTS (valist))
4848 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4849
4850 return const0_rtx;
4851 }
4852
4853 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4854 builtin rather than just as an assignment in stdarg.h because of the
4855 nastiness of array-type va_list types. */
4856
4857 static rtx
4858 expand_builtin_va_copy (tree exp)
4859 {
4860 tree dst, src, t;
4861 location_t loc = EXPR_LOCATION (exp);
4862
4863 dst = CALL_EXPR_ARG (exp, 0);
4864 src = CALL_EXPR_ARG (exp, 1);
4865
4866 dst = stabilize_va_list_loc (loc, dst, 1);
4867 src = stabilize_va_list_loc (loc, src, 0);
4868
4869 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4870
4871 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4872 {
4873 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4874 TREE_SIDE_EFFECTS (t) = 1;
4875 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4876 }
4877 else
4878 {
4879 rtx dstb, srcb, size;
4880
4881 /* Evaluate to pointers. */
4882 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4883 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4884 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4885 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4886
4887 dstb = convert_memory_address (Pmode, dstb);
4888 srcb = convert_memory_address (Pmode, srcb);
4889
4890 /* "Dereference" to BLKmode memories. */
4891 dstb = gen_rtx_MEM (BLKmode, dstb);
4892 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4893 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4894 srcb = gen_rtx_MEM (BLKmode, srcb);
4895 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4896 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4897
4898 /* Copy. */
4899 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4900 }
4901
4902 return const0_rtx;
4903 }
4904
4905 /* Expand a call to one of the builtin functions __builtin_frame_address or
4906 __builtin_return_address. */
4907
4908 static rtx
4909 expand_builtin_frame_address (tree fndecl, tree exp)
4910 {
4911 /* The argument must be a nonnegative integer constant.
4912 It counts the number of frames to scan up the stack.
4913 The value is the return address saved in that frame. */
4914 if (call_expr_nargs (exp) == 0)
4915 /* Warning about missing arg was already issued. */
4916 return const0_rtx;
4917 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4918 {
4919 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4920 error ("invalid argument to %<__builtin_frame_address%>");
4921 else
4922 error ("invalid argument to %<__builtin_return_address%>");
4923 return const0_rtx;
4924 }
4925 else
4926 {
4927 rtx tem
4928 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4929 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4930
4931 /* Some ports cannot access arbitrary stack frames. */
4932 if (tem == NULL)
4933 {
4934 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4935 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4936 else
4937 warning (0, "unsupported argument to %<__builtin_return_address%>");
4938 return const0_rtx;
4939 }
4940
4941 /* For __builtin_frame_address, return what we've got. */
4942 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4943 return tem;
4944
4945 if (!REG_P (tem)
4946 && ! CONSTANT_P (tem))
4947 tem = copy_to_mode_reg (Pmode, tem);
4948 return tem;
4949 }
4950 }
4951
4952 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4953 we failed and the caller should emit a normal call, otherwise try to get
4954 the result in TARGET, if convenient. */
4955
4956 static rtx
4957 expand_builtin_alloca (tree exp, rtx target)
4958 {
4959 rtx op0;
4960 rtx result;
4961
4962 /* Emit normal call if marked not-inlineable. */
4963 if (CALL_CANNOT_INLINE_P (exp))
4964 return NULL_RTX;
4965
4966 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4967 return NULL_RTX;
4968
4969 /* Compute the argument. */
4970 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4971
4972 /* Allocate the desired space. */
4973 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4974 result = convert_memory_address (ptr_mode, result);
4975
4976 return result;
4977 }
4978
4979 /* Expand a call to a bswap builtin with argument ARG0. MODE
4980 is the mode to expand with. */
4981
4982 static rtx
4983 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4984 {
4985 enum machine_mode mode;
4986 tree arg;
4987 rtx op0;
4988
4989 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4990 return NULL_RTX;
4991
4992 arg = CALL_EXPR_ARG (exp, 0);
4993 mode = TYPE_MODE (TREE_TYPE (arg));
4994 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4995
4996 target = expand_unop (mode, bswap_optab, op0, target, 1);
4997
4998 gcc_assert (target);
4999
5000 return convert_to_mode (mode, target, 0);
5001 }
5002
5003 /* Expand a call to a unary builtin in EXP.
5004 Return NULL_RTX if a normal call should be emitted rather than expanding the
5005 function in-line. If convenient, the result should be placed in TARGET.
5006 SUBTARGET may be used as the target for computing one of EXP's operands. */
5007
5008 static rtx
5009 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5010 rtx subtarget, optab op_optab)
5011 {
5012 rtx op0;
5013
5014 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5015 return NULL_RTX;
5016
5017 /* Compute the argument. */
5018 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5019 VOIDmode, EXPAND_NORMAL);
5020 /* Compute op, into TARGET if possible.
5021 Set TARGET to wherever the result comes back. */
5022 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5023 op_optab, op0, target, 1);
5024 gcc_assert (target);
5025
5026 return convert_to_mode (target_mode, target, 0);
5027 }
5028
5029 /* Expand a call to __builtin_expect. We just return our argument
5030 as the builtin_expect semantic should've been already executed by
5031 tree branch prediction pass. */
5032
5033 static rtx
5034 expand_builtin_expect (tree exp, rtx target)
5035 {
5036 tree arg;
5037
5038 if (call_expr_nargs (exp) < 2)
5039 return const0_rtx;
5040 arg = CALL_EXPR_ARG (exp, 0);
5041
5042 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5043 /* When guessing was done, the hints should be already stripped away. */
5044 gcc_assert (!flag_guess_branch_prob
5045 || optimize == 0 || seen_error ());
5046 return target;
5047 }
5048
/* Expand a call to __builtin_trap.  Emit the target's trap instruction
   if it has one, otherwise fall back to a call to abort; in either case
   end with a barrier, since control does not continue past the trap.  */

void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
  emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
5060
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  /* No insns are generated; the barrier alone marks the flow edge dead.  */
  emit_barrier ();
}
5071
5072 /* Expand EXP, a call to fabs, fabsf or fabsl.
5073 Return NULL_RTX if a normal call should be emitted rather than expanding
5074 the function inline. If convenient, the result should be placed
5075 in TARGET. SUBTARGET may be used as the target for computing
5076 the operand. */
5077
5078 static rtx
5079 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5080 {
5081 enum machine_mode mode;
5082 tree arg;
5083 rtx op0;
5084
5085 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5086 return NULL_RTX;
5087
5088 arg = CALL_EXPR_ARG (exp, 0);
5089 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5090 mode = TYPE_MODE (TREE_TYPE (arg));
5091 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5092 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5093 }
5094
5095 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5096 Return NULL is a normal call should be emitted rather than expanding the
5097 function inline. If convenient, the result should be placed in TARGET.
5098 SUBTARGET may be used as the target for computing the operand. */
5099
5100 static rtx
5101 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5102 {
5103 rtx op0, op1;
5104 tree arg;
5105
5106 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5107 return NULL_RTX;
5108
5109 arg = CALL_EXPR_ARG (exp, 0);
5110 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5111
5112 arg = CALL_EXPR_ARG (exp, 1);
5113 op1 = expand_normal (arg);
5114
5115 return expand_copysign (op0, op1, target);
5116 }
5117
5118 /* Create a new constant string literal and return a char* pointer to it.
5119 The STRING_CST value is the LEN characters at STR. */
5120 tree
5121 build_string_literal (int len, const char *str)
5122 {
5123 tree t, elem, index, type;
5124
5125 t = build_string (len, str);
5126 elem = build_type_variant (char_type_node, 1, 0);
5127 index = build_index_type (size_int (len - 1));
5128 type = build_array_type (elem, index);
5129 TREE_TYPE (t) = type;
5130 TREE_CONSTANT (t) = 1;
5131 TREE_READONLY (t) = 1;
5132 TREE_STATIC (t) = 1;
5133
5134 type = build_pointer_type (elem);
5135 t = build1 (ADDR_EXPR, type,
5136 build4 (ARRAY_REF, elem,
5137 t, integer_zero_node, NULL_TREE, NULL_TREE));
5138 return t;
5139 }
5140
5141 /* Expand a call to either the entry or exit function profiler. */
5142
5143 static rtx
5144 expand_builtin_profile_func (bool exitp)
5145 {
5146 rtx this_rtx, which;
5147
5148 this_rtx = DECL_RTL (current_function_decl);
5149 gcc_assert (MEM_P (this_rtx));
5150 this_rtx = XEXP (this_rtx, 0);
5151
5152 if (exitp)
5153 which = profile_function_exit_libfunc;
5154 else
5155 which = profile_function_entry_libfunc;
5156
5157 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5158 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5159 0),
5160 Pmode);
5161
5162 return const0_rtx;
5163 }
5164
/* Expand a call to __builtin___clear_cache.  Returns NULL_RTX when the
   default expansion (a call to libgcc's __clear_cache) should be used,
   const0_rtx when the call can be elided or was expanded inline.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;
  enum insn_code icode;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      icode = CODE_FOR_clear_cache;

      /* Force each pointer argument into a form the insn predicates
	 will accept.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
      begin_rtx = convert_memory_address (Pmode, begin_rtx);
      if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
	begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
      end_rtx = convert_memory_address (Pmode, end_rtx);
      if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
	end_rtx = copy_to_mode_reg (Pmode, end_rtx);

      emit_insn (gen_clear_cache (begin_rtx, end_rtx));
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
5217
5218 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5219
5220 static rtx
5221 round_trampoline_addr (rtx tramp)
5222 {
5223 rtx temp, addend, mask;
5224
5225 /* If we don't need too much alignment, we'll have been guaranteed
5226 proper alignment by get_trampoline_type. */
5227 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5228 return tramp;
5229
5230 /* Round address up to desired boundary. */
5231 temp = gen_reg_rtx (Pmode);
5232 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5233 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5234
5235 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5236 temp, 0, OPTAB_LIB_WIDEN);
5237 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5238 temp, 0, OPTAB_LIB_WIDEN);
5239
5240 return tramp;
5241 }
5242
/* Expand a call to __builtin_init_trampoline.  EXP carries the trampoline
   address, the nested function's address, and the static chain value.
   Emits the target-specific initialization and warns under
   -Wtrampolines.  */

static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* The TRAMP argument should be the address of a field within the
     local function's FRAME decl.  Let's see if we can fill in the
     MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
				     true, 0);

  /* Re-point the MEM at the aligned address if rounding moved it.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  trampolines_created = 1;

  warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
	      "trampoline generated for nested function %qD", t_func);

  return const0_rtx;
}
5294
5295 static rtx
5296 expand_builtin_adjust_trampoline (tree exp)
5297 {
5298 rtx tramp;
5299
5300 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5301 return NULL_RTX;
5302
5303 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5304 tramp = round_trampoline_addr (tramp);
5305 if (targetm.calls.trampoline_adjust_address)
5306 tramp = targetm.calls.trampoline_adjust_address (tramp);
5307
5308 return tramp;
5309 }
5310
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn could not be emitted; discard any partial output and
	 fall through to the bit-extraction strategy.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The whole value fits in one word: reinterpret it as an integer.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word value: pick out the word that holds the sign bit.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implement with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      double_int mask = double_int_setbit (double_int_zero, bitpos);

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5422
5423 /* Expand fork or exec calls. TARGET is the desired target of the
5424 call. EXP is the call. FN is the
5425 identificator of the actual function. IGNORE is nonzero if the
5426 value is to be ignored. */
5427
5428 static rtx
5429 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5430 {
5431 tree id, decl;
5432 tree call;
5433
5434 /* If we are not profiling, just call the function. */
5435 if (!profile_arc_flag)
5436 return NULL_RTX;
5437
5438 /* Otherwise call the wrapper. This should be equivalent for the rest of
5439 compiler, so the code does not diverge, and the wrapper may run the
5440 code necessary for keeping the profiling sane. */
5441
5442 switch (DECL_FUNCTION_CODE (fn))
5443 {
5444 case BUILT_IN_FORK:
5445 id = get_identifier ("__gcov_fork");
5446 break;
5447
5448 case BUILT_IN_EXECL:
5449 id = get_identifier ("__gcov_execl");
5450 break;
5451
5452 case BUILT_IN_EXECV:
5453 id = get_identifier ("__gcov_execv");
5454 break;
5455
5456 case BUILT_IN_EXECLP:
5457 id = get_identifier ("__gcov_execlp");
5458 break;
5459
5460 case BUILT_IN_EXECLE:
5461 id = get_identifier ("__gcov_execle");
5462 break;
5463
5464 case BUILT_IN_EXECVP:
5465 id = get_identifier ("__gcov_execvp");
5466 break;
5467
5468 case BUILT_IN_EXECVE:
5469 id = get_identifier ("__gcov_execve");
5470 break;
5471
5472 default:
5473 gcc_unreachable ();
5474 }
5475
5476 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5477 FUNCTION_DECL, id, TREE_TYPE (fn));
5478 DECL_EXTERNAL (decl) = 1;
5479 TREE_PUBLIC (decl) = 1;
5480 DECL_ARTIFICIAL (decl) = 1;
5481 TREE_NOTHROW (decl) = 1;
5482 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5483 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5484 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5485 return expand_call (call, target, ignore);
5486 }
5487
5488
5489 \f
5490 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5491 the pointer in these functions is void*, the tree optimizers may remove
5492 casts. The mode computed in expand_builtin isn't reliable either, due
5493 to __sync_bool_compare_and_swap.
5494
5495 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5496 group of builtins. This gives us log2 of the mode size. */
5497
5498 static inline enum machine_mode
5499 get_builtin_sync_mode (int fcode_diff)
5500 {
5501 /* The size is not negotiable, so ask not to get BLKmode in return
5502 if the target indicates that a smaller size would be better. */
5503 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5504 }
5505
5506 /* Expand the memory expression LOC and return the appropriate memory operand
5507 for the builtin_sync operations. */
5508
5509 static rtx
5510 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5511 {
5512 rtx addr, mem;
5513
5514 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5515 addr = convert_memory_address (Pmode, addr);
5516
5517 /* Note that we explicitly do not want any alias information for this
5518 memory, so that we kill all other live memories. Otherwise we don't
5519 satisfy the full barrier semantics of the intrinsic. */
5520 mem = validize_mem (gen_rtx_MEM (mode, addr));
5521
5522 /* The alignment needs to be at least according to that of the mode. */
5523 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5524 get_pointer_alignment (loc, BIGGEST_ALIGNMENT)));
5525 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5526 MEM_VOLATILE_P (mem) = 1;
5527
5528 return mem;
5529 }
5530
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  IGNORE is true if we don't actually care about
   the result of the operation at all.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target, bool ignore)
{
  rtx val, mem;
  enum machine_mode old_mode;
  location_t loc = EXPR_LOCATION (exp);

  /* The NAND builtins changed semantics in GCC 4.4; under -Wsync-nand,
     tell the user once per flavor (fetch-and-nand / nand-and-fetch).  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_FETCH_AND_NAND_1:
	case BUILT_IN_FETCH_AND_NAND_2:
	case BUILT_IN_FETCH_AND_NAND_4:
	case BUILT_IN_FETCH_AND_NAND_8:
	case BUILT_IN_FETCH_AND_NAND_16:

	  if (warned_f_a_n)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_NAND_AND_FETCH_1:
	case BUILT_IN_NAND_AND_FETCH_2:
	case BUILT_IN_NAND_AND_FETCH_4:
	case BUILT_IN_NAND_AND_FETCH_8:
	case BUILT_IN_NAND_AND_FETCH_16:

	  if (warned_n_a_f)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  if (ignore)
    return expand_sync_operation (mem, val, code);
  else
    return expand_sync_fetch_operation (mem, val, code, after, target);
}
5606
5607 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5608 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5609 true if this is the boolean form. TARGET is a place for us to store the
5610 results; this is NOT optional if IS_BOOL is true. */
5611
5612 static rtx
5613 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5614 bool is_bool, rtx target)
5615 {
5616 rtx old_val, new_val, mem;
5617 enum machine_mode old_mode;
5618
5619 /* Expand the operands. */
5620 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5621
5622
5623 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5624 mode, EXPAND_NORMAL);
5625 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5626 of CONST_INTs, where we know the old_mode only from the call argument. */
5627 old_mode = GET_MODE (old_val);
5628 if (old_mode == VOIDmode)
5629 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5630 old_val = convert_modes (mode, old_mode, old_val, 1);
5631
5632 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5633 mode, EXPAND_NORMAL);
5634 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5635 of CONST_INTs, where we know the old_mode only from the call argument. */
5636 old_mode = GET_MODE (new_val);
5637 if (old_mode == VOIDmode)
5638 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5639 new_val = convert_modes (mode, old_mode, new_val, 1);
5640
5641 if (is_bool)
5642 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5643 else
5644 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5645 }
5646
5647 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5648 general form is actually an atomic exchange, and some targets only
5649 support a reduced form with the second argument being a constant 1.
5650 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5651 the results. */
5652
5653 static rtx
5654 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5655 rtx target)
5656 {
5657 rtx val, mem;
5658 enum machine_mode old_mode;
5659
5660 /* Expand the operands. */
5661 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5662 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5663 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5664 of CONST_INTs, where we know the old_mode only from the call argument. */
5665 old_mode = GET_MODE (val);
5666 if (old_mode == VOIDmode)
5667 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5668 val = convert_modes (mode, old_mode, val, 1);
5669
5670 return expand_sync_lock_test_and_set (mem, val, target);
5671 }
5672
/* Expand the __sync_synchronize intrinsic.  Prefers, in order: the
   target's memory_barrier insn, the synchronize libfunc, and finally a
   volatile asm with a "memory" clobber.  */

static void
expand_builtin_synchronize (void)
{
  gimple x;
  VEC (tree, gc) *v_clobbers;

#ifdef HAVE_memory_barrier
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  if (synchronize_libfunc != NULL_RTX)
    {
      emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
      return;
    }

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  v_clobbers = VEC_alloc (tree, gc, 1);
  VEC_quick_push (tree, v_clobbers,
		  tree_cons (NULL, build_string (6, "memory"), NULL));
  x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
  gimple_asm_set_volatile (x, true);
  expand_asm_stmt (x);
}
5704
5705 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5706
5707 static void
5708 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5709 {
5710 enum insn_code icode;
5711 rtx mem, insn;
5712 rtx val = const0_rtx;
5713
5714 /* Expand the operands. */
5715 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5716
5717 /* If there is an explicit operation in the md file, use it. */
5718 icode = direct_optab_handler (sync_lock_release_optab, mode);
5719 if (icode != CODE_FOR_nothing)
5720 {
5721 if (!insn_data[icode].operand[1].predicate (val, mode))
5722 val = force_reg (mode, val);
5723
5724 insn = GEN_FCN (icode) (mem, val);
5725 if (insn)
5726 {
5727 emit_insn (insn);
5728 return;
5729 }
5730 }
5731
5732 /* Otherwise we can implement this operation by emitting a barrier
5733 followed by a store of zero. */
5734 expand_builtin_synchronize ();
5735 emit_move_insn (mem, val);
5736 }
5737 \f
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.

   Returns an rtx holding the call's value (const0_rtx when the value
   is ignored or the builtin expands to a statement).  Any builtin that
   cannot be expanded inline falls through to the ordinary library call
   emitted at the bottom of the function.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));  /* Mode of EXP's result type.  */
  int flags;

  /* Machine-specific builtins are expanded entirely by the target hook.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && DECL_ASSEMBLER_NAME_SET_P (fndecl)
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_FREE)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (! volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  /* Dispatch on the function code.  A case that cannot expand inline
     simply breaks out of the switch, falling through to the library
     call at the bottom.  */
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
    CASE_FLT_FN (BUILT_IN_EXP2):
    CASE_FLT_FN (BUILT_IN_EXPM1):
    CASE_FLT_FN (BUILT_IN_LOGB):
    CASE_FLT_FN (BUILT_IN_LOG):
    CASE_FLT_FN (BUILT_IN_LOG10):
    CASE_FLT_FN (BUILT_IN_LOG2):
    CASE_FLT_FN (BUILT_IN_LOG1P):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ACOS):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
	 because of possible accuracy problems.  */
      if (! flag_unsafe_math_optimizations)
	break;
    CASE_FLT_FN (BUILT_IN_SQRT):
    CASE_FLT_FN (BUILT_IN_FLOOR):
    CASE_FLT_FN (BUILT_IN_CEIL):
    CASE_FLT_FN (BUILT_IN_TRUNC):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
	break;
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POW):
      target = expand_builtin_pow (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
    CASE_FLT_FN (BUILT_IN_LDEXP):
    CASE_FLT_FN (BUILT_IN_SCALB):
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_FMOD):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target, subtarget);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_sincos (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
	return const0_rtx;
      return expand_builtin_next_arg ();

    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      /* By RTL-expansion time nothing further can be proved constant.  */
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
      target = expand_builtin_alloca (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;

    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (exp, target, subtarget);

      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
    case BUILT_IN_FFSIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
    case BUILT_IN_CLZIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
    case BUILT_IN_CTZIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
    case BUILT_IN_POPCOUNTIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
    case BUILT_IN_PARITYIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
	  and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx label_r = label_rtx (label);

	  /* This is copied from the handling of non-local gotos.  */
	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_EXPR_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_DISPATCHER:
       /* __builtin_setjmp_dispatcher is passed the dispatcher label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx label_r = label_rtx (label);

	  /* Remove the dispatcher label from the list of non-local labels
	     since the receiver labels have been added to it above.  */
	  remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
       /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  return const0_rtx;
	}
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
				CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
#ifdef EH_RETURN_DATA_REGNO
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
#endif
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);

    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_PROFILE_FUNC_ENTER:
      return expand_builtin_profile_func (false);
    case BUILT_IN_PROFILE_FUNC_EXIT:
      return expand_builtin_profile_func (true);

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;

      /* The remaining cases implement the __sync_* atomic builtins;
	 the _1/_2/_4/_8/_16 suffix selects the access mode.  */
    case BUILT_IN_FETCH_AND_ADD_1:
    case BUILT_IN_FETCH_AND_ADD_2:
    case BUILT_IN_FETCH_AND_ADD_4:
    case BUILT_IN_FETCH_AND_ADD_8:
    case BUILT_IN_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_SUB_1:
    case BUILT_IN_FETCH_AND_SUB_2:
    case BUILT_IN_FETCH_AND_SUB_4:
    case BUILT_IN_FETCH_AND_SUB_8:
    case BUILT_IN_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_OR_1:
    case BUILT_IN_FETCH_AND_OR_2:
    case BUILT_IN_FETCH_AND_OR_4:
    case BUILT_IN_FETCH_AND_OR_8:
    case BUILT_IN_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_AND_1:
    case BUILT_IN_FETCH_AND_AND_2:
    case BUILT_IN_FETCH_AND_AND_4:
    case BUILT_IN_FETCH_AND_AND_8:
    case BUILT_IN_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_XOR_1:
    case BUILT_IN_FETCH_AND_XOR_2:
    case BUILT_IN_FETCH_AND_XOR_4:
    case BUILT_IN_FETCH_AND_XOR_8:
    case BUILT_IN_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_NAND_1:
    case BUILT_IN_FETCH_AND_NAND_2:
    case BUILT_IN_FETCH_AND_NAND_4:
    case BUILT_IN_FETCH_AND_NAND_8:
    case BUILT_IN_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_ADD_AND_FETCH_1:
    case BUILT_IN_ADD_AND_FETCH_2:
    case BUILT_IN_ADD_AND_FETCH_4:
    case BUILT_IN_ADD_AND_FETCH_8:
    case BUILT_IN_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_SUB_AND_FETCH_1:
    case BUILT_IN_SUB_AND_FETCH_2:
    case BUILT_IN_SUB_AND_FETCH_4:
    case BUILT_IN_SUB_AND_FETCH_8:
    case BUILT_IN_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_OR_AND_FETCH_1:
    case BUILT_IN_OR_AND_FETCH_2:
    case BUILT_IN_OR_AND_FETCH_4:
    case BUILT_IN_OR_AND_FETCH_8:
    case BUILT_IN_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_AND_AND_FETCH_1:
    case BUILT_IN_AND_AND_FETCH_2:
    case BUILT_IN_AND_AND_FETCH_4:
    case BUILT_IN_AND_AND_FETCH_8:
    case BUILT_IN_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_XOR_AND_FETCH_1:
    case BUILT_IN_XOR_AND_FETCH_2:
    case BUILT_IN_XOR_AND_FETCH_4:
    case BUILT_IN_XOR_AND_FETCH_8:
    case BUILT_IN_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_NAND_AND_FETCH_1:
    case BUILT_IN_NAND_AND_FETCH_2:
    case BUILT_IN_NAND_AND_FETCH_4:
    case BUILT_IN_NAND_AND_FETCH_8:
    case BUILT_IN_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_LOCK_TEST_AND_SET_1:
    case BUILT_IN_LOCK_TEST_AND_SET_2:
    case BUILT_IN_LOCK_TEST_AND_SET_4:
    case BUILT_IN_LOCK_TEST_AND_SET_8:
    case BUILT_IN_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
      target = expand_builtin_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_LOCK_RELEASE_1:
    case BUILT_IN_LOCK_RELEASE_2:
    case BUILT_IN_LOCK_RELEASE_4:
    case BUILT_IN_LOCK_RELEASE_8:
    case BUILT_IN_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
      expand_builtin_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNCHRONIZE:
      expand_builtin_synchronize ();
      return const0_rtx;

    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

      /* For the remaining _chk builtins we only diagnose likely
	 overflows and then fall back to the library call.  */
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      maybe_emit_free_warning (exp);
      break;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
6551
6552 /* Determine whether a tree node represents a call to a built-in
6553 function. If the tree T is a call to a built-in function with
6554 the right number of arguments of the appropriate types, return
6555 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6556 Otherwise the return value is END_BUILTINS. */
6557
6558 enum built_in_function
6559 builtin_mathfn_code (const_tree t)
6560 {
6561 const_tree fndecl, arg, parmlist;
6562 const_tree argtype, parmtype;
6563 const_call_expr_arg_iterator iter;
6564
6565 if (TREE_CODE (t) != CALL_EXPR
6566 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6567 return END_BUILTINS;
6568
6569 fndecl = get_callee_fndecl (t);
6570 if (fndecl == NULL_TREE
6571 || TREE_CODE (fndecl) != FUNCTION_DECL
6572 || ! DECL_BUILT_IN (fndecl)
6573 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6574 return END_BUILTINS;
6575
6576 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6577 init_const_call_expr_arg_iterator (t, &iter);
6578 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6579 {
6580 /* If a function doesn't take a variable number of arguments,
6581 the last element in the list will have type `void'. */
6582 parmtype = TREE_VALUE (parmlist);
6583 if (VOID_TYPE_P (parmtype))
6584 {
6585 if (more_const_call_expr_args_p (&iter))
6586 return END_BUILTINS;
6587 return DECL_FUNCTION_CODE (fndecl);
6588 }
6589
6590 if (! more_const_call_expr_args_p (&iter))
6591 return END_BUILTINS;
6592
6593 arg = next_const_call_expr_arg (&iter);
6594 argtype = TREE_TYPE (arg);
6595
6596 if (SCALAR_FLOAT_TYPE_P (parmtype))
6597 {
6598 if (! SCALAR_FLOAT_TYPE_P (argtype))
6599 return END_BUILTINS;
6600 }
6601 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6602 {
6603 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6604 return END_BUILTINS;
6605 }
6606 else if (POINTER_TYPE_P (parmtype))
6607 {
6608 if (! POINTER_TYPE_P (argtype))
6609 return END_BUILTINS;
6610 }
6611 else if (INTEGRAL_TYPE_P (parmtype))
6612 {
6613 if (! INTEGRAL_TYPE_P (argtype))
6614 return END_BUILTINS;
6615 }
6616 else
6617 return END_BUILTINS;
6618 }
6619
6620 /* Variable-length argument list. */
6621 return DECL_FUNCTION_CODE (fndecl);
6622 }
6623
6624 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6625 evaluate to a constant. */
6626
6627 static tree
6628 fold_builtin_constant_p (tree arg)
6629 {
6630 /* We return 1 for a numeric type that's known to be a constant
6631 value at compile-time or for an aggregate type that's a
6632 literal constant. */
6633 STRIP_NOPS (arg);
6634
6635 /* If we know this is a constant, emit the constant of one. */
6636 if (CONSTANT_CLASS_P (arg)
6637 || (TREE_CODE (arg) == CONSTRUCTOR
6638 && TREE_CONSTANT (arg)))
6639 return integer_one_node;
6640 if (TREE_CODE (arg) == ADDR_EXPR)
6641 {
6642 tree op = TREE_OPERAND (arg, 0);
6643 if (TREE_CODE (op) == STRING_CST
6644 || (TREE_CODE (op) == ARRAY_REF
6645 && integer_zerop (TREE_OPERAND (op, 1))
6646 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6647 return integer_one_node;
6648 }
6649
6650 /* If this expression has side effects, show we don't know it to be a
6651 constant. Likewise if it's a pointer or aggregate type since in
6652 those case we only want literals, since those are only optimized
6653 when generating RTL, not later.
6654 And finally, if we are compiling an initializer, not code, we
6655 need to return a definite result now; there's not going to be any
6656 more optimization done. */
6657 if (TREE_SIDE_EFFECTS (arg)
6658 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6659 || POINTER_TYPE_P (TREE_TYPE (arg))
6660 || cfun == 0
6661 || folding_initializer)
6662 return integer_zero_node;
6663
6664 return NULL_TREE;
6665 }
6666
6667 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6668 return it as a truthvalue. */
6669
6670 static tree
6671 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6672 {
6673 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6674
6675 fn = built_in_decls[BUILT_IN_EXPECT];
6676 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6677 ret_type = TREE_TYPE (TREE_TYPE (fn));
6678 pred_type = TREE_VALUE (arg_types);
6679 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6680
6681 pred = fold_convert_loc (loc, pred_type, pred);
6682 expected = fold_convert_loc (loc, expected_type, expected);
6683 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6684
6685 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6686 build_int_cst (ret_type, 0));
6687 }
6688
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl;
  enum tree_code code;

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = arg0;
  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  /* A nested call to __builtin_expect: the outer wrapper adds no
     information, so just return the (unstripped) argument.  */
  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner = arg0;
  while (TREE_CODE (inner) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
    inner = TREE_OPERAND (inner, 0);

  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      /* Wrap each operand in its own __builtin_expect so the hint
	 survives once the && / || is lowered to separate branches.  */
      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* The address of a weak symbol is not a compile-time constant,
	 so we cannot fold it away here.  */
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
6759
6760 /* Fold a call to __builtin_classify_type with argument ARG. */
6761
6762 static tree
6763 fold_builtin_classify_type (tree arg)
6764 {
6765 if (arg == 0)
6766 return build_int_cst (NULL_TREE, no_type_class);
6767
6768 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6769 }
6770
6771 /* Fold a call to __builtin_strlen with argument ARG. */
6772
6773 static tree
6774 fold_builtin_strlen (location_t loc, tree type, tree arg)
6775 {
6776 if (!validate_arg (arg, POINTER_TYPE))
6777 return NULL_TREE;
6778 else
6779 {
6780 tree len = c_strlen (arg, 0);
6781
6782 if (len)
6783 return fold_convert_loc (loc, type, len);
6784
6785 return NULL_TREE;
6786 }
6787 }
6788
6789 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6790
6791 static tree
6792 fold_builtin_inf (location_t loc, tree type, int warn)
6793 {
6794 REAL_VALUE_TYPE real;
6795
6796 /* __builtin_inff is intended to be usable to define INFINITY on all
6797 targets. If an infinity is not available, INFINITY expands "to a
6798 positive constant of type float that overflows at translation
6799 time", footnote "In this case, using INFINITY will violate the
6800 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6801 Thus we pedwarn to ensure this constraint violation is
6802 diagnosed. */
6803 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6804 pedwarn (loc, 0, "target format does not support infinity");
6805
6806 real_inf (&real);
6807 return build_real (type, real);
6808 }
6809
6810 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6811
6812 static tree
6813 fold_builtin_nan (tree arg, tree type, int quiet)
6814 {
6815 REAL_VALUE_TYPE real;
6816 const char *str;
6817
6818 if (!validate_arg (arg, POINTER_TYPE))
6819 return NULL_TREE;
6820 str = c_getstr (arg);
6821 if (!str)
6822 return NULL_TREE;
6823
6824 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6825 return NULL_TREE;
6826
6827 return build_real (type, real);
6828 }
6829
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      /* An int-to-float conversion is integral by construction.  */
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* Only the value operand (operand 1) determines the result.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These operations preserve integrality when both operands
	 are integral.  */
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      /* Both arms must be integral; the condition is irrelevant.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	/* A conversion from an integer type is integral; a conversion
	   from another real type is integral iff its operand is.  */
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  /* The rounding builtins produce integral values by
	     definition.  */
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  /* fmin/fmax of two integral values is integral.  */
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  /* Anything unrecognized is conservatively assumed non-integral.  */
  return false;
}
6901
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f).
   Do the transformation for a call with argument ARG.  Return the
   simplified tree, or NULL_TREE if no simplification applies.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      /* If the argument was merely widened (e.g. float -> double), do
	 the rounding in the narrower type and widen the result, using
	 the corresponding narrower builtin when one exists.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
6940
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  Return the
   simplified tree, or NULL_TREE if no simplification applies.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      /* If the argument was merely widened, call the variant of the
	 builtin that takes the narrower floating type directly.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* The long result is widened back to the original (long long)
	     return type.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
7010
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  FNDECL is the declaration of the builtin being called.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant
     (cabs (a+bi) == hypot (a, b)).  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  STRIP_NOPS (real);
	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Expand cabs (z) as sqrt (r*r + i*i) when permitted by unsafe math.
     Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Wrap everything in save_exprs so ARG and its parts are
	     evaluated only once even though used multiple times.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7088
7089 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7090 complex tree type of the result. If NEG is true, the imaginary
7091 zero is negative. */
7092
7093 static tree
7094 build_complex_cproj (tree type, bool neg)
7095 {
7096 REAL_VALUE_TYPE rinf, rzero = dconst0;
7097
7098 real_inf (&rinf);
7099 rzero.sign = neg;
7100 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7101 build_real (TREE_TYPE (type), rzero));
7102 }
7103
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* cproj maps any infinite input to (inf + copysign (0, imag) i);
	 finite inputs are returned unchanged.  */
      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
7159
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   TYPE is the return type.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{

  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: halve the exponent by
	     decrementing the REAL_EXP of the constant.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* The absolute value is needed because pow with a fractional
	 exponent is undefined for negative bases.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7233
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   TYPE is the return type.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      /* Halve 1/3 to 1/6 by decrementing the exponent.  */
	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/9 = (1/3) * (1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7324
7325 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7326 TYPE is the type of the return value. Return NULL_TREE if no
7327 simplification can be made. */
7328
7329 static tree
7330 fold_builtin_cos (location_t loc,
7331 tree arg, tree type, tree fndecl)
7332 {
7333 tree res, narg;
7334
7335 if (!validate_arg (arg, REAL_TYPE))
7336 return NULL_TREE;
7337
7338 /* Calculate the result when the argument is a constant. */
7339 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7340 return res;
7341
7342 /* Optimize cos(-x) into cos (x). */
7343 if ((narg = fold_strip_sign_ops (arg)))
7344 return build_call_expr_loc (loc, fndecl, 1, narg);
7345
7346 return NULL_TREE;
7347 }
7348
7349 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7350 Return NULL_TREE if no simplification can be made. */
7351
7352 static tree
7353 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7354 {
7355 if (validate_arg (arg, REAL_TYPE))
7356 {
7357 tree res, narg;
7358
7359 /* Calculate the result when the argument is a constant. */
7360 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7361 return res;
7362
7363 /* Optimize cosh(-x) into cosh (x). */
7364 if ((narg = fold_strip_sign_ops (arg)))
7365 return build_call_expr_loc (loc, fndecl, 1, narg);
7366 }
7367
7368 return NULL_TREE;
7369 }
7370
7371 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7372 argument ARG. TYPE is the type of the return value. Return
7373 NULL_TREE if no simplification can be made. */
7374
7375 static tree
7376 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7377 bool hyper)
7378 {
7379 if (validate_arg (arg, COMPLEX_TYPE)
7380 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7381 {
7382 tree tmp;
7383
7384 /* Calculate the result when the argument is a constant. */
7385 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7386 return tmp;
7387
7388 /* Optimize fn(-x) into fn(x). */
7389 if ((tmp = fold_strip_sign_ops (arg)))
7390 return build_call_expr_loc (loc, fndecl, 1, tmp);
7391 }
7392
7393 return NULL_TREE;
7394 }
7395
7396 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7397 Return NULL_TREE if no simplification can be made. */
7398
7399 static tree
7400 fold_builtin_tan (tree arg, tree type)
7401 {
7402 enum built_in_function fcode;
7403 tree res;
7404
7405 if (!validate_arg (arg, REAL_TYPE))
7406 return NULL_TREE;
7407
7408 /* Calculate the result when the argument is a constant. */
7409 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7410 return res;
7411
7412 /* Optimize tan(atan(x)) = x. */
7413 fcode = builtin_mathfn_code (arg);
7414 if (flag_unsafe_math_optimizations
7415 && (fcode == BUILT_IN_ATAN
7416 || fcode == BUILT_IN_ATANF
7417 || fcode == BUILT_IN_ATANL))
7418 return CALL_EXPR_ARG (arg, 0);
7419
7420 return NULL_TREE;
7421 }
7422
/* Fold function call to builtin sincos, sincosf, or sincosl.  ARG0 is
   the angle, ARG1 and ARG2 point to where the sine and cosine results
   are stored.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Wrap the cexpi call in a save_expr so it is evaluated only once
     even though both parts of the result are used.  */
  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  /* cexpi (x) = cos (x) + sin (x) i, so *arg1 gets the imaginary part
     (sine) and *arg2 the real part (cosine).  */
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
7462
/* Fold function call to builtin cexp, cexpf, or cexpl.  ARG0 is the
   complex argument and TYPE the (complex) return type.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  /* RTYPE is the real component type of the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp (0 + yi) == cexpi (y).  */
  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* save_expr both calls: each result is used twice below.  */
      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
7530
7531 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7532 Return NULL_TREE if no simplification can be made. */
7533
7534 static tree
7535 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7536 {
7537 if (!validate_arg (arg, REAL_TYPE))
7538 return NULL_TREE;
7539
7540 /* Optimize trunc of constant value. */
7541 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7542 {
7543 REAL_VALUE_TYPE r, x;
7544 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7545
7546 x = TREE_REAL_CST (arg);
7547 real_trunc (&r, TYPE_MODE (type), &x);
7548 return build_real (type, r);
7549 }
7550
7551 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7552 }
7553
7554 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7555 Return NULL_TREE if no simplification can be made. */
7556
7557 static tree
7558 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7559 {
7560 if (!validate_arg (arg, REAL_TYPE))
7561 return NULL_TREE;
7562
7563 /* Optimize floor of constant value. */
7564 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7565 {
7566 REAL_VALUE_TYPE x;
7567
7568 x = TREE_REAL_CST (arg);
7569 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7570 {
7571 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7572 REAL_VALUE_TYPE r;
7573
7574 real_floor (&r, TYPE_MODE (type), &x);
7575 return build_real (type, r);
7576 }
7577 }
7578
7579 /* Fold floor (x) where x is nonnegative to trunc (x). */
7580 if (tree_expr_nonnegative_p (arg))
7581 {
7582 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7583 if (truncfn)
7584 return build_call_expr_loc (loc, truncfn, 1, arg);
7585 }
7586
7587 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7588 }
7589
7590 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7591 Return NULL_TREE if no simplification can be made. */
7592
7593 static tree
7594 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7595 {
7596 if (!validate_arg (arg, REAL_TYPE))
7597 return NULL_TREE;
7598
7599 /* Optimize ceil of constant value. */
7600 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7601 {
7602 REAL_VALUE_TYPE x;
7603
7604 x = TREE_REAL_CST (arg);
7605 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7606 {
7607 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7608 REAL_VALUE_TYPE r;
7609
7610 real_ceil (&r, TYPE_MODE (type), &x);
7611 return build_real (type, r);
7612 }
7613 }
7614
7615 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7616 }
7617
7618 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7619 Return NULL_TREE if no simplification can be made. */
7620
7621 static tree
7622 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7623 {
7624 if (!validate_arg (arg, REAL_TYPE))
7625 return NULL_TREE;
7626
7627 /* Optimize round of constant value. */
7628 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7629 {
7630 REAL_VALUE_TYPE x;
7631
7632 x = TREE_REAL_CST (arg);
7633 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7634 {
7635 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7636 REAL_VALUE_TYPE r;
7637
7638 real_round (&r, TYPE_MODE (type), &x);
7639 return build_real (type, r);
7640 }
7641 }
7642
7643 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7644 }
7645
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Non-finite inputs (inf, NaN) have unspecified results and may
	 set errno, so they are not folded.  */
      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  double_int val;
	  REAL_VALUE_TYPE r;

	  /* First round in the floating mode according to the builtin's
	     rounding rule ...  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* ... then convert to an integer, folding only when the value
	     fits the integer return type.  */
	  real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
	  if (double_int_fits_to_tree_p (itype, val))
	    return double_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  /* Fall back to the narrowing / ll-to-l canonicalizations.  */
  return fold_fixed_mathfn (loc, fndecl, arg);
}
7710
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      /* The constant is held as a (HI, LO) pair of host words.  */
      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < 2 * HOST_BITS_PER_WIDE_INT)
	    hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* ffs: 1-based index of the least significant set bit,
	     0 for a zero argument.  */
	  if (lo != 0)
	    result = ffs_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* clz: leading zero count; for zero, use the target-defined
	     value if there is one, else WIDTH.  */
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* ctz: trailing zero count; same zero-argument handling.  */
	  if (lo != 0)
	    result = ctz_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  /* popcount: clear the lowest set bit until none remain.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  /* parity: popcount reduced modulo 2.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
7802
/* Fold function call to builtin_bswap and the long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* LO/HI are the two host words of the input; R_LO/R_HI
	 accumulate the byte-swapped result.  */
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	  case BUILT_IN_BSWAP32:
	  case BUILT_IN_BSWAP64:
	    {
	      int s;

	      /* Move the byte at bit position S to the mirrored
		 position D, reading from and writing to whichever
		 host word holds the respective position.  */
	      for (s = 0; s < width; s += 8)
		{
		  int d = width - s - 8;
		  unsigned HOST_WIDE_INT byte;

		  if (s < HOST_BITS_PER_WIDE_INT)
		    byte = (lo >> s) & 0xff;
		  else
		    byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		  if (d < HOST_BITS_PER_WIDE_INT)
		    r_lo |= byte << d;
		  else
		    r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
		}
	    }

	    break;

	  default:
	    gcc_unreachable ();
	}

      /* A result narrower than one host word only needs R_LO.  */
      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
	return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
7861
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function: it both evaluates
   constant arguments and identifies which logarithm (log, log2,
   log10) is being folded.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  Only valid under
	 -funsafe-math-optimizations.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
7954
7955 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7956 NULL_TREE if no simplification can be made. */
7957
7958 static tree
7959 fold_builtin_hypot (location_t loc, tree fndecl,
7960 tree arg0, tree arg1, tree type)
7961 {
7962 tree res, narg0, narg1;
7963
7964 if (!validate_arg (arg0, REAL_TYPE)
7965 || !validate_arg (arg1, REAL_TYPE))
7966 return NULL_TREE;
7967
7968 /* Calculate the result when the argument is a constant. */
7969 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7970 return res;
7971
7972 /* If either argument to hypot has a negate or abs, strip that off.
7973 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7974 narg0 = fold_strip_sign_ops (arg0);
7975 narg1 = fold_strip_sign_ops (arg1);
7976 if (narg0 || narg1)
7977 {
7978 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7979 narg1 ? narg1 : arg1);
7980 }
7981
7982 /* If either argument is zero, hypot is fabs of the other. */
7983 if (real_zerop (arg0))
7984 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7985 else if (real_zerop (arg1))
7986 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7987
7988 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7989 if (flag_unsafe_math_optimizations
7990 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7991 {
7992 const REAL_VALUE_TYPE sqrt2_trunc
7993 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7994 return fold_build2_loc (loc, MULT_EXPR, type,
7995 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7996 build_real (type, sqrt2_trunc));
7997 }
7998
7999 return NULL_TREE;
8000 }
8001
8002
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  /* The transformations below apply when the exponent is a constant.  */
  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  pow(0,n) with n <= 0 may set errno or
	     raise a trap, so that case is only folded when neither
	     -ftrapping-math nor -fmath-errno is in effect.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* An inexact result is only acceptable under unsafe math,
		 since folding then changes rounding behavior.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
8154
8155 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8156 Return NULL_TREE if no simplification can be made. */
8157 static tree
8158 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8159 tree arg0, tree arg1, tree type)
8160 {
8161 if (!validate_arg (arg0, REAL_TYPE)
8162 || !validate_arg (arg1, INTEGER_TYPE))
8163 return NULL_TREE;
8164
8165 /* Optimize pow(1.0,y) = 1.0. */
8166 if (real_onep (arg0))
8167 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8168
8169 if (host_integerp (arg1, 0))
8170 {
8171 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8172
8173 /* Evaluate powi at compile-time. */
8174 if (TREE_CODE (arg0) == REAL_CST
8175 && !TREE_OVERFLOW (arg0))
8176 {
8177 REAL_VALUE_TYPE x;
8178 x = TREE_REAL_CST (arg0);
8179 real_powi (&x, TYPE_MODE (type), &x, c);
8180 return build_real (type, x);
8181 }
8182
8183 /* Optimize pow(x,0) = 1.0. */
8184 if (c == 0)
8185 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8186 arg0);
8187
8188 /* Optimize pow(x,1) = x. */
8189 if (c == 1)
8190 return arg0;
8191
8192 /* Optimize pow(x,-1) = 1.0/x. */
8193 if (c == -1)
8194 return fold_build2_loc (loc, RDIV_EXPR, type,
8195 build_real (type, dconst1), arg0);
8196 }
8197
8198 return NULL_TREE;
8199 }
8200
8201 /* A subroutine of fold_builtin to fold the various exponent
8202 functions. Return NULL_TREE if no simplification can be made.
8203 FUNC is the corresponding MPFR exponent function. */
8204
8205 static tree
8206 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8207 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8208 {
8209 if (validate_arg (arg, REAL_TYPE))
8210 {
8211 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8212 tree res;
8213
8214 /* Calculate the result when the argument is a constant. */
8215 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8216 return res;
8217
8218 /* Optimize expN(logN(x)) = x. */
8219 if (flag_unsafe_math_optimizations)
8220 {
8221 const enum built_in_function fcode = builtin_mathfn_code (arg);
8222
8223 if ((func == mpfr_exp
8224 && (fcode == BUILT_IN_LOG
8225 || fcode == BUILT_IN_LOGF
8226 || fcode == BUILT_IN_LOGL))
8227 || (func == mpfr_exp2
8228 && (fcode == BUILT_IN_LOG2
8229 || fcode == BUILT_IN_LOG2F
8230 || fcode == BUILT_IN_LOG2L))
8231 || (func == mpfr_exp10
8232 && (fcode == BUILT_IN_LOG10
8233 || fcode == BUILT_IN_LOG10F
8234 || fcode == BUILT_IN_LOG10L)))
8235 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8236 }
8237 }
8238
8239 return NULL_TREE;
8240 }
8241
8242 /* Return true if VAR is a VAR_DECL or a component thereof. */
8243
8244 static bool
8245 var_decl_component_p (tree var)
8246 {
8247 tree inner = var;
8248 while (handled_component_p (inner))
8249 inner = TREE_OPERAND (inner, 0);
8250 return SSA_VAR_P (inner);
8251 }
8252
/* Fold function call to builtin memset.  Transforms a memset of a
   single scalar variable into a plain store of the replicated fill
   byte.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
		     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* DEST must directly take the address of the object being set.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  /* Only integral and pointer element types can be rebuilt from a
     replicated byte pattern.  */
  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The memset must cover exactly one object of ETYPE, and DEST must
     be sufficiently aligned for a direct store.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
	 < length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the fill byte through all bytes of CVAL.  The final
	 doubling is split into two shifts so that no single shift
	 reaches the width of a 32-bit HOST_WIDE_INT.  */
      cval = tree_low_cst (c, 1);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Emit *(etype *)dest = cval in place of the memset call.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
				     fold_convert_loc (loc,
						       build_pointer_type (etype),
						       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  return omit_one_operand_loc (loc, type, dest, ret);
}
8332
8333 /* Fold function call to builtin memset. Return
8334 NULL_TREE if no simplification can be made. */
8335
8336 static tree
8337 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8338 {
8339 if (! validate_arg (dest, POINTER_TYPE)
8340 || ! validate_arg (size, INTEGER_TYPE))
8341 return NULL_TREE;
8342
8343 if (!ignore)
8344 return NULL_TREE;
8345
8346 /* New argument list transforming bzero(ptr x, int y) to
8347 memset(ptr x, int 0, size_t y). This is done this way
8348 so that if it isn't expanded inline, we fallback to
8349 calling bzero instead of memset. */
8350
8351 return fold_builtin_memset (loc, dest, integer_zero_node,
8352 fold_convert_loc (loc, sizetype, size),
8353 void_type_node, ignore);
8354 }
8355
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   NULL_TREE if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static tree
fold_builtin_memory_op (location_t loc, tree dest, tree src,
			tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* ENDP == 3 is memmove: try to prove the regions cannot overlap
	 so it can be rewritten as memcpy; otherwise punt.  */
      if (endp == 3)
	{
	  src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
	  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return NULL_TREE;
	  if (readonly_data_expr (src)
	      || (host_integerp (len, 1)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
	    {
	      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      if (host_integerp (len, 1))
		maxsize = tree_low_cst (len, 1);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      /* Two decls only overlap when they are the same decl with
		 overlapping offset ranges.  */
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      /* For two MEM_REFs off the same base pointer, fold the
		 MEM_REF offsets into the byte offsets and compare the
		 resulting ranges.  */
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  double_int off;
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return NULL_TREE;
		  off = double_int_add (mem_ref_offset (src_base),
					shwi_to_double_int (src_offset));
		  if (!double_int_fits_in_shwi_p (off))
		    return NULL_TREE;
		  src_offset = off.low;
		  off = double_int_add (mem_ref_offset (dest_base),
					shwi_to_double_int (dest_offset));
		  if (!double_int_fits_in_shwi_p (off))
		    return NULL_TREE;
		  dest_offset = off.low;
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else
		return NULL_TREE;

	      fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }
	  return NULL_TREE;
	}

      /* The single-assignment transform below needs a constant length.  */
      if (!host_integerp (len, 0))
	return NULL_TREE;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      /* For a pointer-to-array whose total size differs from LEN, look
	 at the element type instead.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (srctype
	  && TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (desttype
	  && TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      /* Both types must have a known constant size.  */
      if (!srctype || !desttype
	  || TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype)
	  || !TYPE_SIZE_UNIT (srctype)
	  || !TYPE_SIZE_UNIT (desttype)
	  || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
	  || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST)
	return NULL_TREE;

      src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return NULL_TREE;

      /* DEST is reused for computing the return value below; guard it
	 against multiple expansion.  */
      if (!ignore)
	dest = builtin_save_expr (dest);

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* Try to express the destination as a direct object reference
	 covering exactly LEN bytes.  */
      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      /* Likewise for the source.  */
      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	  && (!STRICT_ALIGNMENT
	      || !destvar
	      || src_align >= TYPE_ALIGN (desttype)))
	srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
			      srcvar, off0);
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return NULL_TREE;

      /* When only one side could be expressed directly, access the
	 other side through the same type at offset zero.  */
      if (srcvar == NULL_TREE)
	{
	  if (STRICT_ALIGNMENT
	      && src_align < TYPE_ALIGN (desttype))
	    return NULL_TREE;
	  STRIP_NOPS (src);
	  srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	}
      else if (destvar == NULL_TREE)
	{
	  if (STRICT_ALIGNMENT
	      && dest_align < TYPE_ALIGN (srctype))
	    return NULL_TREE;
	  STRIP_NOPS (dest);
	  destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	}

      /* The whole copy becomes a single assignment.  */
      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
    }

  if (ignore)
    return expr;

  if (endp == 0 || endp == 3)
    return omit_one_operand_loc (loc, type, dest, expr);

  if (expr == len)
    expr = NULL_TREE;

  /* ENDP == 2 (stpcpy) returns a pointer to the last byte written,
     i.e. DEST + LEN - 1; ENDP == 1 (mempcpy) returns DEST + LEN.  */
  if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));

  len = fold_convert_loc (loc, sizetype, len);
  dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
  dest = fold_convert_loc (loc, type, dest);
  if (expr)
    dest = omit_one_operand_loc (loc, type, dest, expr);
  return dest;
}
8601
8602 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8603 If LEN is not NULL, it represents the length of the string to be
8604 copied. Return NULL_TREE if no simplification can be made. */
8605
8606 tree
8607 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8608 {
8609 tree fn;
8610
8611 if (!validate_arg (dest, POINTER_TYPE)
8612 || !validate_arg (src, POINTER_TYPE))
8613 return NULL_TREE;
8614
8615 /* If SRC and DEST are the same (and not volatile), return DEST. */
8616 if (operand_equal_p (src, dest, 0))
8617 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8618
8619 if (optimize_function_for_size_p (cfun))
8620 return NULL_TREE;
8621
8622 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8623 if (!fn)
8624 return NULL_TREE;
8625
8626 if (!len)
8627 {
8628 len = c_strlen (src, 1);
8629 if (! len || TREE_SIDE_EFFECTS (len))
8630 return NULL_TREE;
8631 }
8632
8633 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8634 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8635 build_call_expr_loc (loc, fn, 3, dest, src, len));
8636 }
8637
8638 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8639 Return NULL_TREE if no simplification can be made. */
8640
8641 static tree
8642 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8643 {
8644 tree fn, len, lenp1, call, type;
8645
8646 if (!validate_arg (dest, POINTER_TYPE)
8647 || !validate_arg (src, POINTER_TYPE))
8648 return NULL_TREE;
8649
8650 len = c_strlen (src, 1);
8651 if (!len
8652 || TREE_CODE (len) != INTEGER_CST)
8653 return NULL_TREE;
8654
8655 if (optimize_function_for_size_p (cfun)
8656 /* If length is zero it's small enough. */
8657 && !integer_zerop (len))
8658 return NULL_TREE;
8659
8660 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8661 if (!fn)
8662 return NULL_TREE;
8663
8664 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8665 /* We use dest twice in building our expression. Save it from
8666 multiple expansions. */
8667 dest = builtin_save_expr (dest);
8668 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8669
8670 type = TREE_TYPE (TREE_TYPE (fndecl));
8671 len = fold_convert_loc (loc, sizetype, len);
8672 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8673 dest = fold_convert_loc (loc, type, dest);
8674 dest = omit_one_operand_loc (loc, type, dest, call);
8675 return dest;
8676 }
8677
8678 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8679 If SLEN is not NULL, it represents the length of the source string.
8680 Return NULL_TREE if no simplification can be made. */
8681
8682 tree
8683 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8684 tree src, tree len, tree slen)
8685 {
8686 tree fn;
8687
8688 if (!validate_arg (dest, POINTER_TYPE)
8689 || !validate_arg (src, POINTER_TYPE)
8690 || !validate_arg (len, INTEGER_TYPE))
8691 return NULL_TREE;
8692
8693 /* If the LEN parameter is zero, return DEST. */
8694 if (integer_zerop (len))
8695 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8696
8697 /* We can't compare slen with len as constants below if len is not a
8698 constant. */
8699 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8700 return NULL_TREE;
8701
8702 if (!slen)
8703 slen = c_strlen (src, 1);
8704
8705 /* Now, we must be passed a constant src ptr parameter. */
8706 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8707 return NULL_TREE;
8708
8709 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8710
8711 /* We do not support simplification of this case, though we do
8712 support it when expanding trees into RTL. */
8713 /* FIXME: generate a call to __builtin_memset. */
8714 if (tree_int_cst_lt (slen, len))
8715 return NULL_TREE;
8716
8717 /* OK transform into builtin memcpy. */
8718 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8719 if (!fn)
8720 return NULL_TREE;
8721 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8722 build_call_expr_loc (loc, fn, 3, dest, src, len));
8723 }
8724
8725 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8726 arguments to the call, and TYPE is its return type.
8727 Return NULL_TREE if no simplification can be made. */
8728
8729 static tree
8730 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8731 {
8732 if (!validate_arg (arg1, POINTER_TYPE)
8733 || !validate_arg (arg2, INTEGER_TYPE)
8734 || !validate_arg (len, INTEGER_TYPE))
8735 return NULL_TREE;
8736 else
8737 {
8738 const char *p1;
8739
8740 if (TREE_CODE (arg2) != INTEGER_CST
8741 || !host_integerp (len, 1))
8742 return NULL_TREE;
8743
8744 p1 = c_getstr (arg1);
8745 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8746 {
8747 char c;
8748 const char *r;
8749 tree tem;
8750
8751 if (target_char_cast (arg2, &c))
8752 return NULL_TREE;
8753
8754 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8755
8756 if (r == NULL)
8757 return build_int_cst (TREE_TYPE (arg1), 0);
8758
8759 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8760 size_int (r - p1));
8761 return fold_convert_loc (loc, type, tem);
8762 }
8763 return NULL_TREE;
8764 }
8765 }
8766
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   LEN is the number of bytes to compare.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* Get the string constants, if any, that the arguments point to.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  The
     result is normalized to -1/0/1 rather than the raw memcmp value.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      /* Access the bytes through a const-qualified unsigned char
	 pointer so the comparison is unsigned, as memcmp requires.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8833
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  /* Get the string constants, if any, that the arguments point to.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both arguments constant: evaluate at compile time, normalizing the
     result to -1/0/1.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      /* Access the byte as const unsigned char so the result is the
	 unsigned character value, as strcmp requires.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
8896
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* Get the string constants, if any, that the arguments point to.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* All constant: evaluate at compile time, normalizing the result to
     -1/0/1.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      /* Access the byte as const unsigned char so the result is the
	 unsigned character value, as strncmp requires.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8991
8992 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8993 ARG. Return NULL_TREE if no simplification can be made. */
8994
8995 static tree
8996 fold_builtin_signbit (location_t loc, tree arg, tree type)
8997 {
8998 tree temp;
8999
9000 if (!validate_arg (arg, REAL_TYPE))
9001 return NULL_TREE;
9002
9003 /* If ARG is a compile-time constant, determine the result. */
9004 if (TREE_CODE (arg) == REAL_CST
9005 && !TREE_OVERFLOW (arg))
9006 {
9007 REAL_VALUE_TYPE c;
9008
9009 c = TREE_REAL_CST (arg);
9010 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9011 return fold_convert_loc (loc, type, temp);
9012 }
9013
9014 /* If ARG is non-negative, the result is always zero. */
9015 if (tree_expr_nonnegative_p (arg))
9016 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9017
9018 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9019 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9020 return fold_build2_loc (loc, LT_EXPR, type, arg,
9021 build_real (TREE_TYPE (arg), dconst0));
9022
9023 return NULL_TREE;
9024 }
9025
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  FNDECL is the function declaration, used to
   rebuild the call when only the first argument simplifies.  TYPE is
   the return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument, since
     copysign overrides its sign anyway, and re-emit the call.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
9073
9074 /* Fold a call to builtin isascii with argument ARG. */
9075
9076 static tree
9077 fold_builtin_isascii (location_t loc, tree arg)
9078 {
9079 if (!validate_arg (arg, INTEGER_TYPE))
9080 return NULL_TREE;
9081 else
9082 {
9083 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9084 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9085 build_int_cst (NULL_TREE,
9086 ~ (unsigned HOST_WIDE_INT) 0x7f));
9087 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9088 arg, integer_zero_node);
9089 }
9090 }
9091
9092 /* Fold a call to builtin toascii with argument ARG. */
9093
9094 static tree
9095 fold_builtin_toascii (location_t loc, tree arg)
9096 {
9097 if (!validate_arg (arg, INTEGER_TYPE))
9098 return NULL_TREE;
9099
9100 /* Transform toascii(c) -> (c & 0x7f). */
9101 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9102 build_int_cst (NULL_TREE, 0x7f));
9103 }
9104
9105 /* Fold a call to builtin isdigit with argument ARG. */
9106
9107 static tree
9108 fold_builtin_isdigit (location_t loc, tree arg)
9109 {
9110 if (!validate_arg (arg, INTEGER_TYPE))
9111 return NULL_TREE;
9112 else
9113 {
9114 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9115 /* According to the C standard, isdigit is unaffected by locale.
9116 However, it definitely is affected by the target character set. */
9117 unsigned HOST_WIDE_INT target_digit0
9118 = lang_hooks.to_target_charset ('0');
9119
9120 if (target_digit0 == 0)
9121 return NULL_TREE;
9122
9123 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9124 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9125 build_int_cst (unsigned_type_node, target_digit0));
9126 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9127 build_int_cst (unsigned_type_node, 9));
9128 }
9129 }
9130
9131 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9132
9133 static tree
9134 fold_builtin_fabs (location_t loc, tree arg, tree type)
9135 {
9136 if (!validate_arg (arg, REAL_TYPE))
9137 return NULL_TREE;
9138
9139 arg = fold_convert_loc (loc, type, arg);
9140 if (TREE_CODE (arg) == REAL_CST)
9141 return fold_abs_const (arg, type);
9142 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9143 }
9144
9145 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9146
9147 static tree
9148 fold_builtin_abs (location_t loc, tree arg, tree type)
9149 {
9150 if (!validate_arg (arg, INTEGER_TYPE))
9151 return NULL_TREE;
9152
9153 arg = fold_convert_loc (loc, type, arg);
9154 if (TREE_CODE (arg) == INTEGER_CST)
9155 return fold_abs_const (arg, type);
9156 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9157 }
9158
/* Fold a call to builtin fmin or fmax.  ARG0 and ARG1 are the
   arguments, TYPE is the return type, and MAX selects fmax (true) or
   fmin (false).  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
9203
9204 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9205
9206 static tree
9207 fold_builtin_carg (location_t loc, tree arg, tree type)
9208 {
9209 if (validate_arg (arg, COMPLEX_TYPE)
9210 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9211 {
9212 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9213
9214 if (atan2_fn)
9215 {
9216 tree new_arg = builtin_save_expr (arg);
9217 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9218 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9219 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9220 }
9221 }
9222
9223 return NULL_TREE;
9224 }
9225
/* Fold a call to builtin logb/ilogb.  ARG is the argument and RETTYPE
   the return type (real for logb, integer for ilogb).
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert_loc (loc, rettype, arg);
	  /* Fall through... ilogb of Inf/NaN is handled like zero:
	     not folded.  */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (NULL_TREE,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9267
/* Fold a call to builtin significand, if radix == 2.  ARG is the
   argument and RETTYPE the return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9306
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the value, ARG1 the int* exponent out-parameter, RETTYPE the return
   type.  On success the result is a COMPOUND_EXPR that stores the
   exponent through ARG1 and yields the fraction.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold a compile-time real constant.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (NULL_TREE, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9362
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.  For
	 scalbn/scalbln this additionally requires the type's radix to
	 be 2.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9423
9424 /* Fold a call to builtin modf. */
9425
9426 static tree
9427 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9428 {
9429 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9430 return NULL_TREE;
9431
9432 STRIP_NOPS (arg0);
9433
9434 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9435 return NULL_TREE;
9436
9437 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9438
9439 /* Proceed if a valid pointer type was passed in. */
9440 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9441 {
9442 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9443 REAL_VALUE_TYPE trunc, frac;
9444
9445 switch (value->cl)
9446 {
9447 case rvc_nan:
9448 case rvc_zero:
9449 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9450 trunc = frac = *value;
9451 break;
9452 case rvc_inf:
9453 /* For +-Inf, return (*arg1 = arg0, +-0). */
9454 frac = dconst0;
9455 frac.sign = value->sign;
9456 trunc = *value;
9457 break;
9458 case rvc_normal:
9459 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9460 real_trunc (&trunc, VOIDmode, value);
9461 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9462 /* If the original number was negative and already
9463 integral, then the fractional part is -0.0. */
9464 if (value->sign && frac.cl == rvc_zero)
9465 frac.sign = value->sign;
9466 break;
9467 }
9468
9469 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9470 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9471 build_real (rettype, trunc));
9472 TREE_SIDE_EFFECTS (arg1) = 1;
9473 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9474 build_real (rettype, frac));
9475 }
9476
9477 return NULL_TREE;
9478 }
9479
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Prefer the target's RTL expansion when one exists.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	/* BUF receives the mode's largest finite value as a hex
	   float string.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	/* The smallest normal magnitude is 2^(emin-1).  */
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	/* Save fabs(x) so it is evaluated only once for both bounds.  */
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
9569
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call, FNDECL the called declaration, and
   BUILTIN_INDEX selects which classification to fold.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* Without infinities in the format, the answer is always 0.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
	tree tmp = NULL_TREE;

	/* ARG is used twice below; save it so side effects run once.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 booleans before combining.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
				      isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
			       integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			       isinf_call, tmp,
			       integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* Without NaNs or infinities, everything is finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* Without NaNs in the format, the answer is always 0.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* isnan(x) <=> x unordered with itself.  Save ARG since it is
	 used as both operands.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9662
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* Work on fabs(arg); saved so its side effects run only once.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).

     The tests are built innermost-first, each wrapping the previous
     RES in another COND_EXPR.  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
		     build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
		     tmp, fp_zero, fp_subnormal);

  /* The smallest normal magnitude is 2^(emin-1).  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
		     arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			 fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      /* ORDERED_EXPR is false exactly when ARG is NaN.  */
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
9730
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  /* NOTE(review): assumes at least one argument has floating type
     (presumably guaranteed by front-end checking of these builtins);
     otherwise CMP_TYPE stays NULL_TREE below -- confirm at callers.  */
  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* Without NaNs the operands are never unordered.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* The codes describe the opposite of the desired result, so emit
     the negation of the chosen comparison.  */
  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
						   : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
		      fold_build2_loc (loc, code, type, arg0, arg1));
}
9780
9781 /* Fold a call to built-in function FNDECL with 0 arguments.
9782 IGNORE is true if the result of the function call is ignored. This
9783 function returns NULL_TREE if no simplification was possible. */
9784
static tree
fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
  /* Result type of the builtin, and its function code for dispatch.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      /* inf() and huge_val() differ only in the flag passed to
	 fold_builtin_inf; presumably it controls warning when the
	 target lacks infinities -- confirm in fold_builtin_inf.  */
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      /* No argument: classify NULL_TREE.  */
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  /* No simplification was possible.  */
  return NULL_TREE;
}
9809
9810 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9811 IGNORE is true if the result of the function call is ignored. This
9812 function returns NULL_TREE if no simplification was possible. */
9813
static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
{
  /* Result type of the builtin, and its function code for dispatch.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    /* Complex builtins: handled directly when the argument is a complex
       type with REAL_TYPE components.  */
    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
    break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
    break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
    break;

    CASE_FLT_FN (BUILT_IN_CCOS):
      return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);

    CASE_FLT_FN (BUILT_IN_CCOSH):
      return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);

    CASE_FLT_FN (BUILT_IN_CPROJ):
      return fold_builtin_cproj(loc, arg0, type);

    /* Complex transcendentals go through do_mpc_arg1 with the matching
       MPC function; presumably folded only for constant arguments --
       see do_mpc_arg1.  */
    CASE_FLT_FN (BUILT_IN_CSIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sin);
    break;

    CASE_FLT_FN (BUILT_IN_CSINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sinh);
    break;

    CASE_FLT_FN (BUILT_IN_CTAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tan);
    break;

    CASE_FLT_FN (BUILT_IN_CTANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tanh);
    break;

    CASE_FLT_FN (BUILT_IN_CLOG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_log);
    break;

    CASE_FLT_FN (BUILT_IN_CSQRT):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sqrt);
    break;

    CASE_FLT_FN (BUILT_IN_CASIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asin);
    break;

    CASE_FLT_FN (BUILT_IN_CACOS):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acos);
    break;

    CASE_FLT_FN (BUILT_IN_CATAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atan);
    break;

    CASE_FLT_FN (BUILT_IN_CASINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asinh);
    break;

    CASE_FLT_FN (BUILT_IN_CACOSH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acosh);
    break;

    CASE_FLT_FN (BUILT_IN_CATANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atanh);
    break;

    CASE_FLT_FN (BUILT_IN_CABS):
      return fold_builtin_cabs (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SQRT):
      return fold_builtin_sqrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CBRT):
      return fold_builtin_cbrt (loc, arg0, type);

    /* Real transcendentals go through do_mpfr_arg1 with the matching
       MPFR function.  The &dconst* pointers look like lower/upper
       domain bounds with an inclusive flag -- confirm in do_mpfr_arg1.  */
    CASE_FLT_FN (BUILT_IN_ASIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asin,
			     &dconstm1, &dconst1, true);
    break;

    CASE_FLT_FN (BUILT_IN_ACOS):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acos,
			     &dconstm1, &dconst1, true);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ASINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ACOSH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acosh,
			     &dconst1, NULL, true);
    break;

    CASE_FLT_FN (BUILT_IN_ATANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atanh,
			     &dconstm1, &dconst1, false);
    break;

    CASE_FLT_FN (BUILT_IN_SIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_COS):
      return fold_builtin_cos (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TAN):
      return fold_builtin_tan (arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXP):
      return fold_builtin_cexp (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXPI):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
    break;

    CASE_FLT_FN (BUILT_IN_SINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_COSH):
      return fold_builtin_cosh (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ERF):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ERFC):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_TGAMMA):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_EXP):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);

    CASE_FLT_FN (BUILT_IN_EXP2):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);

    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);

    CASE_FLT_FN (BUILT_IN_EXPM1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_LOG):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);

    CASE_FLT_FN (BUILT_IN_LOG2):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);

    CASE_FLT_FN (BUILT_IN_LOG10):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);

    CASE_FLT_FN (BUILT_IN_LOG1P):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log1p,
			     &dconstm1, NULL, false);
    break;

    /* Bessel functions of the first (j0/j1) and second (y0/y1) kind.  */
    CASE_FLT_FN (BUILT_IN_J0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j0,
			     NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_J1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j1,
			     NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_Y0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y0,
			     &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_Y1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y1,
			     &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_NAN):
    case BUILT_IN_NAND32:
    case BUILT_IN_NAND64:
    case BUILT_IN_NAND128:
      /* Quiet NaN; the flag distinguishes nan() from nans().  */
      return fold_builtin_nan (arg0, type, true);

    CASE_FLT_FN (BUILT_IN_NANS):
      return fold_builtin_nan (arg0, type, false);

    CASE_FLT_FN (BUILT_IN_FLOOR):
      return fold_builtin_floor (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_CEIL):
      return fold_builtin_ceil (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_TRUNC):
      return fold_builtin_trunc (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ROUND):
      return fold_builtin_round (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return fold_trunc_transparent_mathfn (loc, fndecl, arg0);

    /* Rounding functions that return an integer type.  */
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      return fold_builtin_int_roundingfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      return fold_fixed_mathfn (loc, fndecl, arg0);

    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      return fold_builtin_bswap (fndecl, arg0);

    /* Integer bit-twiddling builtins.  */
    CASE_INT_FN (BUILT_IN_FFS):
    CASE_INT_FN (BUILT_IN_CLZ):
    CASE_INT_FN (BUILT_IN_CTZ):
    CASE_INT_FN (BUILT_IN_POPCOUNT):
    CASE_INT_FN (BUILT_IN_PARITY):
      return fold_builtin_bitop (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
      return fold_builtin_signbit (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      return fold_builtin_significand (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ILOGB):
    CASE_FLT_FN (BUILT_IN_LOGB):
      return fold_builtin_logb (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    /* Classification builtins: try the generic classifier first, then
       fall back to the interclass math function folder.  */
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);

    case BUILT_IN_FREE:
      /* free (NULL) has no effect; drop the call entirely.  */
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  /* No simplification was possible.  */
  return NULL_TREE;

}
10214
10215 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10216 IGNORE is true if the result of the function call is ignored. This
10217 function returns NULL_TREE if no simplification was possible. */
10218
static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
  /* Result type of the builtin, and its function code for dispatch.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* Bessel functions of integer order: jn (n, x) / yn (n, x).  */
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
    break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
    break;

    /* Reentrant variants taking a pointer second argument.  */
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
    break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
    break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
	  && validate_arg (arg1, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
    break;

    /* ldexp and scalbn/scalbln differ only in the flag passed down.  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
					 type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_BZERO:
      return fold_builtin_bzero (loc, arg0, arg1, ignore);

    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (loc, arg0, arg1);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    /* index/rindex are the traditional BSD names for strchr/strrchr.  */
    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);

    case BUILT_IN_STPCPY:
      /* When the result is unused, stpcpy degenerates to strcpy.  */
      if (ignore)
	{
	  tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
	  if (!fn)
	    break;

	  return build_call_expr_loc (loc, fn, 2, arg0, arg1);
	}
      else
	return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    /* Unordered comparisons: the codes passed are the *opposite* of the
       desired result -- see fold_builtin_unordered_cmp.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* The first argument of the _chk variants is the flag; it must be
	 a side-effect-free integer or we cannot drop it.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (loc, fndecl,
				    arg1, NULL_TREE, ignore, fcode);
    break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
				   ignore, fcode);

    default:
      break;
    }
  /* No simplification was possible.  */
  return NULL_TREE;
}
10415
10416 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10417 and ARG2. IGNORE is true if the result of the function call is ignored.
10418 This function returns NULL_TREE if no simplification was possible. */
10419
static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2, bool ignore)
{
  /* Result type of the builtin, and its function code for dispatch.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE)
	  && validate_arg(arg2, REAL_TYPE))
	return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
    break;

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE)
	  && validate_arg(arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
    break;

    case BUILT_IN_MEMSET:
      return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);

    case BUILT_IN_BCOPY:
      /* bcopy (src, dst, n): note the swapped operand order relative to
	 memmove; endp=3 presumably selects memmove-style semantics --
	 confirm in fold_builtin_memory_op.  */
      return fold_builtin_memory_op (loc, arg1, arg0, arg2,
				     void_type_node, true, /*endp=*/3);

    case BUILT_IN_MEMCPY:
      return fold_builtin_memory_op (loc, arg0, arg1, arg2,
				     type, ignore, /*endp=*/0);

    case BUILT_IN_MEMPCPY:
      return fold_builtin_memory_op (loc, arg0, arg1, arg2,
				     type, ignore, /*endp=*/1);

    case BUILT_IN_MEMMOVE:
      return fold_builtin_memory_op (loc, arg0, arg1, arg2,
				     type, ignore, /*endp=*/3);

    case BUILT_IN_STRNCAT:
      return fold_builtin_strncat (loc, arg0, arg1, arg2);

    case BUILT_IN_STRNCPY:
      return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (loc, arg0, arg1, arg2);

    case BUILT_IN_MEMCHR:
      return fold_builtin_memchr (loc, arg0, arg1, arg2, type);

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
				      ignore, fcode);

    case BUILT_IN_STRCAT_CHK:
      return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* The flag argument must be a side-effect-free integer before we
	 may drop it and fold the rest.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
    break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
				   ignore, fcode);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* Here the flag is the second argument (after the stream).  */
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
				     ignore, fcode);

    default:
      break;
    }
  /* No simplification was possible.  */
  return NULL_TREE;
}
10521
10522 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10523 ARG2, and ARG3. IGNORE is true if the result of the function call is
10524 ignored. This function returns NULL_TREE if no simplification was
10525 possible. */
10526
static tree
fold_builtin_4 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
				      NULL_TREE, ignore,
				      DECL_FUNCTION_CODE (fndecl));

    case BUILT_IN_STRNCPY_CHK:
      return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);

    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* The flag (second argument) must be a side-effect-free integer
	 before we may drop it and fold the rest.  */
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
				     ignore, fcode);
      break;

    default:
      break;
    }
  /* No simplification was possible.  */
  return NULL_TREE;
}
10564
10565 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10566 arguments, where NARGS <= 4. IGNORE is true if the result of the
10567 function call is ignored. This function returns NULL_TREE if no
10568 simplification was possible. Note that this only folds builtins with
10569 fixed argument patterns. Foldings that do varargs-to-varargs
10570 transformations, or that match calls with more than 4 arguments,
10571 need to be handled with fold_builtin_varargs instead. */
10572
10573 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10574
10575 static tree
10576 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10577 {
10578 tree ret = NULL_TREE;
10579
10580 switch (nargs)
10581 {
10582 case 0:
10583 ret = fold_builtin_0 (loc, fndecl, ignore);
10584 break;
10585 case 1:
10586 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10587 break;
10588 case 2:
10589 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10590 break;
10591 case 3:
10592 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10593 break;
10594 case 4:
10595 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10596 ignore);
10597 break;
10598 default:
10599 break;
10600 }
10601 if (ret)
10602 {
10603 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10604 SET_EXPR_LOCATION (ret, loc);
10605 TREE_NO_WARNING (ret) = 1;
10606 return ret;
10607 }
10608 return NULL_TREE;
10609 }
10610
10611 /* Builtins with folding operations that operate on "..." arguments
10612 need special handling; we need to store the arguments in a convenient
10613 data structure before attempting any folding. Fortunately there are
10614 only a few builtins that fall into this category. FNDECL is the
10615 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10616 result of the function call is ignored. */
10617
10618 static tree
10619 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10620 bool ignore ATTRIBUTE_UNUSED)
10621 {
10622 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10623 tree ret = NULL_TREE;
10624
10625 switch (fcode)
10626 {
10627 case BUILT_IN_SPRINTF_CHK:
10628 case BUILT_IN_VSPRINTF_CHK:
10629 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10630 break;
10631
10632 case BUILT_IN_SNPRINTF_CHK:
10633 case BUILT_IN_VSNPRINTF_CHK:
10634 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10635 break;
10636
10637 case BUILT_IN_FPCLASSIFY:
10638 ret = fold_builtin_fpclassify (loc, exp);
10639 break;
10640
10641 default:
10642 break;
10643 }
10644 if (ret)
10645 {
10646 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10647 SET_EXPR_LOCATION (ret, loc);
10648 TREE_NO_WARNING (ret) = 1;
10649 return ret;
10650 }
10651 return NULL_TREE;
10652 }
10653
10654 /* Return true if FNDECL shouldn't be folded right now.
10655 If a built-in function has an inline attribute always_inline
10656 wrapper, defer folding it after always_inline functions have
10657 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10658 might not be performed. */
10659
10660 static bool
10661 avoid_folding_inline_builtin (tree fndecl)
10662 {
10663 return (DECL_DECLARED_INLINE_P (fndecl)
10664 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10665 && cfun
10666 && !cfun->always_inline_functions_inlined
10667 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10668 }
10669
10670 /* A wrapper function for builtin folding that prevents warnings for
10671 "statement without effect" and the like, caused by removing the
10672 call node earlier than the warning is generated. */
10673
10674 tree
10675 fold_call_expr (location_t loc, tree exp, bool ignore)
10676 {
10677 tree ret = NULL_TREE;
10678 tree fndecl = get_callee_fndecl (exp);
10679 if (fndecl
10680 && TREE_CODE (fndecl) == FUNCTION_DECL
10681 && DECL_BUILT_IN (fndecl)
10682 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10683 yet. Defer folding until we see all the arguments
10684 (after inlining). */
10685 && !CALL_EXPR_VA_ARG_PACK (exp))
10686 {
10687 int nargs = call_expr_nargs (exp);
10688
10689 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10690 instead last argument is __builtin_va_arg_pack (). Defer folding
10691 even in that case, until arguments are finalized. */
10692 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10693 {
10694 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10695 if (fndecl2
10696 && TREE_CODE (fndecl2) == FUNCTION_DECL
10697 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10698 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10699 return NULL_TREE;
10700 }
10701
10702 if (avoid_folding_inline_builtin (fndecl))
10703 return NULL_TREE;
10704
10705 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10706 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10707 CALL_EXPR_ARGP (exp), ignore);
10708 else
10709 {
10710 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10711 {
10712 tree *args = CALL_EXPR_ARGP (exp);
10713 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10714 }
10715 if (!ret)
10716 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10717 if (ret)
10718 return ret;
10719 }
10720 }
10721 return NULL_TREE;
10722 }
10723
10724 /* Conveniently construct a function call expression. FNDECL names the
10725 function to be called and N arguments are passed in the array
10726 ARGARRAY. */
10727
10728 tree
10729 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10730 {
10731 tree fntype = TREE_TYPE (fndecl);
10732 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10733
10734 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10735 }
10736
10737 /* Conveniently construct a function call expression. FNDECL names the
10738 function to be called and the arguments are passed in the vector
10739 VEC. */
10740
10741 tree
10742 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10743 {
10744 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10745 VEC_address (tree, vec));
10746 }
10747
10748
10749 /* Conveniently construct a function call expression. FNDECL names the
10750 function to be called, N is the number of arguments, and the "..."
10751 parameters are the argument expressions. */
10752
10753 tree
10754 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10755 {
10756 va_list ap;
10757 tree *argarray = XALLOCAVEC (tree, n);
10758 int i;
10759
10760 va_start (ap, n);
10761 for (i = 0; i < n; i++)
10762 argarray[i] = va_arg (ap, tree);
10763 va_end (ap);
10764 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10765 }
10766
10767 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10768 varargs macros aren't supported by all bootstrap compilers. */
10769
10770 tree
10771 build_call_expr (tree fndecl, int n, ...)
10772 {
10773 va_list ap;
10774 tree *argarray = XALLOCAVEC (tree, n);
10775 int i;
10776
10777 va_start (ap, n);
10778 for (i = 0; i < n; i++)
10779 argarray[i] = va_arg (ap, tree);
10780 va_end (ap);
10781 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10782 }
10783
10784 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10785 N arguments are passed in the array ARGARRAY. */
10786
tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  /* Folding only applies when the callee is a known built-in decl.  */
  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are
	     by returning an unfolded call.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  /* Some built-ins (e.g. those being inlined) must not be folded
	     here; emit a plain call for them.  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* Machine-dependent built-ins are folded entirely by the
		 target hook; on failure fall back to an unfolded call.  */
	      ret = targetm.fold_builtin (fndecl, n, argarray, false);
	      if (ret)
		return ret;

	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  /* Not a foldable built-in: build the call unchanged.  */
  return build_call_array_loc (loc, type, fn, n, argarray);
}
10841
10842 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10843 along with N new arguments specified as the "..." parameters. SKIP
10844 is the number of arguments in EXP to be omitted. This function is used
10845 to do varargs-to-varargs transformations. */
10846
static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  int oldnargs = call_expr_nargs (exp);
  int nargs = oldnargs - skip + n;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *buffer;

  if (n > 0)
    {
      int i, j;
      va_list ap;

      /* Copy the N new leading arguments, then append the tail of
	 EXP's argument list (everything past the first SKIP args).  */
      buffer = XALLOCAVEC (tree, nargs);
      va_start (ap, n);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (ap, tree);
      va_end (ap);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = CALL_EXPR_ARG (exp, j);
    }
  else
    /* No new arguments: alias EXP's own argument vector in place,
       offset past the skipped entries, avoiding a copy.  */
    buffer = CALL_EXPR_ARGP (exp) + skip;

  return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
}
10874
10875 /* Validate a single argument ARG against a tree code CODE representing
10876 a type. */
10877
10878 static bool
10879 validate_arg (const_tree arg, enum tree_code code)
10880 {
10881 if (!arg)
10882 return false;
10883 else if (code == POINTER_TYPE)
10884 return POINTER_TYPE_P (TREE_TYPE (arg));
10885 else if (code == INTEGER_TYPE)
10886 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10887 return code == TREE_CODE (TREE_TYPE (arg));
10888 }
10889
10890 /* This function validates the types of a function call argument list
10891 against a specified list of tree_codes. If the last specifier is a 0,
10892 that represents an ellipses, otherwise the last specifier must be a
10893 VOID_TYPE.
10894
10895 This is the GIMPLE version of validate_arglist. Eventually we want to
10896 completely convert builtins.c to work from GIMPLEs and the tree based
10897 validate_arglist will then be removed. */
10898
10899 bool
10900 validate_gimple_arglist (const_gimple call, ...)
10901 {
10902 enum tree_code code;
10903 bool res = 0;
10904 va_list ap;
10905 const_tree arg;
10906 size_t i;
10907
10908 va_start (ap, call);
10909 i = 0;
10910
10911 do
10912 {
10913 code = (enum tree_code) va_arg (ap, int);
10914 switch (code)
10915 {
10916 case 0:
10917 /* This signifies an ellipses, any further arguments are all ok. */
10918 res = true;
10919 goto end;
10920 case VOID_TYPE:
10921 /* This signifies an endlink, if no arguments remain, return
10922 true, otherwise return false. */
10923 res = (i == gimple_call_num_args (call));
10924 goto end;
10925 default:
10926 /* If no parameters remain or the parameter's code does not
10927 match the specified code, return false. Otherwise continue
10928 checking any remaining arguments. */
10929 arg = gimple_call_arg (call, i++);
10930 if (!validate_arg (arg, code))
10931 goto end;
10932 break;
10933 }
10934 }
10935 while (1);
10936
10937 /* We need gotos here since we can only have one VA_CLOSE in a
10938 function. */
10939 end: ;
10940 va_end (ap);
10941
10942 return res;
10943 }
10944
10945 /* This function validates the types of a function call argument list
10946 against a specified list of tree_codes. If the last specifier is a 0,
10947 that represents an ellipses, otherwise the last specifier must be a
10948 VOID_TYPE. */
10949
10950 bool
10951 validate_arglist (const_tree callexpr, ...)
10952 {
10953 enum tree_code code;
10954 bool res = 0;
10955 va_list ap;
10956 const_call_expr_arg_iterator iter;
10957 const_tree arg;
10958
10959 va_start (ap, callexpr);
10960 init_const_call_expr_arg_iterator (callexpr, &iter);
10961
10962 do
10963 {
10964 code = (enum tree_code) va_arg (ap, int);
10965 switch (code)
10966 {
10967 case 0:
10968 /* This signifies an ellipses, any further arguments are all ok. */
10969 res = true;
10970 goto end;
10971 case VOID_TYPE:
10972 /* This signifies an endlink, if no arguments remain, return
10973 true, otherwise return false. */
10974 res = !more_const_call_expr_args_p (&iter);
10975 goto end;
10976 default:
10977 /* If no parameters remain or the parameter's code does not
10978 match the specified code, return false. Otherwise continue
10979 checking any remaining arguments. */
10980 arg = next_const_call_expr_arg (&iter);
10981 if (!validate_arg (arg, code))
10982 goto end;
10983 break;
10984 }
10985 }
10986 while (1);
10987
10988 /* We need gotos here since we can only have one VA_CLOSE in a
10989 function. */
10990 end: ;
10991 va_end (ap);
10992
10993 return res;
10994 }
10995
10996 /* Default target-specific builtin expander that does nothing. */
10997
rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  /* NULL_RTX signals that this hook declined to expand the builtin;
     callers fall back to their generic handling.  */
  return NULL_RTX;
}
11007
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */
11010
11011 static bool
11012 readonly_data_expr (tree exp)
11013 {
11014 STRIP_NOPS (exp);
11015
11016 if (TREE_CODE (exp) != ADDR_EXPR)
11017 return false;
11018
11019 exp = get_base_address (TREE_OPERAND (exp, 0));
11020 if (!exp)
11021 return false;
11022
11023 /* Make sure we call decl_readonly_section only for trees it
11024 can handle (since it returns true for everything it doesn't
11025 understand). */
11026 if (TREE_CODE (exp) == STRING_CST
11027 || TREE_CODE (exp) == CONSTRUCTOR
11028 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11029 return decl_readonly_section (exp, 0);
11030 else
11031 return false;
11032 }
11033
11034 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11035 to the call, and TYPE is its return type.
11036
11037 Return NULL_TREE if no simplification was possible, otherwise return the
11038 simplified form of the call as a tree.
11039
11040 The simplified form may be a constant or other expression which
11041 computes the same value, but in a more efficient manner (including
11042 calls to other builtin functions).
11043
11044 The call may contain arguments which need to be evaluated, but
11045 which are not useful to determine the result of the call. In
11046 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11047 COMPOUND_EXPR will be an argument which must be evaluated.
11048 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11049 COMPOUND_EXPR in the chain will contain the tree for the simplified
11050 form of the builtin function call. */
11051
11052 static tree
11053 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11054 {
11055 if (!validate_arg (s1, POINTER_TYPE)
11056 || !validate_arg (s2, POINTER_TYPE))
11057 return NULL_TREE;
11058 else
11059 {
11060 tree fn;
11061 const char *p1, *p2;
11062
11063 p2 = c_getstr (s2);
11064 if (p2 == NULL)
11065 return NULL_TREE;
11066
11067 p1 = c_getstr (s1);
11068 if (p1 != NULL)
11069 {
11070 const char *r = strstr (p1, p2);
11071 tree tem;
11072
11073 if (r == NULL)
11074 return build_int_cst (TREE_TYPE (s1), 0);
11075
11076 /* Return an offset into the constant string argument. */
11077 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11078 s1, size_int (r - p1));
11079 return fold_convert_loc (loc, type, tem);
11080 }
11081
11082 /* The argument is const char *, and the result is char *, so we need
11083 a type conversion here to avoid a warning. */
11084 if (p2[0] == '\0')
11085 return fold_convert_loc (loc, type, s1);
11086
11087 if (p2[1] != '\0')
11088 return NULL_TREE;
11089
11090 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11091 if (!fn)
11092 return NULL_TREE;
11093
11094 /* New argument list transforming strstr(s1, s2) to
11095 strchr(s1, s2[0]). */
11096 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11097 }
11098 }
11099
11100 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11101 the call, and TYPE is its return type.
11102
11103 Return NULL_TREE if no simplification was possible, otherwise return the
11104 simplified form of the call as a tree.
11105
11106 The simplified form may be a constant or other expression which
11107 computes the same value, but in a more efficient manner (including
11108 calls to other builtin functions).
11109
11110 The call may contain arguments which need to be evaluated, but
11111 which are not useful to determine the result of the call. In
11112 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11113 COMPOUND_EXPR will be an argument which must be evaluated.
11114 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11115 COMPOUND_EXPR in the chain will contain the tree for the simplified
11116 form of the builtin function call. */
11117
11118 static tree
11119 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11120 {
11121 if (!validate_arg (s1, POINTER_TYPE)
11122 || !validate_arg (s2, INTEGER_TYPE))
11123 return NULL_TREE;
11124 else
11125 {
11126 const char *p1;
11127
11128 if (TREE_CODE (s2) != INTEGER_CST)
11129 return NULL_TREE;
11130
11131 p1 = c_getstr (s1);
11132 if (p1 != NULL)
11133 {
11134 char c;
11135 const char *r;
11136 tree tem;
11137
11138 if (target_char_cast (s2, &c))
11139 return NULL_TREE;
11140
11141 r = strchr (p1, c);
11142
11143 if (r == NULL)
11144 return build_int_cst (TREE_TYPE (s1), 0);
11145
11146 /* Return an offset into the constant string argument. */
11147 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11148 s1, size_int (r - p1));
11149 return fold_convert_loc (loc, type, tem);
11150 }
11151 return NULL_TREE;
11152 }
11153 }
11154
11155 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11156 the call, and TYPE is its return type.
11157
11158 Return NULL_TREE if no simplification was possible, otherwise return the
11159 simplified form of the call as a tree.
11160
11161 The simplified form may be a constant or other expression which
11162 computes the same value, but in a more efficient manner (including
11163 calls to other builtin functions).
11164
11165 The call may contain arguments which need to be evaluated, but
11166 which are not useful to determine the result of the call. In
11167 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11168 COMPOUND_EXPR will be an argument which must be evaluated.
11169 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11170 COMPOUND_EXPR in the chain will contain the tree for the simplified
11171 form of the builtin function call. */
11172
11173 static tree
11174 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11175 {
11176 if (!validate_arg (s1, POINTER_TYPE)
11177 || !validate_arg (s2, INTEGER_TYPE))
11178 return NULL_TREE;
11179 else
11180 {
11181 tree fn;
11182 const char *p1;
11183
11184 if (TREE_CODE (s2) != INTEGER_CST)
11185 return NULL_TREE;
11186
11187 p1 = c_getstr (s1);
11188 if (p1 != NULL)
11189 {
11190 char c;
11191 const char *r;
11192 tree tem;
11193
11194 if (target_char_cast (s2, &c))
11195 return NULL_TREE;
11196
11197 r = strrchr (p1, c);
11198
11199 if (r == NULL)
11200 return build_int_cst (TREE_TYPE (s1), 0);
11201
11202 /* Return an offset into the constant string argument. */
11203 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11204 s1, size_int (r - p1));
11205 return fold_convert_loc (loc, type, tem);
11206 }
11207
11208 if (! integer_zerop (s2))
11209 return NULL_TREE;
11210
11211 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11212 if (!fn)
11213 return NULL_TREE;
11214
11215 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11216 return build_call_expr_loc (loc, fn, 2, s1, s2);
11217 }
11218 }
11219
11220 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11221 to the call, and TYPE is its return type.
11222
11223 Return NULL_TREE if no simplification was possible, otherwise return the
11224 simplified form of the call as a tree.
11225
11226 The simplified form may be a constant or other expression which
11227 computes the same value, but in a more efficient manner (including
11228 calls to other builtin functions).
11229
11230 The call may contain arguments which need to be evaluated, but
11231 which are not useful to determine the result of the call. In
11232 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11233 COMPOUND_EXPR will be an argument which must be evaluated.
11234 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11235 COMPOUND_EXPR in the chain will contain the tree for the simplified
11236 form of the builtin function call. */
11237
11238 static tree
11239 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11240 {
11241 if (!validate_arg (s1, POINTER_TYPE)
11242 || !validate_arg (s2, POINTER_TYPE))
11243 return NULL_TREE;
11244 else
11245 {
11246 tree fn;
11247 const char *p1, *p2;
11248
11249 p2 = c_getstr (s2);
11250 if (p2 == NULL)
11251 return NULL_TREE;
11252
11253 p1 = c_getstr (s1);
11254 if (p1 != NULL)
11255 {
11256 const char *r = strpbrk (p1, p2);
11257 tree tem;
11258
11259 if (r == NULL)
11260 return build_int_cst (TREE_TYPE (s1), 0);
11261
11262 /* Return an offset into the constant string argument. */
11263 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11264 s1, size_int (r - p1));
11265 return fold_convert_loc (loc, type, tem);
11266 }
11267
11268 if (p2[0] == '\0')
11269 /* strpbrk(x, "") == NULL.
11270 Evaluate and ignore s1 in case it had side-effects. */
11271 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11272
11273 if (p2[1] != '\0')
11274 return NULL_TREE; /* Really call strpbrk. */
11275
11276 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11277 if (!fn)
11278 return NULL_TREE;
11279
11280 /* New argument list transforming strpbrk(s1, s2) to
11281 strchr(s1, s2[0]). */
11282 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11283 }
11284 }
11285
11286 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11287 to the call.
11288
11289 Return NULL_TREE if no simplification was possible, otherwise return the
11290 simplified form of the call as a tree.
11291
11292 The simplified form may be a constant or other expression which
11293 computes the same value, but in a more efficient manner (including
11294 calls to other builtin functions).
11295
11296 The call may contain arguments which need to be evaluated, but
11297 which are not useful to determine the result of the call. In
11298 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11299 COMPOUND_EXPR will be an argument which must be evaluated.
11300 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11301 COMPOUND_EXPR in the chain will contain the tree for the simplified
11302 form of the builtin function call. */
11303
11304 static tree
11305 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11306 {
11307 if (!validate_arg (dst, POINTER_TYPE)
11308 || !validate_arg (src, POINTER_TYPE))
11309 return NULL_TREE;
11310 else
11311 {
11312 const char *p = c_getstr (src);
11313
11314 /* If the string length is zero, return the dst parameter. */
11315 if (p && *p == '\0')
11316 return dst;
11317
11318 if (optimize_insn_for_speed_p ())
11319 {
11320 /* See if we can store by pieces into (dst + strlen(dst)). */
11321 tree newdst, call;
11322 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11323 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11324
11325 if (!strlen_fn || !strcpy_fn)
11326 return NULL_TREE;
11327
11328 /* If we don't have a movstr we don't want to emit an strcpy
11329 call. We have to do that if the length of the source string
11330 isn't computable (in that case we can use memcpy probably
11331 later expanding to a sequence of mov instructions). If we
11332 have movstr instructions we can emit strcpy calls. */
11333 if (!HAVE_movstr)
11334 {
11335 tree len = c_strlen (src, 1);
11336 if (! len || TREE_SIDE_EFFECTS (len))
11337 return NULL_TREE;
11338 }
11339
11340 /* Stabilize the argument list. */
11341 dst = builtin_save_expr (dst);
11342
11343 /* Create strlen (dst). */
11344 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11345 /* Create (dst p+ strlen (dst)). */
11346
11347 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11348 TREE_TYPE (dst), dst, newdst);
11349 newdst = builtin_save_expr (newdst);
11350
11351 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11352 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11353 }
11354 return NULL_TREE;
11355 }
11356 }
11357
11358 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11359 arguments to the call.
11360
11361 Return NULL_TREE if no simplification was possible, otherwise return the
11362 simplified form of the call as a tree.
11363
11364 The simplified form may be a constant or other expression which
11365 computes the same value, but in a more efficient manner (including
11366 calls to other builtin functions).
11367
11368 The call may contain arguments which need to be evaluated, but
11369 which are not useful to determine the result of the call. In
11370 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11371 COMPOUND_EXPR will be an argument which must be evaluated.
11372 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11373 COMPOUND_EXPR in the chain will contain the tree for the simplified
11374 form of the builtin function call. */
11375
11376 static tree
11377 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11378 {
11379 if (!validate_arg (dst, POINTER_TYPE)
11380 || !validate_arg (src, POINTER_TYPE)
11381 || !validate_arg (len, INTEGER_TYPE))
11382 return NULL_TREE;
11383 else
11384 {
11385 const char *p = c_getstr (src);
11386
11387 /* If the requested length is zero, or the src parameter string
11388 length is zero, return the dst parameter. */
11389 if (integer_zerop (len) || (p && *p == '\0'))
11390 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11391
11392 /* If the requested len is greater than or equal to the string
11393 length, call strcat. */
11394 if (TREE_CODE (len) == INTEGER_CST && p
11395 && compare_tree_int (len, strlen (p)) >= 0)
11396 {
11397 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11398
11399 /* If the replacement _DECL isn't initialized, don't do the
11400 transformation. */
11401 if (!fn)
11402 return NULL_TREE;
11403
11404 return build_call_expr_loc (loc, fn, 2, dst, src);
11405 }
11406 return NULL_TREE;
11407 }
11408 }
11409
11410 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11411 to the call.
11412
11413 Return NULL_TREE if no simplification was possible, otherwise return the
11414 simplified form of the call as a tree.
11415
11416 The simplified form may be a constant or other expression which
11417 computes the same value, but in a more efficient manner (including
11418 calls to other builtin functions).
11419
11420 The call may contain arguments which need to be evaluated, but
11421 which are not useful to determine the result of the call. In
11422 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11423 COMPOUND_EXPR will be an argument which must be evaluated.
11424 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11425 COMPOUND_EXPR in the chain will contain the tree for the simplified
11426 form of the builtin function call. */
11427
11428 static tree
11429 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11430 {
11431 if (!validate_arg (s1, POINTER_TYPE)
11432 || !validate_arg (s2, POINTER_TYPE))
11433 return NULL_TREE;
11434 else
11435 {
11436 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11437
11438 /* If both arguments are constants, evaluate at compile-time. */
11439 if (p1 && p2)
11440 {
11441 const size_t r = strspn (p1, p2);
11442 return size_int (r);
11443 }
11444
11445 /* If either argument is "", return NULL_TREE. */
11446 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11447 /* Evaluate and ignore both arguments in case either one has
11448 side-effects. */
11449 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11450 s1, s2);
11451 return NULL_TREE;
11452 }
11453 }
11454
11455 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11456 to the call.
11457
11458 Return NULL_TREE if no simplification was possible, otherwise return the
11459 simplified form of the call as a tree.
11460
11461 The simplified form may be a constant or other expression which
11462 computes the same value, but in a more efficient manner (including
11463 calls to other builtin functions).
11464
11465 The call may contain arguments which need to be evaluated, but
11466 which are not useful to determine the result of the call. In
11467 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11468 COMPOUND_EXPR will be an argument which must be evaluated.
11469 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11470 COMPOUND_EXPR in the chain will contain the tree for the simplified
11471 form of the builtin function call. */
11472
11473 static tree
11474 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11475 {
11476 if (!validate_arg (s1, POINTER_TYPE)
11477 || !validate_arg (s2, POINTER_TYPE))
11478 return NULL_TREE;
11479 else
11480 {
11481 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11482
11483 /* If both arguments are constants, evaluate at compile-time. */
11484 if (p1 && p2)
11485 {
11486 const size_t r = strcspn (p1, p2);
11487 return size_int (r);
11488 }
11489
11490 /* If the first argument is "", return NULL_TREE. */
11491 if (p1 && *p1 == '\0')
11492 {
11493 /* Evaluate and ignore argument s2 in case it has
11494 side-effects. */
11495 return omit_one_operand_loc (loc, size_type_node,
11496 size_zero_node, s2);
11497 }
11498
11499 /* If the second argument is "", return __builtin_strlen(s1). */
11500 if (p2 && *p2 == '\0')
11501 {
11502 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11503
11504 /* If the replacement _DECL isn't initialized, don't do the
11505 transformation. */
11506 if (!fn)
11507 return NULL_TREE;
11508
11509 return build_call_expr_loc (loc, fn, 1, s1);
11510 }
11511 return NULL_TREE;
11512 }
11513 }
11514
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return NULL_TREE if no simplification
   was possible.  */
11521
11522 tree
11523 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11524 bool ignore, bool unlocked, tree len)
11525 {
11526 /* If we're using an unlocked function, assume the other unlocked
11527 functions exist explicitly. */
11528 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11529 : implicit_built_in_decls[BUILT_IN_FPUTC];
11530 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11531 : implicit_built_in_decls[BUILT_IN_FWRITE];
11532
11533 /* If the return value is used, don't do the transformation. */
11534 if (!ignore)
11535 return NULL_TREE;
11536
11537 /* Verify the arguments in the original call. */
11538 if (!validate_arg (arg0, POINTER_TYPE)
11539 || !validate_arg (arg1, POINTER_TYPE))
11540 return NULL_TREE;
11541
11542 if (! len)
11543 len = c_strlen (arg0, 0);
11544
11545 /* Get the length of the string passed to fputs. If the length
11546 can't be determined, punt. */
11547 if (!len
11548 || TREE_CODE (len) != INTEGER_CST)
11549 return NULL_TREE;
11550
11551 switch (compare_tree_int (len, 1))
11552 {
11553 case -1: /* length is 0, delete the call entirely . */
11554 return omit_one_operand_loc (loc, integer_type_node,
11555 integer_zero_node, arg1);;
11556
11557 case 0: /* length is 1, call fputc. */
11558 {
11559 const char *p = c_getstr (arg0);
11560
11561 if (p != NULL)
11562 {
11563 if (fn_fputc)
11564 return build_call_expr_loc (loc, fn_fputc, 2,
11565 build_int_cst (NULL_TREE, p[0]), arg1);
11566 else
11567 return NULL_TREE;
11568 }
11569 }
11570 /* FALLTHROUGH */
11571 case 1: /* length is greater than 1, call fwrite. */
11572 {
11573 /* If optimizing for size keep fputs. */
11574 if (optimize_function_for_size_p (cfun))
11575 return NULL_TREE;
11576 /* New argument list transforming fputs(string, stream) to
11577 fwrite(string, 1, len, stream). */
11578 if (fn_fwrite)
11579 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11580 size_one_node, len, arg1);
11581 else
11582 return NULL_TREE;
11583 }
11584 default:
11585 gcc_unreachable ();
11586 }
11587 return NULL_TREE;
11588 }
11589
11590 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11591 produced. False otherwise. This is done so that we don't output the error
11592 or warning twice or three times. */
11593
11594 bool
11595 fold_builtin_next_arg (tree exp, bool va_start_p)
11596 {
11597 tree fntype = TREE_TYPE (current_function_decl);
11598 int nargs = call_expr_nargs (exp);
11599 tree arg;
11600
11601 if (!stdarg_p (fntype))
11602 {
11603 error ("%<va_start%> used in function with fixed args");
11604 return true;
11605 }
11606
11607 if (va_start_p)
11608 {
11609 if (va_start_p && (nargs != 2))
11610 {
11611 error ("wrong number of arguments to function %<va_start%>");
11612 return true;
11613 }
11614 arg = CALL_EXPR_ARG (exp, 1);
11615 }
11616 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11617 when we checked the arguments and if needed issued a warning. */
11618 else
11619 {
11620 if (nargs == 0)
11621 {
11622 /* Evidently an out of date version of <stdarg.h>; can't validate
11623 va_start's second argument, but can still work as intended. */
11624 warning (0, "%<__builtin_next_arg%> called without an argument");
11625 return true;
11626 }
11627 else if (nargs > 1)
11628 {
11629 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11630 return true;
11631 }
11632 arg = CALL_EXPR_ARG (exp, 0);
11633 }
11634
11635 if (TREE_CODE (arg) == SSA_NAME)
11636 arg = SSA_NAME_VAR (arg);
11637
11638 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11639 or __builtin_next_arg (0) the first time we see it, after checking
11640 the arguments and if needed issuing a warning. */
11641 if (!integer_zerop (arg))
11642 {
11643 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11644
11645 /* Strip off all nops for the sake of the comparison. This
11646 is not quite the same as STRIP_NOPS. It does more.
11647 We must also strip off INDIRECT_EXPR for C++ reference
11648 parameters. */
11649 while (CONVERT_EXPR_P (arg)
11650 || TREE_CODE (arg) == INDIRECT_REF)
11651 arg = TREE_OPERAND (arg, 0);
11652 if (arg != last_parm)
11653 {
11654 /* FIXME: Sometimes with the tree optimizers we can get the
11655 not the last argument even though the user used the last
11656 argument. We just warn and set the arg to be the last
11657 argument so that we will get wrong-code because of
11658 it. */
11659 warning (0, "second parameter of %<va_start%> not last named argument");
11660 }
11661
11662 /* Undefined by C99 7.15.1.4p4 (va_start):
11663 "If the parameter parmN is declared with the register storage
11664 class, with a function or array type, or with a type that is
11665 not compatible with the type that results after application of
11666 the default argument promotions, the behavior is undefined."
11667 */
11668 else if (DECL_REGISTER (arg))
11669 warning (0, "undefined behaviour when second parameter of "
11670 "%<va_start%> is declared with %<register%> storage");
11671
11672 /* We want to verify the second parameter just once before the tree
11673 optimizers are run and then avoid keeping it in the tree,
11674 as otherwise we could warn even for correct code like:
11675 void foo (int i, ...)
11676 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11677 if (va_start_p)
11678 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11679 else
11680 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11681 }
11682 return false;
11683 }
11684
11685
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

static tree
fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
		      tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  /* target_percent etc. are lazily initialized from the target's
     execution character set; bail out if that fails.  */
  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
	return NULL_TREE;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);
      if (!ignored)
	/* sprintf returns the number of characters written; for a
	   %-free format that is simply the format's length.  */
	retval = build_int_cst (NULL_TREE, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
	return NULL_TREE;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return NULL_TREE;

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      if (!ignored)
	{
	  /* The return value is only computable when the length of ORIG
	     is a compile-time constant.  */
	  retval = c_strlen (orig, 1);
	  if (!retval || TREE_CODE (retval) != INTEGER_CST)
	    return NULL_TREE;
	}
      call = build_call_expr_loc (loc, fn, 2, dest, orig);
    }

  if (call && retval)
    {
      /* Convert the computed length to sprintf's return type and
	 sequence it after the strcpy call with a COMPOUND_EXPR.  */
      retval = fold_convert_loc
	(loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
	 retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
11773
11774 /* Expand a call EXP to __builtin_object_size. */
11775
11776 rtx
11777 expand_builtin_object_size (tree exp)
11778 {
11779 tree ost;
11780 int object_size_type;
11781 tree fndecl = get_callee_fndecl (exp);
11782
11783 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11784 {
11785 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11786 exp, fndecl);
11787 expand_builtin_trap ();
11788 return const0_rtx;
11789 }
11790
11791 ost = CALL_EXPR_ARG (exp, 1);
11792 STRIP_NOPS (ost);
11793
11794 if (TREE_CODE (ost) != INTEGER_CST
11795 || tree_int_cst_sgn (ost) < 0
11796 || compare_tree_int (ost, 3) > 0)
11797 {
11798 error ("%Klast argument of %D is not integer constant between 0 and 3",
11799 exp, fndecl);
11800 expand_builtin_trap ();
11801 return const0_rtx;
11802 }
11803
11804 object_size_type = tree_low_cst (ost, 0);
11805
11806 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11807 }
11808
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* The second argument is an integer fill value for memset, a source
     pointer for the others.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* SIZE is the destination object size as supplied by the front end;
     if it is not a compile-time constant we cannot reason here.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* All-ones SIZE means "object size unknown".  Otherwise a constant
	 LEN larger than SIZE is a guaranteed overflow: warn and fall back
	 to the normal (checking) library call.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMCPY];
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMPCPY];
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = built_in_decls[BUILT_IN_MEMMOVE];
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = built_in_decls[BUILT_IN_MEMSET];
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Build the unchecked call, preserving the original's tail-call
	 flag, and expand it.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN rather than DEST.  */
	  expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
11930
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is a call to one of the object-size-checking builtins; FCODE
   identifies which one, since the positions of the length and object
   size arguments differ between them.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* Nonzero when LEN is a source string whose strlen must be taken.  */
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  /* Pick out the length-like argument and the object-size argument.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* All-ones SIZE means the destination object size is unknown.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* LEN is the source string; replace it by its constant length,
	 if known.  Writing strlen (src) + 1 bytes overflows whenever
	 strlen (src) >= SIZE.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* LEN >= SIZE but the source length is unknown, so the call
	     only *might* overflow -- use the weaker diagnostic.  */
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
11998
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  The output length is only
   computable for a %-free format string, or (for __sprintf_chk) a
   "%s" format with a string-literal argument.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* All-ones SIZE means the destination object size is unknown.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
	return;
    }
  else
    return;

  /* sprintf writes LEN + 1 bytes including the terminating NUL, so the
     buffer overflows whenever LEN >= SIZE.  */
  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
12055
12056 /* Emit warning if a free is called with address of a variable. */
12057
12058 static void
12059 maybe_emit_free_warning (tree exp)
12060 {
12061 tree arg = CALL_EXPR_ARG (exp, 0);
12062
12063 STRIP_NOPS (arg);
12064 if (TREE_CODE (arg) != ADDR_EXPR)
12065 return;
12066
12067 arg = get_base_address (TREE_OPERAND (arg, 0));
12068 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12069 return;
12070
12071 if (SSA_VAR_P (arg))
12072 warning_at (tree_nonartificial_location (exp),
12073 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12074 else
12075 warning_at (tree_nonartificial_location (exp),
12076 0, "%Kattempt to free a non-heap object", exp);
12077 }
12078
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  OST must be an integer constant in [0, 3] selecting the
   object size type.  Returns a size_t constant, or NULL_TREE when the
   size is not yet known and folding should be retried later.  */

tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The second argument must be a literal constant between 0 and 3.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      /* Only fold when the computed size is representable in size_t.  */
      if (double_int_fits_to_tree_p (size_type_node,
				     uhwi_to_double_int (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      /* The "unknown" answer is -1 for types 0/1 and 0 for types 2/3;
	 don't fold to that here -- a later pass may know better.  */
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
	  && double_int_fits_to_tree_p (size_type_node,
					uhwi_to_double_int (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
12128
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  FNDECL supplies the call's return type.
   Returns the folded replacement, or NULL_TREE to keep the call.  */

tree
fold_builtin_memory_chk (location_t loc, tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* The second argument is an integer fill value for memset_chk.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	/* omit_one_operand still evaluates LEN for side-effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				     dest, len);
      else
	{
	  tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
				       dest, len);
	  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  if (! host_integerp (size, 1))
    return NULL_TREE;

  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      /* Keep the checking call unless the copy provably fits.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMCPY];
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMPCPY];
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = built_in_decls[BUILT_IN_MEMMOVE];
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = built_in_decls[BUILT_IN_MEMSET];
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
12224
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  FNDECL supplies the call's return
   type.  Returns the folded replacement, or NULL_TREE to keep the call.  */

tree
fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
			 tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* All-ones SIZE means the object size is unknown; skip straight to
     replacing the call with the unchecked variant.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = built_in_decls[BUILT_IN_STRCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 3, dest, src, size);
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_TREE;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
	      return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				       build_call_expr_loc (loc, fn, 4,
							    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      /* Keep the checking call unless the copy provably fits.  */
      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
		      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}
12305
12306 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12307 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12308 length passed as third argument. */
12309
12310 tree
12311 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12312 tree len, tree size, tree maxlen)
12313 {
12314 tree fn;
12315
12316 if (!validate_arg (dest, POINTER_TYPE)
12317 || !validate_arg (src, POINTER_TYPE)
12318 || !validate_arg (len, INTEGER_TYPE)
12319 || !validate_arg (size, INTEGER_TYPE))
12320 return NULL_TREE;
12321
12322 if (! host_integerp (size, 1))
12323 return NULL_TREE;
12324
12325 if (! integer_all_onesp (size))
12326 {
12327 if (! host_integerp (len, 1))
12328 {
12329 /* If LEN is not constant, try MAXLEN too.
12330 For MAXLEN only allow optimizing into non-_ocs function
12331 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12332 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12333 return NULL_TREE;
12334 }
12335 else
12336 maxlen = len;
12337
12338 if (tree_int_cst_lt (size, maxlen))
12339 return NULL_TREE;
12340 }
12341
12342 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12343 fn = built_in_decls[BUILT_IN_STRNCPY];
12344 if (!fn)
12345 return NULL_TREE;
12346
12347 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12348 }
12349
12350 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12351 are the arguments to the call. */
12352
12353 static tree
12354 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12355 tree src, tree size)
12356 {
12357 tree fn;
12358 const char *p;
12359
12360 if (!validate_arg (dest, POINTER_TYPE)
12361 || !validate_arg (src, POINTER_TYPE)
12362 || !validate_arg (size, INTEGER_TYPE))
12363 return NULL_TREE;
12364
12365 p = c_getstr (src);
12366 /* If the SRC parameter is "", return DEST. */
12367 if (p && *p == '\0')
12368 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12369
12370 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12371 return NULL_TREE;
12372
12373 /* If __builtin_strcat_chk is used, assume strcat is available. */
12374 fn = built_in_decls[BUILT_IN_STRCAT];
12375 if (!fn)
12376 return NULL_TREE;
12377
12378 return build_call_expr_loc (loc, fn, 2, dest, src);
12379 }
12380
12381 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12382 LEN, and SIZE. */
12383
12384 static tree
12385 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12386 tree dest, tree src, tree len, tree size)
12387 {
12388 tree fn;
12389 const char *p;
12390
12391 if (!validate_arg (dest, POINTER_TYPE)
12392 || !validate_arg (src, POINTER_TYPE)
12393 || !validate_arg (size, INTEGER_TYPE)
12394 || !validate_arg (size, INTEGER_TYPE))
12395 return NULL_TREE;
12396
12397 p = c_getstr (src);
12398 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12399 if (p && *p == '\0')
12400 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12401 else if (integer_zerop (len))
12402 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12403
12404 if (! host_integerp (size, 1))
12405 return NULL_TREE;
12406
12407 if (! integer_all_onesp (size))
12408 {
12409 tree src_len = c_strlen (src, 1);
12410 if (src_len
12411 && host_integerp (src_len, 1)
12412 && host_integerp (len, 1)
12413 && ! tree_int_cst_lt (len, src_len))
12414 {
12415 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12416 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12417 if (!fn)
12418 return NULL_TREE;
12419
12420 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12421 }
12422 return NULL_TREE;
12423 }
12424
12425 /* If __builtin_strncat_chk is used, assume strncat is available. */
12426 fn = built_in_decls[BUILT_IN_STRNCAT];
12427 if (!fn)
12428 return NULL_TREE;
12429
12430 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12431 }
12432
/* Fold a call EXP to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
fold_builtin_sprintf_chk (location_t loc, tree exp,
			  enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call: the argument
     order is (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* LEN will hold the computed output length, if it can be computed.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* A %-free format with extra sprintf_chk arguments is
	     suspicious; only trust the length for the 4-argument form.  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = CALL_EXPR_ARG (exp, 4);
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless SIZE is the all-ones "unknown" marker, the output must
     provably fit (LEN < SIZE leaves room for the NUL).  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments; keep dest, fmt and the varargs.  */
  return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
}
12524
/* Fold a call EXP to {,v}snprintf.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

tree
fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
			   enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call: the argument
     order is (dest, len, flag, size, fmt, ...).  */
  if (call_expr_nargs (exp) < 5)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* All-ones SIZE means the object size is unknown; otherwise prove
     SIZE >= the write bound.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments; keep dest, len, fmt, varargs.  */
  return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
}
12601
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
		     tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation:
     putchar/puts return values differ from printf's.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
      fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
    }
  else
    {
      fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
      fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* A "%s" format cannot be handled for the va_list variants:
	     the string argument is hidden inside the va_list.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (NULL_TREE, str[0]);
	  if (fn_putchar)
	    call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = XALLOCAVEC (char, len);
	      memcpy (newstr, str, len - 1);
	      newstr[len - 1] = 0;

	      newarg = build_string_literal (len, newstr);
	      if (fn_puts)
		call = build_call_expr_loc (loc, fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  if (!call)
    return NULL_TREE;

  /* Give the replacement call printf's return type.  */
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12737
12738 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12739 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12740 more than 3 arguments, and ARG may be null in the 2-argument case.
12741
12742 Return NULL_TREE if no simplification was possible, otherwise return the
12743 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12744 code of the function to be simplified. */
12745
static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
		      tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation; the
     replacements below do not produce fprintf's return value.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
      fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
    }
  else
    {
      fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
      fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, transform to fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A non-va_list call with an extra argument but no conversion
	 in the format string cannot be simplified here.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  if (!call)
    return NULL_TREE;
  /* Convert the replacement call to the declared return type of the
     original builtin (its value is unused; see IGNORE above).  */
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12836
12837 /* Initialize format string characters in the target charset. */
12838
12839 static bool
12840 init_target_chars (void)
12841 {
12842 static bool init;
12843 if (!init)
12844 {
12845 target_newline = lang_hooks.to_target_charset ('\n');
12846 target_percent = lang_hooks.to_target_charset ('%');
12847 target_c = lang_hooks.to_target_charset ('c');
12848 target_s = lang_hooks.to_target_charset ('s');
12849 if (target_newline == 0 || target_percent == 0 || target_c == 0
12850 || target_s == 0)
12851 return false;
12852
12853 target_percent_c[0] = target_percent;
12854 target_percent_c[1] = target_c;
12855 target_percent_c[2] = '\0';
12856
12857 target_percent_s[0] = target_percent;
12858 target_percent_s[1] = target_s;
12859 target_percent_s[2] = '\0';
12860
12861 target_percent_s_newline[0] = target_percent;
12862 target_percent_s_newline[1] = target_s;
12863 target_percent_s_newline[2] = target_newline;
12864 target_percent_s_newline[3] = '\0';
12865
12866 init = true;
12867 }
12868 return true;
12869 }
12870
12871 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12872 and no overflow/underflow occurred. INEXACT is true if M was not
12873 exactly calculated. TYPE is the tree type for the result. This
12874 function assumes that you cleared the MPFR flags and then
12875 calculated M to see if anything subsequently set a flag prior to
12876 entering this function. Return NULL_TREE if any checks fail. */
12877
12878 static tree
12879 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12880 {
12881 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12882 overflow/underflow occurred. If -frounding-math, proceed iff the
12883 result of calling FUNC was exact. */
12884 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12885 && (!flag_rounding_math || !inexact))
12886 {
12887 REAL_VALUE_TYPE rr;
12888
12889 real_from_mpfr (&rr, m, type, GMP_RNDN);
12890 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12891 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12892 but the mpft_t is not, then we underflowed in the
12893 conversion. */
12894 if (real_isfinite (&rr)
12895 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12896 {
12897 REAL_VALUE_TYPE rmode;
12898
12899 real_convert (&rmode, TYPE_MODE (type), &rr);
12900 /* Proceed iff the specified mode can hold the value. */
12901 if (real_identical (&rmode, &rr))
12902 return build_real (type, rmode);
12903 }
12904 }
12905 return NULL_TREE;
12906 }
12907
12908 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12909 number and no overflow/underflow occurred. INEXACT is true if M
12910 was not exactly calculated. TYPE is the tree type for the result.
12911 This function assumes that you cleared the MPFR flags and then
12912 calculated M to see if anything subsequently set a flag prior to
12913 entering this function. Return NULL_TREE if any checks fail, if
12914 FORCE_CONVERT is true, then bypass the checks. */
12915
static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  FORCE_CONVERT bypasses every
     check, at each of the three stages below.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* TYPE is complex here; TREE_TYPE (type) is its component real
	 type (see the build_complex call below).  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}
12954
12955 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12956 FUNC on it and return the resulting value as a tree with type TYPE.
12957 If MIN and/or MAX are not NULL, then the supplied ARG must be
12958 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12959 acceptable values, otherwise they are not. The mpfr precision is
12960 set to the precision of TYPE. We assume that function FUNC returns
12961 zero if the result could be calculated exactly within the requested
12962 precision. */
12963
12964 static tree
12965 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12966 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12967 bool inclusive)
12968 {
12969 tree result = NULL_TREE;
12970
12971 STRIP_NOPS (arg);
12972
12973 /* To proceed, MPFR must exactly represent the target floating point
12974 format, which only happens when the target base equals two. */
12975 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12976 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12977 {
12978 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12979
12980 if (real_isfinite (ra)
12981 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12982 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12983 {
12984 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12985 const int prec = fmt->p;
12986 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12987 int inexact;
12988 mpfr_t m;
12989
12990 mpfr_init2 (m, prec);
12991 mpfr_from_real (m, ra, GMP_RNDN);
12992 mpfr_clear_flags ();
12993 inexact = func (m, m, rnd);
12994 result = do_mpfr_ckconv (m, type, inexact);
12995 mpfr_clear (m);
12996 }
12997 }
12998
12999 return result;
13000 }
13001
13002 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13003 FUNC on it and return the resulting value as a tree with type TYPE.
13004 The mpfr precision is set to the precision of TYPE. We assume that
13005 function FUNC returns zero if the result could be calculated
13006 exactly within the requested precision. */
13007
13008 static tree
13009 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13010 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13011 {
13012 tree result = NULL_TREE;
13013
13014 STRIP_NOPS (arg1);
13015 STRIP_NOPS (arg2);
13016
13017 /* To proceed, MPFR must exactly represent the target floating point
13018 format, which only happens when the target base equals two. */
13019 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13020 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13021 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13022 {
13023 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13024 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13025
13026 if (real_isfinite (ra1) && real_isfinite (ra2))
13027 {
13028 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13029 const int prec = fmt->p;
13030 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13031 int inexact;
13032 mpfr_t m1, m2;
13033
13034 mpfr_inits2 (prec, m1, m2, NULL);
13035 mpfr_from_real (m1, ra1, GMP_RNDN);
13036 mpfr_from_real (m2, ra2, GMP_RNDN);
13037 mpfr_clear_flags ();
13038 inexact = func (m1, m1, m2, rnd);
13039 result = do_mpfr_ckconv (m1, type, inexact);
13040 mpfr_clears (m1, m2, NULL);
13041 }
13042 }
13043
13044 return result;
13045 }
13046
13047 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13048 FUNC on it and return the resulting value as a tree with type TYPE.
13049 The mpfr precision is set to the precision of TYPE. We assume that
13050 function FUNC returns zero if the result could be calculated
13051 exactly within the requested precision. */
13052
static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      /* Fold only when every operand is finite (not NaN or Inf).  */
      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  /* Use the format's native rounding behavior.  */
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  /* NULL terminates the mpfr_inits2/mpfr_clears varargs.  */
	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_from_real (m3, ra3, GMP_RNDN);
	  /* Clear MPFR's sticky flags so do_mpfr_ckconv only sees
	     overflow/underflow raised by FUNC itself.  */
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
13095
13096 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13097 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13098 If ARG_SINP and ARG_COSP are NULL then the result is returned
13099 as a complex value.
13100 The type is taken from the type of ARG and is used for setting the
13101 precision of the calculation and results. */
13102
static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* One MPFR call produces both results; INEXACT covers both,
	     so both conversions below get the same inexact flag.  */
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  Note the
		 ordering: cosine is the real part, sine the imaginary
		 part — presumably for cexpi-style callers; confirm
		 against the call sites.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
	        {
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
13165
13166 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13167 two-argument mpfr order N Bessel function FUNC on them and return
13168 the resulting value as a tree with type TYPE. The mpfr precision
13169 is set to the precision of TYPE. We assume that function FUNC
13170 returns zero if the result could be calculated exactly within the
13171 requested precision. */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* N must fit in a host long since that is what FUNC takes;
	 HOST_WIDE_INT can be wider than long.  Also honor the
	 optional lower bound MIN on the real argument.  */
      if (n == (long)n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Clear MPFR's sticky flags so do_mpfr_ckconv only sees
	     flags raised by FUNC itself.  */
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
13212
13213 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13214 the pointer *(ARG_QUO) and return the result. The type is taken
13215 from the type of ARG0 and is used for setting the precision of the
13216 calculation and results. */
13217
static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
	        {
		  /* Set the value.  The store is given side effects so
		     it survives later folding.  */
		  tree result_quo = fold_build2 (MODIFY_EXPR,
						 TREE_TYPE (arg_quo), arg_quo,
						 build_int_cst (NULL, integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
13285
13286 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13287 resulting value as a tree with type TYPE. The mpfr precision is
13288 set to the precision of TYPE. We assume that this mpfr function
13289 returns zero if the result could be calculated exactly within the
13290 requested precision. In addition, the integer pointer represented
13291 by ARG_SG will be dereferenced and set to the appropriate signgam
13292 (-1,1) value. */
13293
static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* SG receives the sign of gamma(ARG) from mpfr_lgamma.  */
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (NULL, sg));
	      /* Mark the store so later folding keeps it.  */
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
13350
13351 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13352 function FUNC on it and return the resulting value as a tree with
13353 type TYPE. The mpfr precision is set to the precision of TYPE. We
13354 assume that function FUNC returns zero if the result could be
13355 calculated exactly within the requested precision. */
13356
static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  ARG
     must be a complex constant whose component type is REAL_TYPE.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* Fold only for finite parts (not NaN or Inf).  */
      if (real_isfinite (re) && real_isfinite (im))
        {
	  /* Precision and rounding come from TYPE's component format.  */
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref(m), re, rnd);
	  mpfr_from_real (mpc_imagref(m), im, rnd);
	  /* Clear MPFR's sticky flags so do_mpc_ckconv only sees
	     flags raised by FUNC itself.  */
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
13395
13396 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13397 mpc function FUNC on it and return the resulting value as a tree
13398 with type TYPE. The mpfr precision is set to the precision of
13399 TYPE. We assume that function FUNC returns zero if the result
13400 could be calculated exactly within the requested precision. If
13401 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13402 in the arguments and/or results. */
13403
tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Both
     arguments must be complex constants with REAL_TYPE components.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      /* DO_NONFINITE allows folding even with NaN/Inf parts; it is
	 also passed to do_mpc_ckconv as force_convert below.  */
      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
        {
	  /* Precision and rounding come from TYPE's component format.  */
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref(m0), re0, rnd);
	  mpfr_from_real (mpc_imagref(m0), im0, rnd);
	  mpfr_from_real (mpc_realref(m1), re1, rnd);
	  mpfr_from_real (mpc_imagref(m1), im1, rnd);
	  /* Clear MPFR's sticky flags so do_mpc_ckconv only sees
	     flags raised by FUNC itself.  */
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
13454
13455 /* FIXME tuples.
13456 The functions below provide an alternate interface for folding
13457 builtin function calls presented as GIMPLE_CALL statements rather
13458 than as CALL_EXPRs. The folded result is still expressed as a
13459 tree. There is too much code duplication in the handling of
13460 varargs functions, and a more intrusive re-factoring would permit
13461 better sharing of code between the tree and statement-based
13462 versions of these functions. */
13463
13464 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13465 along with N new arguments specified as the "..." parameters. SKIP
13466 is the number of arguments in STMT to be omitted. This function is used
13467 to do varargs-to-varargs transformations. */
13468
13469 static tree
13470 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13471 {
13472 int oldnargs = gimple_call_num_args (stmt);
13473 int nargs = oldnargs - skip + n;
13474 tree fntype = TREE_TYPE (fndecl);
13475 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13476 tree *buffer;
13477 int i, j;
13478 va_list ap;
13479 location_t loc = gimple_location (stmt);
13480
13481 buffer = XALLOCAVEC (tree, nargs);
13482 va_start (ap, n);
13483 for (i = 0; i < n; i++)
13484 buffer[i] = va_arg (ap, tree);
13485 va_end (ap);
13486 for (j = skip; j < oldnargs; j++, i++)
13487 buffer[i] = gimple_call_arg (stmt, j);
13488
13489 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13490 }
13491
13492 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13493 a normal call should be emitted rather than expanding the function
13494 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13495
static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call:
     __sprintf_chk (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = gimple_call_arg (stmt, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = gimple_call_arg (stmt, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = gimple_call_arg (stmt, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = gimple_call_arg (stmt, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* SIZE (the object-size bound) must be a known constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
        {
	  /* For sprintf_chk, only if there are no trailing varargs;
	     the vsprintf_chk variant can never consume extra args.  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
        {
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (validate_arg (arg, POINTER_TYPE))
	        {
		  len = c_strlen (arg, 1);
		  /* Only a known-constant length is usable.  */
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* A SIZE of all-ones means the object size is unknown/unlimited, so
     no overflow check is needed; otherwise LEN must be known and fit.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the first 4 arguments (dest/flag/size/fmt) and rebuild the
     call as {,v}sprintf (dest, fmt, ...).  */
  return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
}
13582
13583 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13584 a normal call should be emitted rather than expanding the function
13585 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13586 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13587 passed as second argument. */
13588
13589 tree
13590 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13591 enum built_in_function fcode)
13592 {
13593 tree dest, size, len, fn, fmt, flag;
13594 const char *fmt_str;
13595
13596 /* Verify the required arguments in the original call. */
13597 if (gimple_call_num_args (stmt) < 5)
13598 return NULL_TREE;
13599 dest = gimple_call_arg (stmt, 0);
13600 if (!validate_arg (dest, POINTER_TYPE))
13601 return NULL_TREE;
13602 len = gimple_call_arg (stmt, 1);
13603 if (!validate_arg (len, INTEGER_TYPE))
13604 return NULL_TREE;
13605 flag = gimple_call_arg (stmt, 2);
13606 if (!validate_arg (flag, INTEGER_TYPE))
13607 return NULL_TREE;
13608 size = gimple_call_arg (stmt, 3);
13609 if (!validate_arg (size, INTEGER_TYPE))
13610 return NULL_TREE;
13611 fmt = gimple_call_arg (stmt, 4);
13612 if (!validate_arg (fmt, POINTER_TYPE))
13613 return NULL_TREE;
13614
13615 if (! host_integerp (size, 1))
13616 return NULL_TREE;
13617
13618 if (! integer_all_onesp (size))
13619 {
13620 if (! host_integerp (len, 1))
13621 {
13622 /* If LEN is not constant, try MAXLEN too.
13623 For MAXLEN only allow optimizing into non-_ocs function
13624 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13625 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13626 return NULL_TREE;
13627 }
13628 else
13629 maxlen = len;
13630
13631 if (tree_int_cst_lt (size, maxlen))
13632 return NULL_TREE;
13633 }
13634
13635 if (!init_target_chars ())
13636 return NULL_TREE;
13637
13638 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13639 or if format doesn't contain % chars or is "%s". */
13640 if (! integer_zerop (flag))
13641 {
13642 fmt_str = c_getstr (fmt);
13643 if (fmt_str == NULL)
13644 return NULL_TREE;
13645 if (strchr (fmt_str, target_percent) != NULL
13646 && strcmp (fmt_str, target_percent_s))
13647 return NULL_TREE;
13648 }
13649
13650 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13651 available. */
13652 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13653 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13654 if (!fn)
13655 return NULL_TREE;
13656
13657 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13658 }
13659
13660 /* Builtins with folding operations that operate on "..." arguments
13661 need special handling; we need to store the arguments in a convenient
13662 data structure before attempting any folding. Fortunately there are
13663 only a few builtins that fall into this category. FNDECL is the
13664 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13665 result of the function call is ignored. */
13666
13667 static tree
13668 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13669 bool ignore ATTRIBUTE_UNUSED)
13670 {
13671 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13672 tree ret = NULL_TREE;
13673
13674 switch (fcode)
13675 {
13676 case BUILT_IN_SPRINTF_CHK:
13677 case BUILT_IN_VSPRINTF_CHK:
13678 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13679 break;
13680
13681 case BUILT_IN_SNPRINTF_CHK:
13682 case BUILT_IN_VSNPRINTF_CHK:
13683 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13684
13685 default:
13686 break;
13687 }
13688 if (ret)
13689 {
13690 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13691 TREE_NO_WARNING (ret) = 1;
13692 return ret;
13693 }
13694 return NULL_TREE;
13695 }
13696
13697 /* A wrapper function for builtin folding that prevents warnings for
13698 "statement without effect" and the like, caused by removing the
13699 call node earlier than the warning is generated. */
13700
13701 tree
13702 fold_call_stmt (gimple stmt, bool ignore)
13703 {
13704 tree ret = NULL_TREE;
13705 tree fndecl = gimple_call_fndecl (stmt);
13706 location_t loc = gimple_location (stmt);
13707 if (fndecl
13708 && TREE_CODE (fndecl) == FUNCTION_DECL
13709 && DECL_BUILT_IN (fndecl)
13710 && !gimple_call_va_arg_pack_p (stmt))
13711 {
13712 int nargs = gimple_call_num_args (stmt);
13713
13714 if (avoid_folding_inline_builtin (fndecl))
13715 return NULL_TREE;
13716 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13717 {
13718 return targetm.fold_builtin (fndecl, nargs,
13719 (nargs > 0
13720 ? gimple_call_arg_ptr (stmt, 0)
13721 : &error_mark_node), ignore);
13722 }
13723 else
13724 {
13725 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13726 {
13727 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13728 int i;
13729 for (i = 0; i < nargs; i++)
13730 args[i] = gimple_call_arg (stmt, i);
13731 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13732 }
13733 if (!ret)
13734 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13735 if (ret)
13736 {
13737 /* Propagate location information from original call to
13738 expansion of builtin. Otherwise things like
13739 maybe_emit_chk_warning, that operate on the expansion
13740 of a builtin, will use the wrong location information. */
13741 if (gimple_has_location (stmt))
13742 {
13743 tree realret = ret;
13744 if (TREE_CODE (ret) == NOP_EXPR)
13745 realret = TREE_OPERAND (ret, 0);
13746 if (CAN_HAVE_LOCATION_P (realret)
13747 && !EXPR_HAS_LOCATION (realret))
13748 SET_EXPR_LOCATION (realret, loc);
13749 return realret;
13750 }
13751 return ret;
13752 }
13753 }
13754 }
13755 return NULL_TREE;
13756 }
13757
/* Look up the function in built_in_decls that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  For builtins the compiler
   itself may emit calls to (memcpy, memset, ...), the matching RTL
   libfuncs are redirected to ASMSPEC as well so expansion uses the
   renamed symbol.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  /* Rename the builtin decl itself.  */
  builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
  set_user_assembler_name (builtin, asmspec);
  /* Keep the corresponding libfunc entries in sync, since RTL
     expansion can emit calls to these without going through DECL.  */
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      /* Only redirect the ffs optab when int is narrower than a word;
	 presumably wider cases go through other entry points — the
	 guard mirrors how the ffs libfunc is set up elsewhere (NOTE:
	 confirm against optabs initialization).  */
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
13803
13804 /* Return true if DECL is a builtin that expands to a constant or similarly
13805 simple code. */
13806 bool
13807 is_simple_builtin (tree decl)
13808 {
13809 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13810 switch (DECL_FUNCTION_CODE (decl))
13811 {
13812 /* Builtins that expand to constants. */
13813 case BUILT_IN_CONSTANT_P:
13814 case BUILT_IN_EXPECT:
13815 case BUILT_IN_OBJECT_SIZE:
13816 case BUILT_IN_UNREACHABLE:
13817 /* Simple register moves or loads from stack. */
13818 case BUILT_IN_RETURN_ADDRESS:
13819 case BUILT_IN_EXTRACT_RETURN_ADDR:
13820 case BUILT_IN_FROB_RETURN_ADDR:
13821 case BUILT_IN_RETURN:
13822 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13823 case BUILT_IN_FRAME_ADDRESS:
13824 case BUILT_IN_VA_END:
13825 case BUILT_IN_STACK_SAVE:
13826 case BUILT_IN_STACK_RESTORE:
13827 /* Exception state returns or moves registers around. */
13828 case BUILT_IN_EH_FILTER:
13829 case BUILT_IN_EH_POINTER:
13830 case BUILT_IN_EH_COPY_VALUES:
13831 return true;
13832
13833 default:
13834 return false;
13835 }
13836
13837 return false;
13838 }
13839
13840 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13841 most probably expanded inline into reasonably simple code. This is a
13842 superset of is_simple_builtin. */
13843 bool
13844 is_inexpensive_builtin (tree decl)
13845 {
13846 if (!decl)
13847 return false;
13848 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13849 return true;
13850 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13851 switch (DECL_FUNCTION_CODE (decl))
13852 {
13853 case BUILT_IN_ABS:
13854 case BUILT_IN_ALLOCA:
13855 case BUILT_IN_BSWAP32:
13856 case BUILT_IN_BSWAP64:
13857 case BUILT_IN_CLZ:
13858 case BUILT_IN_CLZIMAX:
13859 case BUILT_IN_CLZL:
13860 case BUILT_IN_CLZLL:
13861 case BUILT_IN_CTZ:
13862 case BUILT_IN_CTZIMAX:
13863 case BUILT_IN_CTZL:
13864 case BUILT_IN_CTZLL:
13865 case BUILT_IN_FFS:
13866 case BUILT_IN_FFSIMAX:
13867 case BUILT_IN_FFSL:
13868 case BUILT_IN_FFSLL:
13869 case BUILT_IN_IMAXABS:
13870 case BUILT_IN_FINITE:
13871 case BUILT_IN_FINITEF:
13872 case BUILT_IN_FINITEL:
13873 case BUILT_IN_FINITED32:
13874 case BUILT_IN_FINITED64:
13875 case BUILT_IN_FINITED128:
13876 case BUILT_IN_FPCLASSIFY:
13877 case BUILT_IN_ISFINITE:
13878 case BUILT_IN_ISINF_SIGN:
13879 case BUILT_IN_ISINF:
13880 case BUILT_IN_ISINFF:
13881 case BUILT_IN_ISINFL:
13882 case BUILT_IN_ISINFD32:
13883 case BUILT_IN_ISINFD64:
13884 case BUILT_IN_ISINFD128:
13885 case BUILT_IN_ISNAN:
13886 case BUILT_IN_ISNANF:
13887 case BUILT_IN_ISNANL:
13888 case BUILT_IN_ISNAND32:
13889 case BUILT_IN_ISNAND64:
13890 case BUILT_IN_ISNAND128:
13891 case BUILT_IN_ISNORMAL:
13892 case BUILT_IN_ISGREATER:
13893 case BUILT_IN_ISGREATEREQUAL:
13894 case BUILT_IN_ISLESS:
13895 case BUILT_IN_ISLESSEQUAL:
13896 case BUILT_IN_ISLESSGREATER:
13897 case BUILT_IN_ISUNORDERED:
13898 case BUILT_IN_VA_ARG_PACK:
13899 case BUILT_IN_VA_ARG_PACK_LEN:
13900 case BUILT_IN_VA_COPY:
13901 case BUILT_IN_TRAP:
13902 case BUILT_IN_SAVEREGS:
13903 case BUILT_IN_POPCOUNTL:
13904 case BUILT_IN_POPCOUNTLL:
13905 case BUILT_IN_POPCOUNTIMAX:
13906 case BUILT_IN_POPCOUNT:
13907 case BUILT_IN_PARITYL:
13908 case BUILT_IN_PARITYLL:
13909 case BUILT_IN_PARITYIMAX:
13910 case BUILT_IN_PARITY:
13911 case BUILT_IN_LABS:
13912 case BUILT_IN_LLABS:
13913 case BUILT_IN_PREFETCH:
13914 return true;
13915
13916 default:
13917 return is_simple_builtin (decl);
13918 }
13919
13920 return false;
13921 }
13922