1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
52 #include "builtins.h"
53
54
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
57 #endif
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
59
60 struct target_builtins default_target_builtins;
61 #if SWITCHABLE_TARGET
62 struct target_builtins *this_target_builtins = &default_target_builtins;
63 #endif
64
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
68
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
71 {
72 #include "builtins.def"
73 };
74 #undef DEF_BUILTIN
75
76 /* Set up an array of _DECL trees and make sure each element is
77 initialized to NULL_TREE. */
78 builtin_info_type builtin_info;
79
80 static const char *c_getstr (tree);
81 static rtx c_readstr (const char *, enum machine_mode);
82 static int target_char_cast (tree, char *);
83 static rtx get_memory_rtx (tree, tree);
84 static int apply_args_size (void);
85 static int apply_result_size (void);
86 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
87 static rtx result_vector (int, rtx);
88 #endif
89 static void expand_builtin_update_setjmp_buf (rtx);
90 static void expand_builtin_prefetch (tree);
91 static rtx expand_builtin_apply_args (void);
92 static rtx expand_builtin_apply_args_1 (void);
93 static rtx expand_builtin_apply (rtx, rtx, rtx);
94 static void expand_builtin_return (rtx);
95 static enum type_class type_to_class (tree);
96 static rtx expand_builtin_classify_type (tree);
97 static void expand_errno_check (tree, rtx);
98 static rtx expand_builtin_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_next_arg (void);
108 static rtx expand_builtin_va_start (tree);
109 static rtx expand_builtin_va_end (tree);
110 static rtx expand_builtin_va_copy (tree);
111 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strcmp (tree, rtx);
113 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
114 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
115 static rtx expand_builtin_memcpy (tree, rtx);
116 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
118 enum machine_mode, int);
119 static rtx expand_builtin_strcpy (tree, rtx);
120 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
121 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strncpy (tree, rtx);
123 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
124 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
126 static rtx expand_builtin_bzero (tree);
127 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_alloca (tree, bool);
129 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
130 static rtx expand_builtin_frame_address (tree, tree);
131 static tree stabilize_va_list_loc (location_t, tree, int);
132 static rtx expand_builtin_expect (tree, rtx);
133 static tree fold_builtin_constant_p (tree);
134 static tree fold_builtin_expect (location_t, tree, tree);
135 static tree fold_builtin_classify_type (tree);
136 static tree fold_builtin_strlen (location_t, tree, tree);
137 static tree fold_builtin_inf (location_t, tree, int);
138 static tree fold_builtin_nan (tree, tree, int);
139 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
140 static bool validate_arg (const_tree, enum tree_code code);
141 static bool integer_valued_real_p (tree);
142 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
143 static bool readonly_data_expr (tree);
144 static rtx expand_builtin_fabs (tree, rtx, rtx);
145 static rtx expand_builtin_signbit (tree, rtx);
146 static tree fold_builtin_sqrt (location_t, tree, tree);
147 static tree fold_builtin_cbrt (location_t, tree, tree);
148 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
149 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
150 static tree fold_builtin_cos (location_t, tree, tree, tree);
151 static tree fold_builtin_cosh (location_t, tree, tree, tree);
152 static tree fold_builtin_tan (tree, tree);
153 static tree fold_builtin_trunc (location_t, tree, tree);
154 static tree fold_builtin_floor (location_t, tree, tree);
155 static tree fold_builtin_ceil (location_t, tree, tree);
156 static tree fold_builtin_round (location_t, tree, tree);
157 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
158 static tree fold_builtin_bitop (tree, tree);
159 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
160 static tree fold_builtin_strchr (location_t, tree, tree, tree);
161 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
162 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
163 static tree fold_builtin_strcmp (location_t, tree, tree);
164 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
165 static tree fold_builtin_signbit (location_t, tree, tree);
166 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
167 static tree fold_builtin_isascii (location_t, tree);
168 static tree fold_builtin_toascii (location_t, tree);
169 static tree fold_builtin_isdigit (location_t, tree);
170 static tree fold_builtin_fabs (location_t, tree, tree);
171 static tree fold_builtin_abs (location_t, tree, tree);
172 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
173 enum tree_code);
174 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
175 static tree fold_builtin_0 (location_t, tree, bool);
176 static tree fold_builtin_1 (location_t, tree, tree, bool);
177 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
178 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
179 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
180 static tree fold_builtin_varargs (location_t, tree, tree, bool);
181
182 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
183 static tree fold_builtin_strstr (location_t, tree, tree, tree);
184 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
185 static tree fold_builtin_strcat (location_t, tree, tree);
186 static tree fold_builtin_strncat (location_t, tree, tree, tree);
187 static tree fold_builtin_strspn (location_t, tree, tree);
188 static tree fold_builtin_strcspn (location_t, tree, tree);
189 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
190 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
191
192 static rtx expand_builtin_object_size (tree);
193 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
194 enum built_in_function);
195 static void maybe_emit_chk_warning (tree, enum built_in_function);
196 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_free_warning (tree);
198 static tree fold_builtin_object_size (tree, tree);
199 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
200 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
201 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
202 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
203 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
204 enum built_in_function);
205 static bool init_target_chars (void);
206
207 static unsigned HOST_WIDE_INT target_newline;
208 static unsigned HOST_WIDE_INT target_percent;
209 static unsigned HOST_WIDE_INT target_c;
210 static unsigned HOST_WIDE_INT target_s;
211 static char target_percent_c[3];
212 static char target_percent_s[3];
213 static char target_percent_s_newline[4];
214 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
215 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
216 static tree do_mpfr_arg2 (tree, tree, tree,
217 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
218 static tree do_mpfr_arg3 (tree, tree, tree, tree,
219 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
220 static tree do_mpfr_sincos (tree, tree, tree);
221 static tree do_mpfr_bessel_n (tree, tree, tree,
222 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
223 const REAL_VALUE_TYPE *, bool);
224 static tree do_mpfr_remquo (tree, tree, tree);
225 static tree do_mpfr_lgamma_r (tree, tree, tree);
226 static void expand_builtin_sync_synchronize (void);
227
228 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
229
230 static bool
231 is_builtin_name (const char *name)
232 {
233 if (strncmp (name, "__builtin_", 10) == 0)
234 return true;
235 if (strncmp (name, "__sync_", 7) == 0)
236 return true;
237 if (strncmp (name, "__atomic_", 9) == 0)
238 return true;
239 return false;
240 }
241
242
243 /* Return true if DECL is a function symbol representing a built-in. */
244
245 bool
246 is_builtin_fn (tree decl)
247 {
248 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
249 }
250
251
252 /* Return true if NODE should be considered for inline expansion regardless
253 of the optimization level. This is the case whenever a function is invoked
254 by its "internal" name, which normally contains the prefix "__builtin". */
255
256 static bool
257 called_as_built_in (tree node)
258 {
259 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
260 we want the name used to call the function, not the name it
261 will have. */
262 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
263 return is_builtin_name (name);
264 }
265
266 /* Compute values M and N such that M divides (address of EXP - N) and such
267 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
268 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
269 *ALIGNP and any bit-offset to *BITPOSP.
270
271 Note that the address (and thus the alignment) computed here is based
272 on the address to which a symbol resolves, whereas DECL_ALIGN is based
273 on the address at which an object is actually located. These two
274 addresses are not always the same. For example, on ARM targets,
275 the address &foo of a Thumb function foo() has the lowest bit set,
276 whereas foo() itself starts on an even address. */
277
278 bool
279 get_object_alignment_1 (tree exp, unsigned int *alignp,
280 unsigned HOST_WIDE_INT *bitposp)
281 {
282 HOST_WIDE_INT bitsize, bitpos;
283 tree offset;
284 enum machine_mode mode;
285 int unsignedp, volatilep;
286 unsigned int inner, align = BITS_PER_UNIT;
287 bool known_alignment = false;
288
289 /* Get the innermost object and the constant (bitpos) and possibly
290 variable (offset) offset of the access. */
291 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
292 &mode, &unsignedp, &volatilep, true);
293
294 /* Extract alignment information from the innermost object and
295 possibly adjust bitpos and offset. */
296 if (TREE_CODE (exp) == CONST_DECL)
297 exp = DECL_INITIAL (exp);
298 if (DECL_P (exp)
299 && TREE_CODE (exp) != LABEL_DECL)
300 {
301 if (TREE_CODE (exp) == FUNCTION_DECL)
302 {
303 /* Function addresses can encode extra information besides their
304 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
305 allows the low bit to be used as a virtual bit, we know
306 that the address itself must be 2-byte aligned. */
307 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
308 {
309 known_alignment = true;
310 align = 2 * BITS_PER_UNIT;
311 }
312 }
313 else
314 {
315 known_alignment = true;
316 align = DECL_ALIGN (exp);
317 }
318 }
319 else if (CONSTANT_CLASS_P (exp))
320 {
321 known_alignment = true;
322 align = TYPE_ALIGN (TREE_TYPE (exp));
323 #ifdef CONSTANT_ALIGNMENT
324 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
325 #endif
326 }
327 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
328 {
329 known_alignment = true;
330 align = TYPE_ALIGN (TREE_TYPE (exp));
331 }
332 else if (TREE_CODE (exp) == INDIRECT_REF)
333 {
334 known_alignment = true;
335 align = TYPE_ALIGN (TREE_TYPE (exp));
336 }
337 else if (TREE_CODE (exp) == MEM_REF)
338 {
339 tree addr = TREE_OPERAND (exp, 0);
340 unsigned ptr_align;
341 unsigned HOST_WIDE_INT ptr_bitpos;
342
343 if (TREE_CODE (addr) == BIT_AND_EXPR
344 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
345 {
346 known_alignment = true;
347 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
348 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
349 align *= BITS_PER_UNIT;
350 addr = TREE_OPERAND (addr, 0);
351 }
352
353 if (get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos))
354 {
355 known_alignment = true;
356 bitpos += ptr_bitpos & ~(align - 1);
357 align = MAX (ptr_align, align);
358 }
359
360 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
361 }
362 else if (TREE_CODE (exp) == TARGET_MEM_REF)
363 {
364 unsigned ptr_align;
365 unsigned HOST_WIDE_INT ptr_bitpos;
366 tree addr = TMR_BASE (exp);
367
368 if (TREE_CODE (addr) == BIT_AND_EXPR
369 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
370 {
371 known_alignment = true;
372 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
373 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
374 align *= BITS_PER_UNIT;
375 addr = TREE_OPERAND (addr, 0);
376 }
377
378 if (get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos))
379 {
380 known_alignment = true;
381 bitpos += ptr_bitpos & ~(align - 1);
382 align = MAX (ptr_align, align);
383 }
384
385 if (TMR_OFFSET (exp))
386 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
387 if (TMR_INDEX (exp) && TMR_STEP (exp))
388 {
389 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
390 align = MIN (align, (step & -step) * BITS_PER_UNIT);
391 known_alignment = true;
392 }
393 else if (TMR_INDEX (exp))
394 known_alignment = false;
395
396 if (TMR_INDEX2 (exp))
397 known_alignment = false;
398 }
399
400 /* If there is a non-constant offset part, extract the maximum
401 alignment that can prevail. */
402 inner = ~0U;
403 while (offset)
404 {
405 tree next_offset;
406
407 if (TREE_CODE (offset) == PLUS_EXPR)
408 {
409 next_offset = TREE_OPERAND (offset, 0);
410 offset = TREE_OPERAND (offset, 1);
411 }
412 else
413 next_offset = NULL;
414 if (host_integerp (offset, 1))
415 {
416 /* Any overflow in calculating offset_bits won't change
417 the alignment. */
418 unsigned offset_bits
419 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
420
421 if (offset_bits)
422 inner = MIN (inner, (offset_bits & -offset_bits));
423 }
424 else if (TREE_CODE (offset) == MULT_EXPR
425 && host_integerp (TREE_OPERAND (offset, 1), 1))
426 {
427 /* Any overflow in calculating offset_factor won't change
428 the alignment. */
429 unsigned offset_factor
430 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
431 * BITS_PER_UNIT);
432
433 if (offset_factor)
434 inner = MIN (inner, (offset_factor & -offset_factor));
435 }
436 else
437 {
438 known_alignment = false;
439 break;
440 }
441 offset = next_offset;
442 }
443
444 if (known_alignment)
445 {
446 /* Alignment is innermost object alignment adjusted by the constant
447 and non-constant offset parts. */
448 align = MIN (align, inner);
449 bitpos = bitpos & (align - 1);
450 *alignp = align;
451 }
452 else
453 {
454 bitpos = bitpos & (BITS_PER_UNIT - 1);
455 *alignp = BITS_PER_UNIT;
456 }
457 *bitposp = bitpos;
458 return known_alignment;
459 }
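
/* A minimal illustrative helper (not part of GCC's API; the name is ours):
   given the (ALIGN, BITPOS) pair produced by get_object_alignment_1, test
   whether the object is known to be aligned to REQ bits, REQ a nonzero
   power of two.  Per the contract above, ALIGN divides (address - BITPOS),
   so the address is a multiple of REQ exactly when REQ divides both ALIGN
   and BITPOS. */

static bool
known_aligned_to_p (tree exp, unsigned int req)
{
  unsigned int align;
  unsigned HOST_WIDE_INT bitpos;

  if (!get_object_alignment_1 (exp, &align, &bitpos))
    return false;
  return align % req == 0 && bitpos % req == 0;
}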
460
461 /* Return the alignment in bits of EXP, an object. */
462
463 unsigned int
464 get_object_alignment (tree exp)
465 {
466 unsigned HOST_WIDE_INT bitpos = 0;
467 unsigned int align;
468
469 get_object_alignment_1 (exp, &align, &bitpos);
470
471 /* align and bitpos now specify known low bits of the pointer.
472 ptr & (align - 1) == bitpos. */
473
474 if (bitpos != 0)
475 align = (bitpos & -bitpos);
476 return align;
477 }
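
/* Worked example of the BITPOS & -BITPOS step above (illustrative only):
   with align == 128 and bitpos == 32, the pointer satisfies
   ptr % 128 == 32, so the strongest power-of-two guarantee is the lowest
   set bit of 32, i.e. 32 bits (4-byte alignment).  The usual two's
   complement trick extracts that bit in a single AND: */

static unsigned int
lowest_set_bit_model (unsigned HOST_WIDE_INT x)
{
  return x ? (unsigned int) (x & -x) : 0;   /* 32 for x == 32, 8 for 40 */
}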
478
479 /* Return the alignment of object EXP, also considering its type when we do
480 not know of explicit misalignment. Only handle MEM_REF and TARGET_MEM_REF.
481
482 ??? Note that, in the general case, the type of an expression is not kept
483 consistent with misalignment information by the front-end, for example when
484 taking the address of a member of a packed structure. However, in most of
485 the cases, expressions have the alignment of their type so we optimistically
486 fall back to this alignment when we cannot compute a misalignment. */
487
488 unsigned int
489 get_object_or_type_alignment (tree exp)
490 {
491 unsigned HOST_WIDE_INT misalign;
492 unsigned int align;
493 bool known_alignment;
494
495 gcc_assert (TREE_CODE (exp) == MEM_REF || TREE_CODE (exp) == TARGET_MEM_REF);
496 known_alignment = get_object_alignment_1 (exp, &align, &misalign);
497 if (misalign != 0)
498 align = (misalign & -misalign);
499 else if (!known_alignment)
500 align = TYPE_ALIGN (TREE_TYPE (exp));
501
502 return align;
503 }
504
505 /* For a pointer valued expression EXP compute values M and N such that M
506 divides (EXP - N) and such that N < M. If these numbers can be determined,
507 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
508 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.
509
510 If EXP is not a pointer, false is returned too. */
511
512 bool
513 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
514 unsigned HOST_WIDE_INT *bitposp)
515 {
516 STRIP_NOPS (exp);
517
518 if (TREE_CODE (exp) == ADDR_EXPR)
519 return get_object_alignment_1 (TREE_OPERAND (exp, 0), alignp, bitposp);
520 else if (TREE_CODE (exp) == SSA_NAME
521 && POINTER_TYPE_P (TREE_TYPE (exp)))
522 {
523 unsigned int ptr_align, ptr_misalign;
524 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
525
526 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
527 {
528 *bitposp = ptr_misalign * BITS_PER_UNIT;
529 *alignp = ptr_align * BITS_PER_UNIT;
530 return true;
531 }
532 else
533 {
534 *bitposp = 0;
535 *alignp = BITS_PER_UNIT;
536 return false;
537 }
538 }
539
540 *bitposp = 0;
541 *alignp = BITS_PER_UNIT;
542 return false;
543 }
544
545 /* Return the alignment in bits of EXP, a pointer valued expression.
546 The alignment returned is, by default, the alignment of the thing that
547 EXP points to. If it is not a POINTER_TYPE, BITS_PER_UNIT is returned.
548
549 Otherwise, look at the expression to see if we can do better, i.e., if the
550 expression is actually pointing at an object whose alignment is tighter. */
551
552 unsigned int
553 get_pointer_alignment (tree exp)
554 {
555 unsigned HOST_WIDE_INT bitpos = 0;
556 unsigned int align;
557
558 get_pointer_alignment_1 (exp, &align, &bitpos);
559
560 /* align and bitpos now specify known low bits of the pointer.
561 ptr & (align - 1) == bitpos. */
562
563 if (bitpos != 0)
564 align = (bitpos & -bitpos);
565
566 return align;
567 }
568
569 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
570 way, because the string could contain a zero byte in the middle.
571 TREE_STRING_LENGTH is the size of the character array, not the string.
572
573 ONLY_VALUE should be nonzero if the result is not going to be emitted
574 into the instruction stream and zero if it is going to be expanded.
575 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
576 is returned, otherwise NULL, since
577 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
578 evaluate the side-effects.
579
580 The value returned is of type `ssizetype'.
581
582 Unfortunately, string_constant can't access the values of const char
583 arrays with initializers, so neither can we do so here. */
584
585 tree
586 c_strlen (tree src, int only_value)
587 {
588 tree offset_node;
589 HOST_WIDE_INT offset;
590 int max;
591 const char *ptr;
592 location_t loc;
593
594 STRIP_NOPS (src);
595 if (TREE_CODE (src) == COND_EXPR
596 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
597 {
598 tree len1, len2;
599
600 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
601 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
602 if (tree_int_cst_equal (len1, len2))
603 return len1;
604 }
605
606 if (TREE_CODE (src) == COMPOUND_EXPR
607 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
608 return c_strlen (TREE_OPERAND (src, 1), only_value);
609
610 loc = EXPR_LOC_OR_HERE (src);
611
612 src = string_constant (src, &offset_node);
613 if (src == 0)
614 return NULL_TREE;
615
616 max = TREE_STRING_LENGTH (src) - 1;
617 ptr = TREE_STRING_POINTER (src);
618
619 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
620 {
621 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
622 compute the offset to the following null if we don't know where to
623 start searching for it. */
624 int i;
625
626 for (i = 0; i < max; i++)
627 if (ptr[i] == 0)
628 return NULL_TREE;
629
630 /* We don't know the starting offset, but we do know that the string
631 has no internal zero bytes. We can assume that the offset falls
632 within the bounds of the string; otherwise, the programmer deserves
633 what he gets. Subtract the offset from the length of the string,
634 and return that. This would perhaps not be valid if we were dealing
635 with named arrays in addition to literal string constants. */
636
637 return size_diffop_loc (loc, size_int (max), offset_node);
638 }
639
640 /* We have a known offset into the string. Start searching there for
641 a null character if we can represent it as a single HOST_WIDE_INT. */
642 if (offset_node == 0)
643 offset = 0;
644 else if (! host_integerp (offset_node, 0))
645 offset = -1;
646 else
647 offset = tree_low_cst (offset_node, 0);
648
649 /* If the offset is known to be out of bounds, warn, and call strlen at
650 runtime. */
651 if (offset < 0 || offset > max)
652 {
653 /* Suppress multiple warnings for propagated constant strings. */
654 if (! TREE_NO_WARNING (src))
655 {
656 warning_at (loc, 0, "offset outside bounds of constant string");
657 TREE_NO_WARNING (src) = 1;
658 }
659 return NULL_TREE;
660 }
661
662 /* Use strlen to search for the first zero byte. Since any strings
663 constructed with build_string will have nulls appended, we win even
664 if we get handed something like (char[4])"abcd".
665
666 Since OFFSET is our starting index into the string, no further
667 calculation is needed. */
668 return ssize_int (strlen (ptr + offset));
669 }
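
/* A hedged host-side model of the offset handling in c_strlen, using plain
   C strings in place of STRING_CST trees (the name and the -1 "unknown"
   encoding are ours, purely for illustration).  ARRAY_LEN plays the role
   of TREE_STRING_LENGTH, and a negative OFFSET stands for a non-constant
   offset_node. */

static long
c_strlen_model (const char *ptr, long array_len, long offset)
{
  long max = array_len - 1;
  long i;

  /* Unknown offset: foldable only when there is no internal NUL, and even
     then the result is symbolic (max - offset_node), so give up here.  */
  if (offset < 0)
    {
      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return -1;              /* like returning NULL_TREE */
      return -1;                  /* symbolic length; not representable */
    }

  if (offset > max)
    return -1;                    /* out of bounds: warn, punt to runtime */

  return (long) strlen (ptr + offset);
}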
670
671 /* Return a char pointer for a C string if it is a string constant
672 or sum of string constant and integer constant. */
673
674 static const char *
675 c_getstr (tree src)
676 {
677 tree offset_node;
678
679 src = string_constant (src, &offset_node);
680 if (src == 0)
681 return 0;
682
683 if (offset_node == 0)
684 return TREE_STRING_POINTER (src);
685 else if (!host_integerp (offset_node, 1)
686 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
687 return 0;
688
689 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
690 }
691
692 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
693 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
694
695 static rtx
696 c_readstr (const char *str, enum machine_mode mode)
697 {
698 HOST_WIDE_INT c[2];
699 HOST_WIDE_INT ch;
700 unsigned int i, j;
701
702 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
703
704 c[0] = 0;
705 c[1] = 0;
706 ch = 1;
707 for (i = 0; i < GET_MODE_SIZE (mode); i++)
708 {
709 j = i;
710 if (WORDS_BIG_ENDIAN)
711 j = GET_MODE_SIZE (mode) - i - 1;
712 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
713 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
714 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
715 j *= BITS_PER_UNIT;
716 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
717
718 if (ch)
719 ch = (unsigned char) str[i];
720 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
721 }
722 return immed_double_const (c[0], c[1], mode);
723 }
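
/* A little-endian-only sketch of the packing c_readstr performs, with a
   64-bit host word standing in for HOST_WIDE_INT (illustrative; the real
   code above also handles big-endian byte and word orders via the index J).
   Bytes past the terminating NUL are zero-filled, exactly as above. */

static void
c_readstr_le_model (const char *str, unsigned int nbytes,
                    unsigned long long out[2])
{
  unsigned int i;
  unsigned long long ch = 1;

  out[0] = out[1] = 0;
  for (i = 0; i < nbytes; i++)
    {
      if (ch)
        ch = (unsigned char) str[i];        /* sticky zero after the NUL */
      out[(i * 8) / 64] |= ch << ((i * 8) % 64);
    }
}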
724
725 /* Cast a target constant CST to target CHAR and if that value fits into
726 host char type, return zero and put that value into variable pointed to by
727 P. */
728
729 static int
730 target_char_cast (tree cst, char *p)
731 {
732 unsigned HOST_WIDE_INT val, hostval;
733
734 if (TREE_CODE (cst) != INTEGER_CST
735 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
736 return 1;
737
738 val = TREE_INT_CST_LOW (cst);
739 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
740 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
741
742 hostval = val;
743 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
744 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
745
746 if (val != hostval)
747 return 1;
748
749 *p = hostval;
750 return 0;
751 }
752
753 /* Similar to save_expr, but assumes that arbitrary code is not executed
754 in between the multiple evaluations. In particular, we assume that a
755 non-addressable local variable will not be modified. */
756
757 static tree
758 builtin_save_expr (tree exp)
759 {
760 if (TREE_CODE (exp) == SSA_NAME
761 || (TREE_ADDRESSABLE (exp) == 0
762 && (TREE_CODE (exp) == PARM_DECL
763 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
764 return exp;
765
766 return save_expr (exp);
767 }
768
769 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
770 times to get the address of either a higher stack frame, or a return
771 address located within it (depending on FNDECL_CODE). */
772
773 static rtx
774 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
775 {
776 int i;
777
778 #ifdef INITIAL_FRAME_ADDRESS_RTX
779 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
780 #else
781 rtx tem;
782
783 /* For a zero count with __builtin_return_address, we don't care what
784 frame address we return, because target-specific definitions will
785 override us. Therefore frame pointer elimination is OK, and using
786 the soft frame pointer is OK.
787
788 For a nonzero count, or a zero count with __builtin_frame_address,
789 we require a stable offset from the current frame pointer to the
790 previous one, so we must use the hard frame pointer, and
791 we must disable frame pointer elimination. */
792 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
793 tem = frame_pointer_rtx;
794 else
795 {
796 tem = hard_frame_pointer_rtx;
797
798 /* Tell reload not to eliminate the frame pointer. */
799 crtl->accesses_prior_frames = 1;
800 }
801 #endif
802
803 /* Some machines need special handling before we can access
804 arbitrary frames. For example, on the SPARC, we must first flush
805 all register windows to the stack. */
806 #ifdef SETUP_FRAME_ADDRESSES
807 if (count > 0)
808 SETUP_FRAME_ADDRESSES ();
809 #endif
810
811 /* On the SPARC, the return address is not in the frame, it is in a
812 register. There is no way to access it off of the current frame
813 pointer, but it can be accessed off the previous frame pointer by
814 reading the value from the register window save area. */
815 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
816 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
817 count--;
818 #endif
819
820 /* Scan back COUNT frames to the specified frame. */
821 for (i = 0; i < count; i++)
822 {
823 /* Assume the dynamic chain pointer is in the word that the
824 frame address points to, unless otherwise specified. */
825 #ifdef DYNAMIC_CHAIN_ADDRESS
826 tem = DYNAMIC_CHAIN_ADDRESS (tem);
827 #endif
828 tem = memory_address (Pmode, tem);
829 tem = gen_frame_mem (Pmode, tem);
830 tem = copy_to_reg (tem);
831 }
832
833 /* For __builtin_frame_address, return what we've got. But, on
834 the SPARC for example, we may have to add a bias. */
835 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
836 #ifdef FRAME_ADDR_RTX
837 return FRAME_ADDR_RTX (tem);
838 #else
839 return tem;
840 #endif
841
842 /* For __builtin_return_address, get the return address from that frame. */
843 #ifdef RETURN_ADDR_RTX
844 tem = RETURN_ADDR_RTX (count, tem);
845 #else
846 tem = memory_address (Pmode,
847 plus_constant (tem, GET_MODE_SIZE (Pmode)));
848 tem = gen_frame_mem (Pmode, tem);
849 #endif
850 return tem;
851 }
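
/* A conceptual model of the frame walk above, assuming the default layout
   where the dynamic chain pointer sits in the word the frame address
   points to (i.e. DYNAMIC_CHAIN_ADDRESS is not defined).  The struct and
   function names are ours, for illustration only. */

struct frame_chain_model
{
  struct frame_chain_model *outer;   /* saved frame pointer of the caller */
};

static void *
nth_frame_model (void *fp, int count)
{
  struct frame_chain_model *f = (struct frame_chain_model *) fp;
  int i;

  for (i = 0; i < count; i++)
    f = f->outer;                    /* one loaded hop per loop iteration */
  return f;
}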
852
853 /* Alias set used for setjmp buffer. */
854 static alias_set_type setjmp_alias_set = -1;
855
856 /* Construct the leading half of a __builtin_setjmp call. Control will
857 return to RECEIVER_LABEL. This is also called directly by the SJLJ
858 exception handling code. */
859
860 void
861 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
862 {
863 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
864 rtx stack_save;
865 rtx mem;
866
867 if (setjmp_alias_set == -1)
868 setjmp_alias_set = new_alias_set ();
869
870 buf_addr = convert_memory_address (Pmode, buf_addr);
871
872 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
873
874 /* We store the frame pointer and the address of receiver_label in
875 the buffer and use the rest of it for the stack save area, which
876 is machine-dependent. */
877
878 mem = gen_rtx_MEM (Pmode, buf_addr);
879 set_mem_alias_set (mem, setjmp_alias_set);
880 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
881
882 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
883 set_mem_alias_set (mem, setjmp_alias_set);
884
885 emit_move_insn (validize_mem (mem),
886 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
887
888 stack_save = gen_rtx_MEM (sa_mode,
889 plus_constant (buf_addr,
890 2 * GET_MODE_SIZE (Pmode)));
891 set_mem_alias_set (stack_save, setjmp_alias_set);
892 emit_stack_save (SAVE_NONLOCAL, &stack_save);
893
894 /* If there is further processing to do, do it. */
895 #ifdef HAVE_builtin_setjmp_setup
896 if (HAVE_builtin_setjmp_setup)
897 emit_insn (gen_builtin_setjmp_setup (buf_addr));
898 #endif
899
900 /* We have a nonlocal label. */
901 cfun->has_nonlocal_label = 1;
902 }
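
/* Illustrative layout of the buffer filled in above, each leading word of
   Pmode size; the struct and field names are ours, and the tail is
   machine-dependent (STACK_SAVEAREA_MODE (SAVE_NONLOCAL)). */

struct setjmp_buf_model
{
  void *frame;          /* word 0: targetm.builtin_setjmp_frame_value () */
  void *receiver;       /* word 1: address of RECEIVER_LABEL */
  void *stack_save[3];  /* words 2..4: stack save area */
};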
903
904 /* Construct the trailing part of a __builtin_setjmp call. This is
905 also called directly by the SJLJ exception handling code. */
906
907 void
908 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
909 {
910 rtx chain;
911
912 /* Clobber the FP when we get here, so we have to make sure it's
913 marked as used by this function. */
914 emit_use (hard_frame_pointer_rtx);
915
916 /* Mark the static chain as clobbered here so life information
917 doesn't get messed up for it. */
918 chain = targetm.calls.static_chain (current_function_decl, true);
919 if (chain && REG_P (chain))
920 emit_clobber (chain);
921
922 /* Now put in the code to restore the frame pointer, and argument
923 pointer, if needed. */
924 #ifdef HAVE_nonlocal_goto
925 if (! HAVE_nonlocal_goto)
926 #endif
927 {
928 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
929 /* This might change the hard frame pointer in ways that aren't
930 apparent to early optimization passes, so force a clobber. */
931 emit_clobber (hard_frame_pointer_rtx);
932 }
933
934 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
935 if (fixed_regs[ARG_POINTER_REGNUM])
936 {
937 #ifdef ELIMINABLE_REGS
938 size_t i;
939 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
940
941 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
942 if (elim_regs[i].from == ARG_POINTER_REGNUM
943 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
944 break;
945
946 if (i == ARRAY_SIZE (elim_regs))
947 #endif
948 {
949 /* Now restore our arg pointer from the address at which it
950 was saved in our stack frame. */
951 emit_move_insn (crtl->args.internal_arg_pointer,
952 copy_to_reg (get_arg_pointer_save_area ()));
953 }
954 }
955 #endif
956
957 #ifdef HAVE_builtin_setjmp_receiver
958 if (HAVE_builtin_setjmp_receiver)
959 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
960 else
961 #endif
962 #ifdef HAVE_nonlocal_goto_receiver
963 if (HAVE_nonlocal_goto_receiver)
964 emit_insn (gen_nonlocal_goto_receiver ());
965 else
966 #endif
967 { /* Nothing */ }
968
969 /* We must not allow the code we just generated to be reordered by
970 scheduling. Specifically, the update of the frame pointer must
971 happen immediately, not later. */
972 emit_insn (gen_blockage ());
973 }
974
975 /* __builtin_longjmp is passed a pointer to an array of five words (not
976 all will be used on all machines). It operates similarly to the C
977 library function of the same name, but is more efficient. Much of
978 the code below is copied from the handling of non-local gotos. */
979
980 static void
981 expand_builtin_longjmp (rtx buf_addr, rtx value)
982 {
983 rtx fp, lab, stack, insn, last;
984 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
985
986 /* DRAP is needed for stack realign if longjmp is expanded to current
987 function */
988 if (SUPPORTS_STACK_ALIGNMENT)
989 crtl->need_drap = true;
990
991 if (setjmp_alias_set == -1)
992 setjmp_alias_set = new_alias_set ();
993
994 buf_addr = convert_memory_address (Pmode, buf_addr);
995
996 buf_addr = force_reg (Pmode, buf_addr);
997
998 /* We require that the user pass a second argument of 1, because
999 that is what builtin_setjmp will return. */
1000 gcc_assert (value == const1_rtx);
1001
1002 last = get_last_insn ();
1003 #ifdef HAVE_builtin_longjmp
1004 if (HAVE_builtin_longjmp)
1005 emit_insn (gen_builtin_longjmp (buf_addr));
1006 else
1007 #endif
1008 {
1009 fp = gen_rtx_MEM (Pmode, buf_addr);
1010 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
1011 GET_MODE_SIZE (Pmode)));
1012
1013 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
1014 2 * GET_MODE_SIZE (Pmode)));
1015 set_mem_alias_set (fp, setjmp_alias_set);
1016 set_mem_alias_set (lab, setjmp_alias_set);
1017 set_mem_alias_set (stack, setjmp_alias_set);
1018
1019 /* Pick up FP, label, and SP from the block and jump. This code is
1020 from expand_goto in stmt.c; see there for detailed comments. */
1021 #ifdef HAVE_nonlocal_goto
1022 if (HAVE_nonlocal_goto)
1023 /* We have to pass a value to the nonlocal_goto pattern that will
1024 get copied into the static_chain pointer, but it does not matter
1025 what that value is, because builtin_setjmp does not use it. */
1026 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1027 else
1028 #endif
1029 {
1030 lab = copy_to_reg (lab);
1031
1032 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1033 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1034
1035 emit_move_insn (hard_frame_pointer_rtx, fp);
1036 emit_stack_restore (SAVE_NONLOCAL, stack);
1037
1038 emit_use (hard_frame_pointer_rtx);
1039 emit_use (stack_pointer_rtx);
1040 emit_indirect_jump (lab);
1041 }
1042 }
1043
1044 /* Search backwards and mark the jump insn as a non-local goto.
1045 Note that this precludes the use of __builtin_longjmp to a
1046 __builtin_setjmp target in the same function. However, we've
1047 already cautioned the user that these functions are for
1048 internal exception handling use only. */
1049 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1050 {
1051 gcc_assert (insn != last);
1052
1053 if (JUMP_P (insn))
1054 {
1055 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1056 break;
1057 }
1058 else if (CALL_P (insn))
1059 break;
1060 }
1061 }
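
/* A user-level sketch of the contract enforced above (the value passed to
   __builtin_longjmp must be the literal 1, and jumping back to a
   __builtin_setjmp in the same function is not supported); the function
   names are ours, for illustration. */

static void *sjlj_buf_model[5];

static void
model_raise (void)
{
  __builtin_longjmp (sjlj_buf_model, 1);   /* second argument must be 1 */
}

static int
model_handle (void)
{
  if (__builtin_setjmp (sjlj_buf_model))
    return 1;                              /* reached via model_raise */
  model_raise ();
  return 0;                                /* never reached */
}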
1062
1063 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1064 and the address of the save area. */
1065
1066 static rtx
1067 expand_builtin_nonlocal_goto (tree exp)
1068 {
1069 tree t_label, t_save_area;
1070 rtx r_label, r_save_area, r_fp, r_sp, insn;
1071
1072 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1073 return NULL_RTX;
1074
1075 t_label = CALL_EXPR_ARG (exp, 0);
1076 t_save_area = CALL_EXPR_ARG (exp, 1);
1077
1078 r_label = expand_normal (t_label);
1079 r_label = convert_memory_address (Pmode, r_label);
1080 r_save_area = expand_normal (t_save_area);
1081 r_save_area = convert_memory_address (Pmode, r_save_area);
1082 /* Copy the address of the save location to a register just in case it was
1083 based on the frame pointer. */
1084 r_save_area = copy_to_reg (r_save_area);
1085 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1086 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1087 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
1088
1089 crtl->has_nonlocal_goto = 1;
1090
1091 #ifdef HAVE_nonlocal_goto
1092 /* ??? We no longer need to pass the static chain value, afaik. */
1093 if (HAVE_nonlocal_goto)
1094 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1095 else
1096 #endif
1097 {
1098 r_label = copy_to_reg (r_label);
1099
1100 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1101 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1102
1103 /* Restore frame pointer for containing function. */
1104 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1105 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1106
1107 /* USE of hard_frame_pointer_rtx added for consistency;
1108 not clear if really needed. */
1109 emit_use (hard_frame_pointer_rtx);
1110 emit_use (stack_pointer_rtx);
1111
1112 /* If the architecture is using a GP register, we must
1113 conservatively assume that the target function makes use of it.
1114 The prologue of functions with nonlocal gotos must therefore
1115 initialize the GP register to the appropriate value, and we
1116 must then make sure that this value is live at the point
1117 of the jump. (Note that this doesn't necessarily apply
1118 to targets with a nonlocal_goto pattern; they are free
1119 to implement it in their own way. Note also that this is
1120 a no-op if the GP register is a global invariant.) */
1121 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1122 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1123 emit_use (pic_offset_table_rtx);
1124
1125 emit_indirect_jump (r_label);
1126 }
1127
1128 /* Search backwards to the jump insn and mark it as a
1129 non-local goto. */
1130 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1131 {
1132 if (JUMP_P (insn))
1133 {
1134 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1135 break;
1136 }
1137 else if (CALL_P (insn))
1138 break;
1139 }
1140
1141 return const0_rtx;
1142 }
1143
1144 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1145 (not all will be used on all machines) that was passed to __builtin_setjmp.
1146 It updates the stack pointer in that block to correspond to the current
1147 stack pointer. */
1148
1149 static void
1150 expand_builtin_update_setjmp_buf (rtx buf_addr)
1151 {
1152 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1153 rtx stack_save
1154 = gen_rtx_MEM (sa_mode,
1155 memory_address
1156 (sa_mode,
1157 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1158
1159 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1160 }
1161
1162 /* Expand a call to __builtin_prefetch. For a target that does not support
1163 data prefetch, evaluate the memory address argument in case it has side
1164 effects. */
1165
1166 static void
1167 expand_builtin_prefetch (tree exp)
1168 {
1169 tree arg0, arg1, arg2;
1170 int nargs;
1171 rtx op0, op1, op2;
1172
1173 if (!validate_arglist (exp, POINTER_TYPE, 0))
1174 return;
1175
1176 arg0 = CALL_EXPR_ARG (exp, 0);
1177
1178 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1179 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1180 locality). */
1181 nargs = call_expr_nargs (exp);
1182 if (nargs > 1)
1183 arg1 = CALL_EXPR_ARG (exp, 1);
1184 else
1185 arg1 = integer_zero_node;
1186 if (nargs > 2)
1187 arg2 = CALL_EXPR_ARG (exp, 2);
1188 else
1189 arg2 = integer_three_node;
1190
1191 /* Argument 0 is an address. */
1192 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1193
1194 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1195 if (TREE_CODE (arg1) != INTEGER_CST)
1196 {
1197 error ("second argument to %<__builtin_prefetch%> must be a constant");
1198 arg1 = integer_zero_node;
1199 }
1200 op1 = expand_normal (arg1);
1201 /* Argument 1 must be either zero or one. */
1202 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1203 {
1204 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1205 " using zero");
1206 op1 = const0_rtx;
1207 }
1208
1209 /* Argument 2 (locality) must be a compile-time constant int. */
1210 if (TREE_CODE (arg2) != INTEGER_CST)
1211 {
1212 error ("third argument to %<__builtin_prefetch%> must be a constant");
1213 arg2 = integer_zero_node;
1214 }
1215 op2 = expand_normal (arg2);
1216 /* Argument 2 must be 0, 1, 2, or 3. */
1217 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1218 {
1219 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1220 op2 = const0_rtx;
1221 }
1222
1223 #ifdef HAVE_prefetch
1224 if (HAVE_prefetch)
1225 {
1226 struct expand_operand ops[3];
1227
1228 create_address_operand (&ops[0], op0);
1229 create_integer_operand (&ops[1], INTVAL (op1));
1230 create_integer_operand (&ops[2], INTVAL (op2));
1231 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1232 return;
1233 }
1234 #endif
1235
1236 /* Don't do anything with direct references to volatile memory, but
1237 generate code to handle other side effects. */
1238 if (!MEM_P (op0) && side_effects_p (op0))
1239 emit_insn (op0);
1240 }
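
/* Usage sketch matching the checks above: the read/write flag and the
   locality hint must be integer constants, with rw in {0, 1} and locality
   in {0, 1, 2, 3}.  The pointer argument may have side effects and is
   still evaluated even when the target has no prefetch instruction. */

static void
prefetch_usage_model (const double *p)
{
  __builtin_prefetch (p, 0, 3);        /* read, maximal temporal locality */
  __builtin_prefetch (p + 64, 1, 0);   /* write, no temporal locality */
}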
1241
1242 /* Get a MEM rtx for expression EXP which is the address of an operand
1243 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1244 the maximum length of the block of memory that might be accessed or
1245 NULL if unknown. */
1246
1247 static rtx
1248 get_memory_rtx (tree exp, tree len)
1249 {
1250 tree orig_exp = exp;
1251 rtx addr, mem;
1252 HOST_WIDE_INT off;
1253
1254 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1255 from its expression; e.g. for expr->a.b only <variable>.a.b is recorded. */
1256 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1257 exp = TREE_OPERAND (exp, 0);
1258
1259 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1260 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1261
1262 /* Get an expression we can use to find the attributes to assign to MEM.
1263 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1264 we can. First remove any nops. */
1265 while (CONVERT_EXPR_P (exp)
1266 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1267 exp = TREE_OPERAND (exp, 0);
1268
1269 off = 0;
1270 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1271 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1272 && host_integerp (TREE_OPERAND (exp, 1), 0)
1273 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1274 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1275 else if (TREE_CODE (exp) == ADDR_EXPR)
1276 exp = TREE_OPERAND (exp, 0);
1277 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1278 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1279 else
1280 exp = NULL;
1281
1282 /* Honor attributes derived from exp, except for the alias set
1283 (as builtin stringops may alias with anything) and the size
1284 (as stringops may access multiple array elements). */
1285 if (exp)
1286 {
1287 set_mem_attributes (mem, exp, 0);
1288
1289 if (off)
1290 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1291
1292 /* Allow the string and memory builtins to overflow from one
1293 field into another, see http://gcc.gnu.org/PR23561.
1294 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1295 memory accessed by the string or memory builtin will fit
1296 within the field. */
1297 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1298 {
1299 tree mem_expr = MEM_EXPR (mem);
1300 HOST_WIDE_INT offset = -1, length = -1;
1301 tree inner = exp;
1302
1303 while (TREE_CODE (inner) == ARRAY_REF
1304 || CONVERT_EXPR_P (inner)
1305 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1306 || TREE_CODE (inner) == SAVE_EXPR)
1307 inner = TREE_OPERAND (inner, 0);
1308
1309 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1310
1311 if (MEM_OFFSET_KNOWN_P (mem))
1312 offset = MEM_OFFSET (mem);
1313
1314 if (offset >= 0 && len && host_integerp (len, 0))
1315 length = tree_low_cst (len, 0);
1316
1317 while (TREE_CODE (inner) == COMPONENT_REF)
1318 {
1319 tree field = TREE_OPERAND (inner, 1);
1320 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1321 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1322
1323 /* Bitfields are generally not byte-addressable. */
1324 gcc_assert (!DECL_BIT_FIELD (field)
1325 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1326 % BITS_PER_UNIT) == 0
1327 && host_integerp (DECL_SIZE (field), 0)
1328 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1329 % BITS_PER_UNIT) == 0));
1330
1331 /* If we can prove that the memory starting at XEXP (mem, 0) and
1332 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1333 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1334 fields without DECL_SIZE_UNIT like flexible array members. */
1335 if (length >= 0
1336 && DECL_SIZE_UNIT (field)
1337 && host_integerp (DECL_SIZE_UNIT (field), 0))
1338 {
1339 HOST_WIDE_INT size
1340 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1341 if (offset <= size
1342 && length <= size
1343 && offset + length <= size)
1344 break;
1345 }
1346
1347 if (offset >= 0
1348 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1349 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1350 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1351 / BITS_PER_UNIT;
1352 else
1353 {
1354 offset = -1;
1355 length = -1;
1356 }
1357
1358 mem_expr = TREE_OPERAND (mem_expr, 0);
1359 inner = TREE_OPERAND (inner, 0);
1360 }
1361
1362 if (mem_expr == NULL)
1363 offset = -1;
1364 if (mem_expr != MEM_EXPR (mem))
1365 {
1366 set_mem_expr (mem, mem_expr);
1367 if (offset >= 0)
1368 set_mem_offset (mem, offset);
1369 else
1370 clear_mem_offset (mem);
1371 }
1372 }
1373 set_mem_alias_set (mem, 0);
1374 clear_mem_size (mem);
1375 }
1376
1377 return mem;
1378 }
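
/* The PR23561 situation the MEM_EXPR pruning above allows for, as a hedged
   sketch (the type and function names are ours): a memory builtin may run
   from one field into the next, so the MEM must not claim to access field
   A alone unless LEN provably fits inside it. */

struct adjacent_fields_model
{
  char a[4];
  char b[4];
};

static void
cross_field_copy_model (struct adjacent_fields_model *x, const char *src)
{
  __builtin_memcpy (x->a, src, 8);   /* writes both a and b */
}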
1379 \f
1380 /* Built-in functions to perform an untyped call and return. */
1381
1382 #define apply_args_mode \
1383 (this_target_builtins->x_apply_args_mode)
1384 #define apply_result_mode \
1385 (this_target_builtins->x_apply_result_mode)
1386
1387 /* Return the size required for the block returned by __builtin_apply_args,
1388 and initialize apply_args_mode. */
1389
1390 static int
1391 apply_args_size (void)
1392 {
1393 static int size = -1;
1394 int align;
1395 unsigned int regno;
1396 enum machine_mode mode;
1397
1398 /* The values computed by this function never change. */
1399 if (size < 0)
1400 {
1401 /* The first value is the incoming arg-pointer. */
1402 size = GET_MODE_SIZE (Pmode);
1403
1404 /* The second value is the structure value address unless this is
1405 passed as an "invisible" first argument. */
1406 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1407 size += GET_MODE_SIZE (Pmode);
1408
1409 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1410 if (FUNCTION_ARG_REGNO_P (regno))
1411 {
1412 mode = targetm.calls.get_raw_arg_mode (regno);
1413
1414 gcc_assert (mode != VOIDmode);
1415
1416 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1417 if (size % align != 0)
1418 size = CEIL (size, align) * align;
1419 size += GET_MODE_SIZE (mode);
1420 apply_args_mode[regno] = mode;
1421 }
1422 else
1423 {
1424 apply_args_mode[regno] = VOIDmode;
1425 }
1426 }
1427 return size;
1428 }
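
/* The rounding idiom used above and in apply_result_size, spelled out:
   bump SIZE to the next multiple of ALIGN before assigning a register
   slot.  E.g. size 12, align 8 gives CEIL (12, 8) * 8 == 16.  A
   stand-alone equivalent, for illustration: */

static int
round_up_model (int size, int align)
{
  return (size + align - 1) / align * align;
}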
1429
1430 /* Return the size required for the block returned by __builtin_apply,
1431 and initialize apply_result_mode. */
1432
1433 static int
1434 apply_result_size (void)
1435 {
1436 static int size = -1;
1437 int align, regno;
1438 enum machine_mode mode;
1439
1440 /* The values computed by this function never change. */
1441 if (size < 0)
1442 {
1443 size = 0;
1444
1445 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1446 if (targetm.calls.function_value_regno_p (regno))
1447 {
1448 mode = targetm.calls.get_raw_result_mode (regno);
1449
1450 gcc_assert (mode != VOIDmode);
1451
1452 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1453 if (size % align != 0)
1454 size = CEIL (size, align) * align;
1455 size += GET_MODE_SIZE (mode);
1456 apply_result_mode[regno] = mode;
1457 }
1458 else
1459 apply_result_mode[regno] = VOIDmode;
1460
1461 /* Allow targets that use untyped_call and untyped_return to override
1462 the size so that machine-specific information can be stored here. */
1463 #ifdef APPLY_RESULT_SIZE
1464 size = APPLY_RESULT_SIZE;
1465 #endif
1466 }
1467 return size;
1468 }
1469
1470 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1471 /* Create a vector describing the result block RESULT. If SAVEP is true,
1472 the result block is used to save the values; otherwise it is used to
1473 restore the values. */
1474
1475 static rtx
1476 result_vector (int savep, rtx result)
1477 {
1478 int regno, size, align, nelts;
1479 enum machine_mode mode;
1480 rtx reg, mem;
1481 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1482
1483 size = nelts = 0;
1484 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1485 if ((mode = apply_result_mode[regno]) != VOIDmode)
1486 {
1487 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1488 if (size % align != 0)
1489 size = CEIL (size, align) * align;
1490 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1491 mem = adjust_address (result, mode, size);
1492 savevec[nelts++] = (savep
1493 ? gen_rtx_SET (VOIDmode, mem, reg)
1494 : gen_rtx_SET (VOIDmode, reg, mem));
1495 size += GET_MODE_SIZE (mode);
1496 }
1497 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1498 }
1499 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1500
1501 /* Save the state required to perform an untyped call with the same
1502 arguments as were passed to the current function. */
1503
1504 static rtx
1505 expand_builtin_apply_args_1 (void)
1506 {
1507 rtx registers, tem;
1508 int size, align, regno;
1509 enum machine_mode mode;
1510 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1511
1512 /* Create a block where the arg-pointer, structure value address,
1513 and argument registers can be saved. */
1514 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1515
1516 /* Walk past the arg-pointer and structure value address. */
1517 size = GET_MODE_SIZE (Pmode);
1518 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1519 size += GET_MODE_SIZE (Pmode);
1520
1521 /* Save each register used in calling a function to the block. */
1522 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1523 if ((mode = apply_args_mode[regno]) != VOIDmode)
1524 {
1525 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1526 if (size % align != 0)
1527 size = CEIL (size, align) * align;
1528
1529 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1530
1531 emit_move_insn (adjust_address (registers, mode, size), tem);
1532 size += GET_MODE_SIZE (mode);
1533 }
1534
1535 /* Save the arg pointer to the block. */
1536 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1537 #ifdef STACK_GROWS_DOWNWARD
1538 /* We need the pointer as the caller actually passed it to us, not
1539 as we might have pretended it was passed. Make sure it's a valid
1540 operand, as emit_move_insn isn't expected to handle a PLUS. */
1541 tem
1542 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1543 NULL_RTX);
1544 #endif
1545 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1546
1547 size = GET_MODE_SIZE (Pmode);
1548
1549 /* Save the structure value address unless this is passed as an
1550 "invisible" first argument. */
1551 if (struct_incoming_value)
1552 {
1553 emit_move_insn (adjust_address (registers, Pmode, size),
1554 copy_to_reg (struct_incoming_value));
1555 size += GET_MODE_SIZE (Pmode);
1556 }
1557
1558 /* Return the address of the block. */
1559 return copy_addr_to_reg (XEXP (registers, 0));
1560 }
1561
1562 /* __builtin_apply_args returns block of memory allocated on
1563 the stack into which is stored the arg pointer, structure
1564 value address, static chain, and all the registers that might
1565 possibly be used in performing a function call. The code is
1566 moved to the start of the function so the incoming values are
1567 saved. */
1568
1569 static rtx
1570 expand_builtin_apply_args (void)
1571 {
1572 /* Don't do __builtin_apply_args more than once in a function.
1573 Save the result of the first call and reuse it. */
1574 if (apply_args_value != 0)
1575 return apply_args_value;
1576 {
1577 /* When this function is called, it means that registers must be
1578 saved on entry to this function. So we migrate the
1579 call to the first insn of this function. */
1580 rtx temp;
1581 rtx seq;
1582
1583 start_sequence ();
1584 temp = expand_builtin_apply_args_1 ();
1585 seq = get_insns ();
1586 end_sequence ();
1587
1588 apply_args_value = temp;
1589
1590 /* Put the insns after the NOTE that starts the function.
1591 If this is inside a start_sequence, make the outer-level insn
1592 chain current, so the code is placed at the start of the
1593 function. If internal_arg_pointer is a non-virtual pseudo,
1594 it needs to be placed after the function that initializes
1595 that pseudo. */
1596 push_topmost_sequence ();
1597 if (REG_P (crtl->args.internal_arg_pointer)
1598 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1599 emit_insn_before (seq, parm_birth_insn);
1600 else
1601 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1602 pop_topmost_sequence ();
1603 return temp;
1604 }
1605 }
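
/* A hedged source-level sketch of how the untyped-call builtins combine
   (the callee and the 64-byte argument-block bound are ours, chosen for
   illustration): forward the current arguments to another function and
   return whatever it returned, without spelling out either signature. */

static void target_fn_model (int, int);

static void
forwarder_model (int a, int b)
{
  /* A and B are forwarded implicitly through the saved argument block. */
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) target_fn_model, args, 64);
  __builtin_return (result);
}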
1606
1607 /* Perform an untyped call and save the state required to perform an
1608 untyped return of whatever value was returned by the given function. */
1609
1610 static rtx
1611 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1612 {
1613 int size, align, regno;
1614 enum machine_mode mode;
1615 rtx incoming_args, result, reg, dest, src, call_insn;
1616 rtx old_stack_level = 0;
1617 rtx call_fusage = 0;
1618 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1619
1620 arguments = convert_memory_address (Pmode, arguments);
1621
1622 /* Create a block where the return registers can be saved. */
1623 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1624
1625 /* Fetch the arg pointer from the ARGUMENTS block. */
1626 incoming_args = gen_reg_rtx (Pmode);
1627 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1628 #ifndef STACK_GROWS_DOWNWARD
1629 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1630 incoming_args, 0, OPTAB_LIB_WIDEN);
1631 #endif
1632
1633 /* Push a new argument block and copy the arguments. Do not allow
1634 the (potential) memcpy call below to interfere with our stack
1635 manipulations. */
1636 do_pending_stack_adjust ();
1637 NO_DEFER_POP;
1638
1639 /* Save the stack with nonlocal if available. */
1640 #ifdef HAVE_save_stack_nonlocal
1641 if (HAVE_save_stack_nonlocal)
1642 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1643 else
1644 #endif
1645 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1646
1647 /* Allocate a block of memory onto the stack and copy the memory
1648 arguments to the outgoing arguments address. We can pass TRUE
1649 as the 4th argument because we just saved the stack pointer
1650 and will restore it right after the call. */
1651 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1652
1653 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1654 may have already set current_function_calls_alloca to true.
1655 current_function_calls_alloca won't be set if argsize is zero,
1656 so we have to guarantee need_drap is true here. */
1657 if (SUPPORTS_STACK_ALIGNMENT)
1658 crtl->need_drap = true;
1659
1660 dest = virtual_outgoing_args_rtx;
1661 #ifndef STACK_GROWS_DOWNWARD
1662 if (CONST_INT_P (argsize))
1663 dest = plus_constant (dest, -INTVAL (argsize));
1664 else
1665 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1666 #endif
1667 dest = gen_rtx_MEM (BLKmode, dest);
1668 set_mem_align (dest, PARM_BOUNDARY);
1669 src = gen_rtx_MEM (BLKmode, incoming_args);
1670 set_mem_align (src, PARM_BOUNDARY);
1671 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1672
1673 /* Refer to the argument block. */
1674 apply_args_size ();
1675 arguments = gen_rtx_MEM (BLKmode, arguments);
1676 set_mem_align (arguments, PARM_BOUNDARY);
1677
1678 /* Walk past the arg-pointer and structure value address. */
1679 size = GET_MODE_SIZE (Pmode);
1680 if (struct_value)
1681 size += GET_MODE_SIZE (Pmode);
1682
1683 /* Restore each of the registers previously saved. Make USE insns
1684 for each of these registers for use in making the call. */
1685 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1686 if ((mode = apply_args_mode[regno]) != VOIDmode)
1687 {
1688 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1689 if (size % align != 0)
1690 size = CEIL (size, align) * align;
1691 reg = gen_rtx_REG (mode, regno);
1692 emit_move_insn (reg, adjust_address (arguments, mode, size));
1693 use_reg (&call_fusage, reg);
1694 size += GET_MODE_SIZE (mode);
1695 }
1696
1697 /* Restore the structure value address unless this is passed as an
1698 "invisible" first argument. */
1699 size = GET_MODE_SIZE (Pmode);
1700 if (struct_value)
1701 {
1702 rtx value = gen_reg_rtx (Pmode);
1703 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1704 emit_move_insn (struct_value, value);
1705 if (REG_P (struct_value))
1706 use_reg (&call_fusage, struct_value);
1707 size += GET_MODE_SIZE (Pmode);
1708 }
1709
1710 /* All arguments and registers used for the call are set up by now! */
1711 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1712
1713 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1714 need be done; nor do we want to load it into a register as an
1715 optimization, because prepare_call_address already did that if needed. */
1716 if (GET_CODE (function) != SYMBOL_REF)
1717 function = memory_address (FUNCTION_MODE, function);
1718
1719 /* Generate the actual call instruction and save the return value. */
1720 #ifdef HAVE_untyped_call
1721 if (HAVE_untyped_call)
1722 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1723 result, result_vector (1, result)));
1724 else
1725 #endif
1726 #ifdef HAVE_call_value
1727 if (HAVE_call_value)
1728 {
1729 rtx valreg = 0;
1730
1731 /* Locate the unique return register. It is not possible to
1732 express a call that sets more than one return register using
1733 call_value; use untyped_call for that. In fact, untyped_call
1734 only needs to save the return registers in the given block. */
1735 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1736 if ((mode = apply_result_mode[regno]) != VOIDmode)
1737 {
1738 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1739
1740 valreg = gen_rtx_REG (mode, regno);
1741 }
1742
1743 emit_call_insn (GEN_CALL_VALUE (valreg,
1744 gen_rtx_MEM (FUNCTION_MODE, function),
1745 const0_rtx, NULL_RTX, const0_rtx));
1746
1747 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1748 }
1749 else
1750 #endif
1751 gcc_unreachable ();
1752
1753 /* Find the CALL insn we just emitted, and attach the register usage
1754 information. */
1755 call_insn = last_call_insn ();
1756 add_function_usage_to (call_insn, call_fusage);
1757
1758 /* Restore the stack. */
1759 #ifdef HAVE_save_stack_nonlocal
1760 if (HAVE_save_stack_nonlocal)
1761 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1762 else
1763 #endif
1764 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1765 fixup_args_size_notes (call_insn, get_last_insn(), 0);
1766
1767 OK_DEFER_POP;
1768
1769 /* Return the address of the result block. */
1770 result = copy_addr_to_reg (XEXP (result, 0));
1771 return convert_memory_address (ptr_mode, result);
1772 }
1773
1774 /* Perform an untyped return. */
1775
1776 static void
1777 expand_builtin_return (rtx result)
1778 {
1779 int size, align, regno;
1780 enum machine_mode mode;
1781 rtx reg;
1782 rtx call_fusage = 0;
1783
1784 result = convert_memory_address (Pmode, result);
1785
1786 apply_result_size ();
1787 result = gen_rtx_MEM (BLKmode, result);
1788
1789 #ifdef HAVE_untyped_return
1790 if (HAVE_untyped_return)
1791 {
1792 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1793 emit_barrier ();
1794 return;
1795 }
1796 #endif
1797
1798 /* Restore the return value and note that each value is used. */
1799 size = 0;
1800 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1801 if ((mode = apply_result_mode[regno]) != VOIDmode)
1802 {
1803 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1804 if (size % align != 0)
1805 size = CEIL (size, align) * align;
1806 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1807 emit_move_insn (reg, adjust_address (result, mode, size));
1808
1809 push_to_sequence (call_fusage);
1810 emit_use (reg);
1811 call_fusage = get_insns ();
1812 end_sequence ();
1813 size += GET_MODE_SIZE (mode);
1814 }
1815
1816 /* Put the USE insns before the return. */
1817 emit_insn (call_fusage);
1818
1819 /* Return whatever value was restored by jumping directly to the end
1820 of the function. */
1821 expand_naked_return ();
1822 }
1823
1824 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1825
1826 static enum type_class
1827 type_to_class (tree type)
1828 {
1829 switch (TREE_CODE (type))
1830 {
1831 case VOID_TYPE: return void_type_class;
1832 case INTEGER_TYPE: return integer_type_class;
1833 case ENUMERAL_TYPE: return enumeral_type_class;
1834 case BOOLEAN_TYPE: return boolean_type_class;
1835 case POINTER_TYPE: return pointer_type_class;
1836 case REFERENCE_TYPE: return reference_type_class;
1837 case OFFSET_TYPE: return offset_type_class;
1838 case REAL_TYPE: return real_type_class;
1839 case COMPLEX_TYPE: return complex_type_class;
1840 case FUNCTION_TYPE: return function_type_class;
1841 case METHOD_TYPE: return method_type_class;
1842 case RECORD_TYPE: return record_type_class;
1843 case UNION_TYPE:
1844 case QUAL_UNION_TYPE: return union_type_class;
1845 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1846 ? string_type_class : array_type_class);
1847 case LANG_TYPE: return lang_type_class;
1848 default: return no_type_class;
1849 }
1850 }
1851
1852 /* Expand a call EXP to __builtin_classify_type. */
1853
1854 static rtx
1855 expand_builtin_classify_type (tree exp)
1856 {
1857 if (call_expr_nargs (exp))
1858 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1859 return GEN_INT (no_type_class);
1860 }
1861
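/* Usage sketch (illustration only; the numeric values are assumptions
   taken from the type_class enum in typeclass.h and may differ):

     int i;  double d;
     int ci = __builtin_classify_type (i);   (folds to 1, integer_type_class)
     int cd = __builtin_classify_type (d);   (folds to 8, real_type_class)

   Both calls fold to compile-time constants via the code below.  */
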
1862 /* This helper macro, meant to be used in mathfn_built_in below,
1863 determines which among a set of three builtin math functions is
1864 appropriate for a given type mode. The `F' and `L' cases are
1865 automatically generated from the `double' case. */
1866 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1867 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1868 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1869 fcodel = BUILT_IN_MATHFN##L ; break;
1870 /* Similar to above, but appends _R after any F/L suffix. */
1871 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1872 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1873 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1874 fcodel = BUILT_IN_MATHFN##L_R ; break;
1875
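/* For example, CASE_MATHFN (BUILT_IN_SQRT) expands to roughly:

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;  */
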
1876 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1877 if available. If IMPLICIT is true use the implicit builtin declaration,
1878 otherwise use the explicit declaration. If we can't do the conversion,
1879 return zero. */
1880
1881 static tree
1882 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1883 {
1884 enum built_in_function fcode, fcodef, fcodel, fcode2;
1885
1886 switch (fn)
1887 {
1888 CASE_MATHFN (BUILT_IN_ACOS)
1889 CASE_MATHFN (BUILT_IN_ACOSH)
1890 CASE_MATHFN (BUILT_IN_ASIN)
1891 CASE_MATHFN (BUILT_IN_ASINH)
1892 CASE_MATHFN (BUILT_IN_ATAN)
1893 CASE_MATHFN (BUILT_IN_ATAN2)
1894 CASE_MATHFN (BUILT_IN_ATANH)
1895 CASE_MATHFN (BUILT_IN_CBRT)
1896 CASE_MATHFN (BUILT_IN_CEIL)
1897 CASE_MATHFN (BUILT_IN_CEXPI)
1898 CASE_MATHFN (BUILT_IN_COPYSIGN)
1899 CASE_MATHFN (BUILT_IN_COS)
1900 CASE_MATHFN (BUILT_IN_COSH)
1901 CASE_MATHFN (BUILT_IN_DREM)
1902 CASE_MATHFN (BUILT_IN_ERF)
1903 CASE_MATHFN (BUILT_IN_ERFC)
1904 CASE_MATHFN (BUILT_IN_EXP)
1905 CASE_MATHFN (BUILT_IN_EXP10)
1906 CASE_MATHFN (BUILT_IN_EXP2)
1907 CASE_MATHFN (BUILT_IN_EXPM1)
1908 CASE_MATHFN (BUILT_IN_FABS)
1909 CASE_MATHFN (BUILT_IN_FDIM)
1910 CASE_MATHFN (BUILT_IN_FLOOR)
1911 CASE_MATHFN (BUILT_IN_FMA)
1912 CASE_MATHFN (BUILT_IN_FMAX)
1913 CASE_MATHFN (BUILT_IN_FMIN)
1914 CASE_MATHFN (BUILT_IN_FMOD)
1915 CASE_MATHFN (BUILT_IN_FREXP)
1916 CASE_MATHFN (BUILT_IN_GAMMA)
1917 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1918 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1919 CASE_MATHFN (BUILT_IN_HYPOT)
1920 CASE_MATHFN (BUILT_IN_ILOGB)
1921 CASE_MATHFN (BUILT_IN_ICEIL)
1922 CASE_MATHFN (BUILT_IN_IFLOOR)
1923 CASE_MATHFN (BUILT_IN_INF)
1924 CASE_MATHFN (BUILT_IN_IRINT)
1925 CASE_MATHFN (BUILT_IN_IROUND)
1926 CASE_MATHFN (BUILT_IN_ISINF)
1927 CASE_MATHFN (BUILT_IN_J0)
1928 CASE_MATHFN (BUILT_IN_J1)
1929 CASE_MATHFN (BUILT_IN_JN)
1930 CASE_MATHFN (BUILT_IN_LCEIL)
1931 CASE_MATHFN (BUILT_IN_LDEXP)
1932 CASE_MATHFN (BUILT_IN_LFLOOR)
1933 CASE_MATHFN (BUILT_IN_LGAMMA)
1934 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1935 CASE_MATHFN (BUILT_IN_LLCEIL)
1936 CASE_MATHFN (BUILT_IN_LLFLOOR)
1937 CASE_MATHFN (BUILT_IN_LLRINT)
1938 CASE_MATHFN (BUILT_IN_LLROUND)
1939 CASE_MATHFN (BUILT_IN_LOG)
1940 CASE_MATHFN (BUILT_IN_LOG10)
1941 CASE_MATHFN (BUILT_IN_LOG1P)
1942 CASE_MATHFN (BUILT_IN_LOG2)
1943 CASE_MATHFN (BUILT_IN_LOGB)
1944 CASE_MATHFN (BUILT_IN_LRINT)
1945 CASE_MATHFN (BUILT_IN_LROUND)
1946 CASE_MATHFN (BUILT_IN_MODF)
1947 CASE_MATHFN (BUILT_IN_NAN)
1948 CASE_MATHFN (BUILT_IN_NANS)
1949 CASE_MATHFN (BUILT_IN_NEARBYINT)
1950 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1951 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1952 CASE_MATHFN (BUILT_IN_POW)
1953 CASE_MATHFN (BUILT_IN_POWI)
1954 CASE_MATHFN (BUILT_IN_POW10)
1955 CASE_MATHFN (BUILT_IN_REMAINDER)
1956 CASE_MATHFN (BUILT_IN_REMQUO)
1957 CASE_MATHFN (BUILT_IN_RINT)
1958 CASE_MATHFN (BUILT_IN_ROUND)
1959 CASE_MATHFN (BUILT_IN_SCALB)
1960 CASE_MATHFN (BUILT_IN_SCALBLN)
1961 CASE_MATHFN (BUILT_IN_SCALBN)
1962 CASE_MATHFN (BUILT_IN_SIGNBIT)
1963 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1964 CASE_MATHFN (BUILT_IN_SIN)
1965 CASE_MATHFN (BUILT_IN_SINCOS)
1966 CASE_MATHFN (BUILT_IN_SINH)
1967 CASE_MATHFN (BUILT_IN_SQRT)
1968 CASE_MATHFN (BUILT_IN_TAN)
1969 CASE_MATHFN (BUILT_IN_TANH)
1970 CASE_MATHFN (BUILT_IN_TGAMMA)
1971 CASE_MATHFN (BUILT_IN_TRUNC)
1972 CASE_MATHFN (BUILT_IN_Y0)
1973 CASE_MATHFN (BUILT_IN_Y1)
1974 CASE_MATHFN (BUILT_IN_YN)
1975
1976 default:
1977 return NULL_TREE;
1978 }
1979
1980 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1981 fcode2 = fcode;
1982 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1983 fcode2 = fcodef;
1984 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1985 fcode2 = fcodel;
1986 else
1987 return NULL_TREE;
1988
1989 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1990 return NULL_TREE;
1991
1992 return builtin_decl_explicit (fcode2);
1993 }
1994
1995 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1996
1997 tree
1998 mathfn_built_in (tree type, enum built_in_function fn)
1999 {
2000 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2001 }
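
/* Usage sketch (hedged): a caller holding the double builtin can ask
   for the variant matching another type, e.g.

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   yields the decl for BUILT_IN_SQRTF when it is implicitly available,
   and NULL_TREE otherwise.  */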
2002
2003 /* If errno must be maintained, expand the RTL to check if the result,
2004 TARGET, of a built-in function call, EXP, is NaN, and if so set
2005 errno to EDOM. */
2006
2007 static void
2008 expand_errno_check (tree exp, rtx target)
2009 {
2010 rtx lab = gen_label_rtx ();
2011
2012 /* Test the result; if it is NaN, set errno=EDOM because
2013 the argument was not in the domain. */
2014 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
2015 NULL_RTX, NULL_RTX, lab,
2016 /* The jump is very likely. */
2017 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
2018
2019 #ifdef TARGET_EDOM
2020 /* If this built-in doesn't throw an exception, set errno directly. */
2021 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
2022 {
2023 #ifdef GEN_ERRNO_RTX
2024 rtx errno_rtx = GEN_ERRNO_RTX;
2025 #else
2026 rtx errno_rtx
2027 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2028 #endif
2029 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
2030 emit_label (lab);
2031 return;
2032 }
2033 #endif
2034
2035 /* Make sure the library call isn't expanded as a tail call. */
2036 CALL_EXPR_TAILCALL (exp) = 0;
2037
2038 /* We can't set errno=EDOM directly; let the library call do it.
2039 Pop the arguments right away in case the call gets deleted. */
2040 NO_DEFER_POP;
2041 expand_call (exp, target, 0);
2042 OK_DEFER_POP;
2043 emit_label (lab);
2044 }
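
/* The self-comparison above exploits the IEEE rule that only a NaN
   compares unequal to itself.  The emitted code behaves like the
   source-level sketch

     double r = some_math_fn (x);
     if (r != r)          (r is NaN: the argument was out of domain)
       errno = EDOM;
*/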
2045
2046 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2047 Return NULL_RTX if a normal call should be emitted rather than expanding
2048 the function in-line. EXP is the expression that is a call to the builtin
2049 function; if convenient, the result should be placed in TARGET.
2050 SUBTARGET may be used as the target for computing one of EXP's operands. */
2051
2052 static rtx
2053 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2054 {
2055 optab builtin_optab;
2056 rtx op0, insns;
2057 tree fndecl = get_callee_fndecl (exp);
2058 enum machine_mode mode;
2059 bool errno_set = false;
2060 tree arg;
2061
2062 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2063 return NULL_RTX;
2064
2065 arg = CALL_EXPR_ARG (exp, 0);
2066
2067 switch (DECL_FUNCTION_CODE (fndecl))
2068 {
2069 CASE_FLT_FN (BUILT_IN_SQRT):
2070 errno_set = ! tree_expr_nonnegative_p (arg);
2071 builtin_optab = sqrt_optab;
2072 break;
2073 CASE_FLT_FN (BUILT_IN_EXP):
2074 errno_set = true; builtin_optab = exp_optab; break;
2075 CASE_FLT_FN (BUILT_IN_EXP10):
2076 CASE_FLT_FN (BUILT_IN_POW10):
2077 errno_set = true; builtin_optab = exp10_optab; break;
2078 CASE_FLT_FN (BUILT_IN_EXP2):
2079 errno_set = true; builtin_optab = exp2_optab; break;
2080 CASE_FLT_FN (BUILT_IN_EXPM1):
2081 errno_set = true; builtin_optab = expm1_optab; break;
2082 CASE_FLT_FN (BUILT_IN_LOGB):
2083 errno_set = true; builtin_optab = logb_optab; break;
2084 CASE_FLT_FN (BUILT_IN_LOG):
2085 errno_set = true; builtin_optab = log_optab; break;
2086 CASE_FLT_FN (BUILT_IN_LOG10):
2087 errno_set = true; builtin_optab = log10_optab; break;
2088 CASE_FLT_FN (BUILT_IN_LOG2):
2089 errno_set = true; builtin_optab = log2_optab; break;
2090 CASE_FLT_FN (BUILT_IN_LOG1P):
2091 errno_set = true; builtin_optab = log1p_optab; break;
2092 CASE_FLT_FN (BUILT_IN_ASIN):
2093 builtin_optab = asin_optab; break;
2094 CASE_FLT_FN (BUILT_IN_ACOS):
2095 builtin_optab = acos_optab; break;
2096 CASE_FLT_FN (BUILT_IN_TAN):
2097 builtin_optab = tan_optab; break;
2098 CASE_FLT_FN (BUILT_IN_ATAN):
2099 builtin_optab = atan_optab; break;
2100 CASE_FLT_FN (BUILT_IN_FLOOR):
2101 builtin_optab = floor_optab; break;
2102 CASE_FLT_FN (BUILT_IN_CEIL):
2103 builtin_optab = ceil_optab; break;
2104 CASE_FLT_FN (BUILT_IN_TRUNC):
2105 builtin_optab = btrunc_optab; break;
2106 CASE_FLT_FN (BUILT_IN_ROUND):
2107 builtin_optab = round_optab; break;
2108 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2109 builtin_optab = nearbyint_optab;
2110 if (flag_trapping_math)
2111 break;
2112 /* Else fallthrough and expand as rint. */
2113 CASE_FLT_FN (BUILT_IN_RINT):
2114 builtin_optab = rint_optab; break;
2115 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2116 builtin_optab = significand_optab; break;
2117 default:
2118 gcc_unreachable ();
2119 }
2120
2121 /* Make a suitable register to place result in. */
2122 mode = TYPE_MODE (TREE_TYPE (exp));
2123
2124 if (! flag_errno_math || ! HONOR_NANS (mode))
2125 errno_set = false;
2126
2127 /* Before working hard, check whether the instruction is available. */
2128 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2129 && (!errno_set || !optimize_insn_for_size_p ()))
2130 {
2131 target = gen_reg_rtx (mode);
2132
2133 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2134 need to expand the argument again. This way, we will not perform
2135 side-effects more than once. */
2136 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2137
2138 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2139
2140 start_sequence ();
2141
2142 /* Compute into TARGET.
2143 Set TARGET to wherever the result comes back. */
2144 target = expand_unop (mode, builtin_optab, op0, target, 0);
2145
2146 if (target != 0)
2147 {
2148 if (errno_set)
2149 expand_errno_check (exp, target);
2150
2151 /* Output the entire sequence. */
2152 insns = get_insns ();
2153 end_sequence ();
2154 emit_insn (insns);
2155 return target;
2156 }
2157
2158 /* If we were unable to expand via the builtin, stop the sequence
2159 (without outputting the insns) and call the library function
2160 with the stabilized argument list. */
2161 end_sequence ();
2162 }
2163
2164 return expand_call (exp, target, target == const0_rtx);
2165 }
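
/* Behavioral sketch (assumes the target provides a sqrt insn): with
   -fno-math-errno,

     double f (double x) { return __builtin_sqrt (x); }

   expands to a single sqrt instruction.  When errno must be honored,
   the inline insn is still emitted but is followed by the NaN check
   above, with a library call on the EDOM path.  */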
2166
2167 /* Expand a call to the builtin binary math functions (pow and atan2).
2168 Return NULL_RTX if a normal call should be emitted rather than expanding the
2169 function in-line. EXP is the expression that is a call to the builtin
2170 function; if convenient, the result should be placed in TARGET.
2171 SUBTARGET may be used as the target for computing one of EXP's
2172 operands. */
2173
2174 static rtx
2175 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2176 {
2177 optab builtin_optab;
2178 rtx op0, op1, insns;
2179 int op1_type = REAL_TYPE;
2180 tree fndecl = get_callee_fndecl (exp);
2181 tree arg0, arg1;
2182 enum machine_mode mode;
2183 bool errno_set = true;
2184
2185 switch (DECL_FUNCTION_CODE (fndecl))
2186 {
2187 CASE_FLT_FN (BUILT_IN_SCALBN):
2188 CASE_FLT_FN (BUILT_IN_SCALBLN):
2189 CASE_FLT_FN (BUILT_IN_LDEXP):
2190 op1_type = INTEGER_TYPE;
2191 default:
2192 break;
2193 }
2194
2195 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2196 return NULL_RTX;
2197
2198 arg0 = CALL_EXPR_ARG (exp, 0);
2199 arg1 = CALL_EXPR_ARG (exp, 1);
2200
2201 switch (DECL_FUNCTION_CODE (fndecl))
2202 {
2203 CASE_FLT_FN (BUILT_IN_POW):
2204 builtin_optab = pow_optab; break;
2205 CASE_FLT_FN (BUILT_IN_ATAN2):
2206 builtin_optab = atan2_optab; break;
2207 CASE_FLT_FN (BUILT_IN_SCALB):
2208 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2209 return 0;
2210 builtin_optab = scalb_optab; break;
2211 CASE_FLT_FN (BUILT_IN_SCALBN):
2212 CASE_FLT_FN (BUILT_IN_SCALBLN):
2213 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2214 return 0;
2215 /* Fall through... */
2216 CASE_FLT_FN (BUILT_IN_LDEXP):
2217 builtin_optab = ldexp_optab; break;
2218 CASE_FLT_FN (BUILT_IN_FMOD):
2219 builtin_optab = fmod_optab; break;
2220 CASE_FLT_FN (BUILT_IN_REMAINDER):
2221 CASE_FLT_FN (BUILT_IN_DREM):
2222 builtin_optab = remainder_optab; break;
2223 default:
2224 gcc_unreachable ();
2225 }
2226
2227 /* Make a suitable register to place result in. */
2228 mode = TYPE_MODE (TREE_TYPE (exp));
2229
2230 /* Before working hard, check whether the instruction is available. */
2231 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2232 return NULL_RTX;
2233
2234 target = gen_reg_rtx (mode);
2235
2236 if (! flag_errno_math || ! HONOR_NANS (mode))
2237 errno_set = false;
2238
2239 if (errno_set && optimize_insn_for_size_p ())
2240 return 0;
2241
2242 /* Always stabilize the argument list. */
2243 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2244 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2245
2246 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2247 op1 = expand_normal (arg1);
2248
2249 start_sequence ();
2250
2251 /* Compute into TARGET.
2252 Set TARGET to wherever the result comes back. */
2253 target = expand_binop (mode, builtin_optab, op0, op1,
2254 target, 0, OPTAB_DIRECT);
2255
2256 /* If we were unable to expand via the builtin, stop the sequence
2257 (without outputting the insns) and call the library function
2258 with the stabilized argument list. */
2259 if (target == 0)
2260 {
2261 end_sequence ();
2262 return expand_call (exp, target, target == const0_rtx);
2263 }
2264
2265 if (errno_set)
2266 expand_errno_check (exp, target);
2267
2268 /* Output the entire sequence. */
2269 insns = get_insns ();
2270 end_sequence ();
2271 emit_insn (insns);
2272
2273 return target;
2274 }
2275
2276 /* Expand a call to the builtin ternary math functions (fma).
2277 Return NULL_RTX if a normal call should be emitted rather than expanding the
2278 function in-line. EXP is the expression that is a call to the builtin
2279 function; if convenient, the result should be placed in TARGET.
2280 SUBTARGET may be used as the target for computing one of EXP's
2281 operands. */
2282
2283 static rtx
2284 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2285 {
2286 optab builtin_optab;
2287 rtx op0, op1, op2, insns;
2288 tree fndecl = get_callee_fndecl (exp);
2289 tree arg0, arg1, arg2;
2290 enum machine_mode mode;
2291
2292 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2293 return NULL_RTX;
2294
2295 arg0 = CALL_EXPR_ARG (exp, 0);
2296 arg1 = CALL_EXPR_ARG (exp, 1);
2297 arg2 = CALL_EXPR_ARG (exp, 2);
2298
2299 switch (DECL_FUNCTION_CODE (fndecl))
2300 {
2301 CASE_FLT_FN (BUILT_IN_FMA):
2302 builtin_optab = fma_optab; break;
2303 default:
2304 gcc_unreachable ();
2305 }
2306
2307 /* Make a suitable register to place result in. */
2308 mode = TYPE_MODE (TREE_TYPE (exp));
2309
2310 /* Before working hard, check whether the instruction is available. */
2311 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2312 return NULL_RTX;
2313
2314 target = gen_reg_rtx (mode);
2315
2316 /* Always stabilize the argument list. */
2317 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2318 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2319 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2320
2321 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2322 op1 = expand_normal (arg1);
2323 op2 = expand_normal (arg2);
2324
2325 start_sequence ();
2326
2327 /* Compute into TARGET.
2328 Set TARGET to wherever the result comes back. */
2329 target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2330 target, 0);
2331
2332 /* If we were unable to expand via the builtin, stop the sequence
2333 (without outputting the insns) and call the library function
2334 with the stabilized argument list. */
2335 if (target == 0)
2336 {
2337 end_sequence ();
2338 return expand_call (exp, target, target == const0_rtx);
2339 }
2340
2341 /* Output the entire sequence. */
2342 insns = get_insns ();
2343 end_sequence ();
2344 emit_insn (insns);
2345
2346 return target;
2347 }
2348
2349 /* Expand a call to the builtin sin and cos math functions.
2350 Return NULL_RTX if a normal call should be emitted rather than expanding the
2351 function in-line. EXP is the expression that is a call to the builtin
2352 function; if convenient, the result should be placed in TARGET.
2353 SUBTARGET may be used as the target for computing one of EXP's
2354 operands. */
2355
2356 static rtx
2357 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2358 {
2359 optab builtin_optab;
2360 rtx op0, insns;
2361 tree fndecl = get_callee_fndecl (exp);
2362 enum machine_mode mode;
2363 tree arg;
2364
2365 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2366 return NULL_RTX;
2367
2368 arg = CALL_EXPR_ARG (exp, 0);
2369
2370 switch (DECL_FUNCTION_CODE (fndecl))
2371 {
2372 CASE_FLT_FN (BUILT_IN_SIN):
2373 CASE_FLT_FN (BUILT_IN_COS):
2374 builtin_optab = sincos_optab; break;
2375 default:
2376 gcc_unreachable ();
2377 }
2378
2379 /* Make a suitable register to place result in. */
2380 mode = TYPE_MODE (TREE_TYPE (exp));
2381
2382 /* Check if the sincos insn is available; otherwise fall back
2383 to the sin or cos insn. */
2384 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2385 switch (DECL_FUNCTION_CODE (fndecl))
2386 {
2387 CASE_FLT_FN (BUILT_IN_SIN):
2388 builtin_optab = sin_optab; break;
2389 CASE_FLT_FN (BUILT_IN_COS):
2390 builtin_optab = cos_optab; break;
2391 default:
2392 gcc_unreachable ();
2393 }
2394
2395 /* Before working hard, check whether the instruction is available. */
2396 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2397 {
2398 target = gen_reg_rtx (mode);
2399
2400 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2401 need to expand the argument again. This way, we will not perform
2402 side-effects more than once. */
2403 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2404
2405 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2406
2407 start_sequence ();
2408
2409 /* Compute into TARGET.
2410 Set TARGET to wherever the result comes back. */
2411 if (builtin_optab == sincos_optab)
2412 {
2413 int result;
2414
2415 switch (DECL_FUNCTION_CODE (fndecl))
2416 {
2417 CASE_FLT_FN (BUILT_IN_SIN):
2418 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2419 break;
2420 CASE_FLT_FN (BUILT_IN_COS):
2421 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2422 break;
2423 default:
2424 gcc_unreachable ();
2425 }
2426 gcc_assert (result);
2427 }
2428 else
2429 {
2430 target = expand_unop (mode, builtin_optab, op0, target, 0);
2431 }
2432
2433 if (target != 0)
2434 {
2435 /* Output the entire sequence. */
2436 insns = get_insns ();
2437 end_sequence ();
2438 emit_insn (insns);
2439 return target;
2440 }
2441
2442 /* If we were unable to expand via the builtin, stop the sequence
2443 (without outputting the insns) and call the library function
2444 with the stabilized argument list. */
2445 end_sequence ();
2446 }
2447
2448 target = expand_call (exp, target, target == const0_rtx);
2449
2450 return target;
2451 }
2452
2453 /* Given an interclass math builtin decl FNDECL and its argument ARG
2454 return an RTL instruction code that implements the functionality.
2455 If that isn't possible or available return CODE_FOR_nothing. */
2456
2457 static enum insn_code
2458 interclass_mathfn_icode (tree arg, tree fndecl)
2459 {
2460 bool errno_set = false;
2461 optab builtin_optab = 0;
2462 enum machine_mode mode;
2463
2464 switch (DECL_FUNCTION_CODE (fndecl))
2465 {
2466 CASE_FLT_FN (BUILT_IN_ILOGB):
2467 errno_set = true; builtin_optab = ilogb_optab; break;
2468 CASE_FLT_FN (BUILT_IN_ISINF):
2469 builtin_optab = isinf_optab; break;
2470 case BUILT_IN_ISNORMAL:
2471 case BUILT_IN_ISFINITE:
2472 CASE_FLT_FN (BUILT_IN_FINITE):
2473 case BUILT_IN_FINITED32:
2474 case BUILT_IN_FINITED64:
2475 case BUILT_IN_FINITED128:
2476 case BUILT_IN_ISINFD32:
2477 case BUILT_IN_ISINFD64:
2478 case BUILT_IN_ISINFD128:
2479 /* These builtins have no optabs (yet). */
2480 break;
2481 default:
2482 gcc_unreachable ();
2483 }
2484
2485 /* There's no easy way to detect the case we need to set EDOM. */
2486 if (flag_errno_math && errno_set)
2487 return CODE_FOR_nothing;
2488
2489 /* Optab mode depends on the mode of the input argument. */
2490 mode = TYPE_MODE (TREE_TYPE (arg));
2491
2492 if (builtin_optab)
2493 return optab_handler (builtin_optab, mode);
2494 return CODE_FOR_nothing;
2495 }
2496
2497 /* Expand a call to one of the builtin math functions that operate on
2498 a floating-point argument and output an integer result (ilogb, isinf,
2499 isnan, etc).
2500 Return 0 if a normal call should be emitted rather than expanding the
2501 function in-line. EXP is the expression that is a call to the builtin
2502 function; if convenient, the result should be placed in TARGET. */
2503
2504 static rtx
2505 expand_builtin_interclass_mathfn (tree exp, rtx target)
2506 {
2507 enum insn_code icode = CODE_FOR_nothing;
2508 rtx op0;
2509 tree fndecl = get_callee_fndecl (exp);
2510 enum machine_mode mode;
2511 tree arg;
2512
2513 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2514 return NULL_RTX;
2515
2516 arg = CALL_EXPR_ARG (exp, 0);
2517 icode = interclass_mathfn_icode (arg, fndecl);
2518 mode = TYPE_MODE (TREE_TYPE (arg));
2519
2520 if (icode != CODE_FOR_nothing)
2521 {
2522 struct expand_operand ops[1];
2523 rtx last = get_last_insn ();
2524 tree orig_arg = arg;
2525
2526 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2527 need to expand the argument again. This way, we will not perform
2528 side-effects more than once. */
2529 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2530
2531 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2532
2533 if (mode != GET_MODE (op0))
2534 op0 = convert_to_mode (mode, op0, 0);
2535
2536 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2537 if (maybe_legitimize_operands (icode, 0, 1, ops)
2538 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2539 return ops[0].value;
2540
2541 delete_insns_since (last);
2542 CALL_EXPR_ARG (exp, 0) = orig_arg;
2543 }
2544
2545 return NULL_RTX;
2546 }
2547
2548 /* Expand a call to the builtin sincos math function.
2549 Return NULL_RTX if a normal call should be emitted rather than expanding the
2550 function in-line. EXP is the expression that is a call to the builtin
2551 function. */
2552
2553 static rtx
2554 expand_builtin_sincos (tree exp)
2555 {
2556 rtx op0, op1, op2, target1, target2;
2557 enum machine_mode mode;
2558 tree arg, sinp, cosp;
2559 int result;
2560 location_t loc = EXPR_LOCATION (exp);
2561 tree alias_type, alias_off;
2562
2563 if (!validate_arglist (exp, REAL_TYPE,
2564 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2565 return NULL_RTX;
2566
2567 arg = CALL_EXPR_ARG (exp, 0);
2568 sinp = CALL_EXPR_ARG (exp, 1);
2569 cosp = CALL_EXPR_ARG (exp, 2);
2570
2571 /* Make a suitable register to place result in. */
2572 mode = TYPE_MODE (TREE_TYPE (arg));
2573
2574 /* Check if sincos insn is available, otherwise emit the call. */
2575 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2576 return NULL_RTX;
2577
2578 target1 = gen_reg_rtx (mode);
2579 target2 = gen_reg_rtx (mode);
2580
2581 op0 = expand_normal (arg);
2582 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2583 alias_off = build_int_cst (alias_type, 0);
2584 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2585 sinp, alias_off));
2586 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2587 cosp, alias_off));
2588
2589 /* Compute into target1 and target2.
2590 Set TARGET to wherever the result comes back. */
2591 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2592 gcc_assert (result);
2593
2594 /* Move target1 and target2 to the memory locations indicated
2595 by op1 and op2. */
2596 emit_move_insn (op1, target1);
2597 emit_move_insn (op2, target2);
2598
2599 return const0_rtx;
2600 }
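
/* Source-level sketch of what reaches this expander (GNU extension,
   illustration only):

     double s, c;
     sincos (x, &s, &c);

   On targets with a sincos insn both results come from one
   instruction instead of separate sin and cos calls.  */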
2601
2602 /* Expand a call to the internal cexpi builtin to the sincos math function.
2603 EXP is the expression that is a call to the builtin function; if convenient,
2604 the result should be placed in TARGET. */
2605
2606 static rtx
2607 expand_builtin_cexpi (tree exp, rtx target)
2608 {
2609 tree fndecl = get_callee_fndecl (exp);
2610 tree arg, type;
2611 enum machine_mode mode;
2612 rtx op0, op1, op2;
2613 location_t loc = EXPR_LOCATION (exp);
2614
2615 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2616 return NULL_RTX;
2617
2618 arg = CALL_EXPR_ARG (exp, 0);
2619 type = TREE_TYPE (arg);
2620 mode = TYPE_MODE (TREE_TYPE (arg));
2621
2622 /* Try expanding via a sincos optab, fall back to emitting a libcall
2623 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2624 is only ever generated when sincos or cexp is available. */
2625 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2626 {
2627 op1 = gen_reg_rtx (mode);
2628 op2 = gen_reg_rtx (mode);
2629
2630 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2631
2632 /* Compute into op1 and op2. */
2633 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2634 }
2635 else if (TARGET_HAS_SINCOS)
2636 {
2637 tree call, fn = NULL_TREE;
2638 tree top1, top2;
2639 rtx op1a, op2a;
2640
2641 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2642 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2643 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2644 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2645 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2646 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2647 else
2648 gcc_unreachable ();
2649
2650 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2651 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2652 op1a = copy_addr_to_reg (XEXP (op1, 0));
2653 op2a = copy_addr_to_reg (XEXP (op2, 0));
2654 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2655 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2656
2657 /* Make sure not to fold the sincos call again. */
2658 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2659 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2660 call, 3, arg, top1, top2));
2661 }
2662 else
2663 {
2664 tree call, fn = NULL_TREE, narg;
2665 tree ctype = build_complex_type (type);
2666
2667 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2668 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2669 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2670 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2671 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2672 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2673 else
2674 gcc_unreachable ();
2675
2676 /* If we don't have a decl for cexp create one. This is the
2677 friendliest fallback if the user calls __builtin_cexpi
2678 without full C99 function support on the target. */
2679 if (fn == NULL_TREE)
2680 {
2681 tree fntype;
2682 const char *name = NULL;
2683
2684 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2685 name = "cexpf";
2686 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2687 name = "cexp";
2688 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2689 name = "cexpl";
2690
2691 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2692 fn = build_fn_decl (name, fntype);
2693 }
2694
2695 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2696 build_real (type, dconst0), arg);
2697
2698 /* Make sure not to fold the cexp call again. */
2699 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2700 return expand_expr (build_call_nary (ctype, call, 1, narg),
2701 target, VOIDmode, EXPAND_NORMAL);
2702 }
2703
2704 /* Now build the proper return type. */
2705 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2706 make_tree (TREE_TYPE (arg), op2),
2707 make_tree (TREE_TYPE (arg), op1)),
2708 target, VOIDmode, EXPAND_NORMAL);
2709 }
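
/* The final fallback above computes cexpi (x) as cexp (0 + x*i) using
   Euler's identity e**(i*x) == cos (x) + i*sin (x).  A hedged C99
   equivalent:

     #include <complex.h>

     double complex cexpi_like (double x)
     {
       return cexp (x * I);
     }
*/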
2710
2711 /* Conveniently construct a function call expression. FNDECL names the
2712 function to be called, N is the number of arguments, and the "..."
2713 parameters are the argument expressions. Unlike build_call_expr
2714 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2715
2716 static tree
2717 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2718 {
2719 va_list ap;
2720 tree fntype = TREE_TYPE (fndecl);
2721 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2722
2723 va_start (ap, n);
2724 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2725 va_end (ap);
2726 SET_EXPR_LOCATION (fn, loc);
2727 return fn;
2728 }
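
/* Usage sketch: expanders below use this to synthesize calls without
   refolding them, e.g. expand_builtin_mempcpy_args builds

     build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3, dest, src, len)

   to re-express mempcpy as a plain memcpy call.  */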
2729
2730 /* Expand a call to one of the builtin rounding functions gcc defines
2731 as an extension (lfloor and lceil). As these are gcc extensions we
2732 do not need to worry about setting errno to EDOM.
2733 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2734 EXP is the expression that is a call to the builtin function;
2735 if convenient, the result should be placed in TARGET. */
2736
2737 static rtx
2738 expand_builtin_int_roundingfn (tree exp, rtx target)
2739 {
2740 convert_optab builtin_optab;
2741 rtx op0, insns, tmp;
2742 tree fndecl = get_callee_fndecl (exp);
2743 enum built_in_function fallback_fn;
2744 tree fallback_fndecl;
2745 enum machine_mode mode;
2746 tree arg;
2747
2748 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2749 gcc_unreachable ();
2750
2751 arg = CALL_EXPR_ARG (exp, 0);
2752
2753 switch (DECL_FUNCTION_CODE (fndecl))
2754 {
2755 CASE_FLT_FN (BUILT_IN_ICEIL):
2756 CASE_FLT_FN (BUILT_IN_LCEIL):
2757 CASE_FLT_FN (BUILT_IN_LLCEIL):
2758 builtin_optab = lceil_optab;
2759 fallback_fn = BUILT_IN_CEIL;
2760 break;
2761
2762 CASE_FLT_FN (BUILT_IN_IFLOOR):
2763 CASE_FLT_FN (BUILT_IN_LFLOOR):
2764 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2765 builtin_optab = lfloor_optab;
2766 fallback_fn = BUILT_IN_FLOOR;
2767 break;
2768
2769 default:
2770 gcc_unreachable ();
2771 }
2772
2773 /* Make a suitable register to place result in. */
2774 mode = TYPE_MODE (TREE_TYPE (exp));
2775
2776 target = gen_reg_rtx (mode);
2777
2778 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2779 need to expand the argument again. This way, we will not perform
2780 side-effects more than once. */
2781 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2782
2783 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2784
2785 start_sequence ();
2786
2787 /* Compute into TARGET. */
2788 if (expand_sfix_optab (target, op0, builtin_optab))
2789 {
2790 /* Output the entire sequence. */
2791 insns = get_insns ();
2792 end_sequence ();
2793 emit_insn (insns);
2794 return target;
2795 }
2796
2797 /* If we were unable to expand via the builtin, stop the sequence
2798 (without outputting the insns). */
2799 end_sequence ();
2800
2801 /* Fall back to floating point rounding optab. */
2802 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2803
2804 /* For non-C99 targets we may end up without a fallback fndecl here
2805 if the user called __builtin_lfloor directly. In this case emit
2806 a call to the floor/ceil variants nevertheless. This should result
2807 in the best user experience for targets lacking full C99 support. */
2808 if (fallback_fndecl == NULL_TREE)
2809 {
2810 tree fntype;
2811 const char *name = NULL;
2812
2813 switch (DECL_FUNCTION_CODE (fndecl))
2814 {
2815 case BUILT_IN_ICEIL:
2816 case BUILT_IN_LCEIL:
2817 case BUILT_IN_LLCEIL:
2818 name = "ceil";
2819 break;
2820 case BUILT_IN_ICEILF:
2821 case BUILT_IN_LCEILF:
2822 case BUILT_IN_LLCEILF:
2823 name = "ceilf";
2824 break;
2825 case BUILT_IN_ICEILL:
2826 case BUILT_IN_LCEILL:
2827 case BUILT_IN_LLCEILL:
2828 name = "ceill";
2829 break;
2830 case BUILT_IN_IFLOOR:
2831 case BUILT_IN_LFLOOR:
2832 case BUILT_IN_LLFLOOR:
2833 name = "floor";
2834 break;
2835 case BUILT_IN_IFLOORF:
2836 case BUILT_IN_LFLOORF:
2837 case BUILT_IN_LLFLOORF:
2838 name = "floorf";
2839 break;
2840 case BUILT_IN_IFLOORL:
2841 case BUILT_IN_LFLOORL:
2842 case BUILT_IN_LLFLOORL:
2843 name = "floorl";
2844 break;
2845 default:
2846 gcc_unreachable ();
2847 }
2848
2849 fntype = build_function_type_list (TREE_TYPE (arg),
2850 TREE_TYPE (arg), NULL_TREE);
2851 fallback_fndecl = build_fn_decl (name, fntype);
2852 }
2853
2854 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2855
2856 tmp = expand_normal (exp);
2857
2858 /* Truncate the result of floating point optab to integer
2859 via expand_fix (). */
2860 target = gen_reg_rtx (mode);
2861 expand_fix (target, tmp, 0);
2862
2863 return target;
2864 }
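
/* Net effect in source terms (sketch): on a target without an lfloor
   insn,

     long l = __builtin_lfloor (x);

   is lowered to the equivalent of

     long l = (long) floor (x);

   that is, a call to the floor fallback followed by expand_fix.  */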
2865
2866 /* Expand a call to one of the builtin math functions doing integer
2867 conversion (lrint).
2868 Return 0 if a normal call should be emitted rather than expanding the
2869 function in-line. EXP is the expression that is a call to the builtin
2870 function; if convenient, the result should be placed in TARGET. */
2871
2872 static rtx
2873 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2874 {
2875 convert_optab builtin_optab;
2876 rtx op0, insns;
2877 tree fndecl = get_callee_fndecl (exp);
2878 tree arg;
2879 enum machine_mode mode;
2880 enum built_in_function fallback_fn = BUILT_IN_NONE;
2881
2882 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2883 gcc_unreachable ();
2884
2885 arg = CALL_EXPR_ARG (exp, 0);
2886
2887 switch (DECL_FUNCTION_CODE (fndecl))
2888 {
2889 CASE_FLT_FN (BUILT_IN_IRINT):
2890 fallback_fn = BUILT_IN_LRINT;
2891 /* FALLTHRU */
2892 CASE_FLT_FN (BUILT_IN_LRINT):
2893 CASE_FLT_FN (BUILT_IN_LLRINT):
2894 builtin_optab = lrint_optab;
2895 break;
2896
2897 CASE_FLT_FN (BUILT_IN_IROUND):
2898 fallback_fn = BUILT_IN_LROUND;
2899 /* FALLTHRU */
2900 CASE_FLT_FN (BUILT_IN_LROUND):
2901 CASE_FLT_FN (BUILT_IN_LLROUND):
2902 builtin_optab = lround_optab;
2903 break;
2904
2905 default:
2906 gcc_unreachable ();
2907 }
2908
2909 /* There's no easy way to detect the case we need to set EDOM. */
2910 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2911 return NULL_RTX;
2912
2913 /* Make a suitable register to place result in. */
2914 mode = TYPE_MODE (TREE_TYPE (exp));
2915
2916 /* If errno need not be maintained, try expanding via the optab. */
2917 if (!flag_errno_math)
2918 {
2919 target = gen_reg_rtx (mode);
2920
2921 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2922 need to expand the argument again. This way, we will not perform
2923 side-effects more than once. */
2924 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2925
2926 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2927
2928 start_sequence ();
2929
2930 if (expand_sfix_optab (target, op0, builtin_optab))
2931 {
2932 /* Output the entire sequence. */
2933 insns = get_insns ();
2934 end_sequence ();
2935 emit_insn (insns);
2936 return target;
2937 }
2938
2939 /* If we were unable to expand via the builtin, stop the sequence
2940 (without outputting the insns) and call the library function
2941 with the stabilized argument list. */
2942 end_sequence ();
2943 }
2944
2945 if (fallback_fn != BUILT_IN_NONE)
2946 {
2947 /* Fall back to rounding to long int. Use implicit_p 0, since for non-C99
2948 targets, (int) round (x) should never be transformed into
2949 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2950 a call to lround in the hope that the target provides at least some
2951 C99 functions. This should result in the best user experience for
2952 targets lacking full C99 support. */
2953 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2954 fallback_fn, 0);
2955
2956 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2957 fallback_fndecl, 1, arg);
2958
2959 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2960 return convert_to_mode (mode, target, 0);
2961 }
2962
2963 target = expand_call (exp, target, target == const0_rtx);
2964
2965 return target;
2966 }
2967
2968 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2969 a normal call should be emitted rather than expanding the function
2970 in-line. EXP is the expression that is a call to the builtin
2971 function; if convenient, the result should be placed in TARGET. */
2972
2973 static rtx
2974 expand_builtin_powi (tree exp, rtx target)
2975 {
2976 tree arg0, arg1;
2977 rtx op0, op1;
2978 enum machine_mode mode;
2979 enum machine_mode mode2;
2980
2981 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2982 return NULL_RTX;
2983
2984 arg0 = CALL_EXPR_ARG (exp, 0);
2985 arg1 = CALL_EXPR_ARG (exp, 1);
2986 mode = TYPE_MODE (TREE_TYPE (exp));
2987
2988 /* Emit a libcall to libgcc. */
2989
2990 /* Mode of the 2nd argument must match that of an int. */
2991 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2992
2993 if (target == NULL_RTX)
2994 target = gen_reg_rtx (mode);
2995
2996 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2997 if (GET_MODE (op0) != mode)
2998 op0 = convert_to_mode (mode, op0, 0);
2999 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3000 if (GET_MODE (op1) != mode2)
3001 op1 = convert_to_mode (mode2, op1, 0);
3002
3003 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3004 target, LCT_CONST, mode, 2,
3005 op0, mode, op1, mode2);
3006
3007 return target;
3008 }
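
/* Illustration (hedged): powi has no insn pattern, so a call such as

     double p = __builtin_powi (x, n);

   always becomes a libgcc library call (__powidf2 for double on
   typical targets), with the exponent first converted to int mode.  */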
3009
3010 /* Expand expression EXP which is a call to the strlen builtin. Return
3011 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3012 try to get the result in TARGET, if convenient. */
3013
3014 static rtx
3015 expand_builtin_strlen (tree exp, rtx target,
3016 enum machine_mode target_mode)
3017 {
3018 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3019 return NULL_RTX;
3020 else
3021 {
3022 struct expand_operand ops[4];
3023 rtx pat;
3024 tree len;
3025 tree src = CALL_EXPR_ARG (exp, 0);
3026 rtx src_reg, before_strlen;
3027 enum machine_mode insn_mode = target_mode;
3028 enum insn_code icode = CODE_FOR_nothing;
3029 unsigned int align;
3030
3031 /* If the length can be computed at compile-time, return it. */
3032 len = c_strlen (src, 0);
3033 if (len)
3034 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3035
3036 /* If the length can be computed at compile-time and is a constant
3037 integer, but there are side-effects in src, evaluate
3038 src for side-effects, then return len.
3039 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3040 can be optimized into: i++; x = 3; */
3041 len = c_strlen (src, 1);
3042 if (len && TREE_CODE (len) == INTEGER_CST)
3043 {
3044 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3045 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3046 }
3047
3048 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3049
3050 /* If SRC is not a pointer type, don't do this operation inline. */
3051 if (align == 0)
3052 return NULL_RTX;
3053
3054 /* Bail out if we can't compute strlen in the right mode. */
3055 while (insn_mode != VOIDmode)
3056 {
3057 icode = optab_handler (strlen_optab, insn_mode);
3058 if (icode != CODE_FOR_nothing)
3059 break;
3060
3061 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3062 }
3063 if (insn_mode == VOIDmode)
3064 return NULL_RTX;
3065
3066 /* Make a place to hold the source address. We will not expand
3067 the actual source until we are sure that the expansion will
3068 not fail -- there are trees that cannot be expanded twice. */
3069 src_reg = gen_reg_rtx (Pmode);
3070
3071 /* Mark the beginning of the strlen sequence so we can emit the
3072 source operand later. */
3073 before_strlen = get_last_insn ();
3074
3075 create_output_operand (&ops[0], target, insn_mode);
3076 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3077 create_integer_operand (&ops[2], 0);
3078 create_integer_operand (&ops[3], align);
3079 if (!maybe_expand_insn (icode, 4, ops))
3080 return NULL_RTX;
3081
3082 /* Now that we are assured of success, expand the source. */
3083 start_sequence ();
3084 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3085 if (pat != src_reg)
3086 {
3087 #ifdef POINTERS_EXTEND_UNSIGNED
3088 if (GET_MODE (pat) != Pmode)
3089 pat = convert_to_mode (Pmode, pat,
3090 POINTERS_EXTEND_UNSIGNED);
3091 #endif
3092 emit_move_insn (src_reg, pat);
3093 }
3094 pat = get_insns ();
3095 end_sequence ();
3096
3097 if (before_strlen)
3098 emit_insn_after (pat, before_strlen);
3099 else
3100 emit_insn_before (pat, get_insns ());
3101
3102 /* Return the value in the proper mode for this function. */
3103 if (GET_MODE (ops[0].value) == target_mode)
3104 target = ops[0].value;
3105 else if (target != 0)
3106 convert_move (target, ops[0].value, 0);
3107 else
3108 target = convert_to_mode (target_mode, ops[0].value, 0);
3109
3110 return target;
3111 }
3112 }
3113
3114 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3115 bytes from constant string DATA + OFFSET and return it as target
3116 constant. */
3117
3118 static rtx
3119 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3120 enum machine_mode mode)
3121 {
3122 const char *str = (const char *) data;
3123
3124 gcc_assert (offset >= 0
3125 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3126 <= strlen (str) + 1));
3127
3128 return c_readstr (str + offset, mode);
3129 }
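
/* Sketch of the callback contract (illustrative sizes): store_by_pieces
   invokes this once per machine-mode-sized chunk, e.g. for
   memcpy (d, "abcdefg", 8) with 32-bit words roughly

     builtin_memcpy_read_str (str, 0, SImode)   ("abcd" as a constant)
     builtin_memcpy_read_str (str, 4, SImode)   ("efg" plus the NUL)

   so the string bytes become immediate operands of the stores.  */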
3130
3131 /* Expand a call EXP to the memcpy builtin.
3132 Return NULL_RTX if we failed, the caller should emit a normal call,
3133 otherwise try to get the result in TARGET, if convenient (and in
3134 mode MODE if that's convenient). */
3135
3136 static rtx
3137 expand_builtin_memcpy (tree exp, rtx target)
3138 {
3139 if (!validate_arglist (exp,
3140 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3141 return NULL_RTX;
3142 else
3143 {
3144 tree dest = CALL_EXPR_ARG (exp, 0);
3145 tree src = CALL_EXPR_ARG (exp, 1);
3146 tree len = CALL_EXPR_ARG (exp, 2);
3147 const char *src_str;
3148 unsigned int src_align = get_pointer_alignment (src);
3149 unsigned int dest_align = get_pointer_alignment (dest);
3150 rtx dest_mem, src_mem, dest_addr, len_rtx;
3151 HOST_WIDE_INT expected_size = -1;
3152 unsigned int expected_align = 0;
3153
3154 /* If DEST is not a pointer type, call the normal function. */
3155 if (dest_align == 0)
3156 return NULL_RTX;
3157
3158 /* If SRC is not a pointer type, don't do this
3159 operation in-line. */
3160 if (src_align == 0)
3161 return NULL_RTX;
3162
3163 if (currently_expanding_gimple_stmt)
3164 stringop_block_profile (currently_expanding_gimple_stmt,
3165 &expected_align, &expected_size);
3166
3167 if (expected_align < dest_align)
3168 expected_align = dest_align;
3169 dest_mem = get_memory_rtx (dest, len);
3170 set_mem_align (dest_mem, dest_align);
3171 len_rtx = expand_normal (len);
3172 src_str = c_getstr (src);
3173
3174 /* If SRC is a string constant and block move would be done
3175 by pieces, we can avoid loading the string from memory
3176 and only store the computed constants. */
3177 if (src_str
3178 && CONST_INT_P (len_rtx)
3179 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3180 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3181 CONST_CAST (char *, src_str),
3182 dest_align, false))
3183 {
3184 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3185 builtin_memcpy_read_str,
3186 CONST_CAST (char *, src_str),
3187 dest_align, false, 0);
3188 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3189 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3190 return dest_mem;
3191 }
3192
3193 src_mem = get_memory_rtx (src, len);
3194 set_mem_align (src_mem, src_align);
3195
3196 /* Copy word part most expediently. */
3197 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3198 CALL_EXPR_TAILCALL (exp)
3199 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3200 expected_align, expected_size);
3201
3202 if (dest_addr == 0)
3203 {
3204 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3205 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3206 }
3207 return dest_addr;
3208 }
3209 }
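
/* Source-level effect of the store-by-pieces path above (sketch):

     char buf[8];
     memcpy (buf, "hi!", 4);

   can compile to a single 4-byte immediate store instead of a block
   move, because the source bytes are known at compile time.  */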
3210
3211 /* Expand a call EXP to the mempcpy builtin.
3212 Return NULL_RTX if we failed; the caller should emit a normal call,
3213 otherwise try to get the result in TARGET, if convenient (and in
3214 mode MODE if that's convenient). If ENDP is 0 return the
3215 destination pointer, if ENDP is 1 return the end pointer ala
3216 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3217 stpcpy. */
3218
3219 static rtx
3220 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3221 {
3222 if (!validate_arglist (exp,
3223 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3224 return NULL_RTX;
3225 else
3226 {
3227 tree dest = CALL_EXPR_ARG (exp, 0);
3228 tree src = CALL_EXPR_ARG (exp, 1);
3229 tree len = CALL_EXPR_ARG (exp, 2);
3230 return expand_builtin_mempcpy_args (dest, src, len,
3231 target, mode, /*endp=*/ 1);
3232 }
3233 }
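
/* The ENDP encoding used here and in the helpers below, by example:

     endp == 0  returns dest             (memcpy-style)
     endp == 1  returns dest + len       (mempcpy: past the last byte)
     endp == 2  returns dest + len - 1   (stpcpy: at the NUL)  */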
3234
3235 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3236 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3237 so that this can also be called without constructing an actual CALL_EXPR.
3238 The other arguments and return value are the same as for
3239 expand_builtin_mempcpy. */
3240
3241 static rtx
3242 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3243 rtx target, enum machine_mode mode, int endp)
3244 {
3245 /* If return value is ignored, transform mempcpy into memcpy. */
3246 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3247 {
3248 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3249 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3250 dest, src, len);
3251 return expand_expr (result, target, mode, EXPAND_NORMAL);
3252 }
3253 else
3254 {
3255 const char *src_str;
3256 unsigned int src_align = get_pointer_alignment (src);
3257 unsigned int dest_align = get_pointer_alignment (dest);
3258 rtx dest_mem, src_mem, len_rtx;
3259
3260 /* If either SRC or DEST is not a pointer type, don't do this
3261 operation in-line. */
3262 if (dest_align == 0 || src_align == 0)
3263 return NULL_RTX;
3264
3265 /* If LEN is not constant, call the normal function. */
3266 if (! host_integerp (len, 1))
3267 return NULL_RTX;
3268
3269 len_rtx = expand_normal (len);
3270 src_str = c_getstr (src);
3271
3272 /* If SRC is a string constant and block move would be done
3273 by pieces, we can avoid loading the string from memory
3274 and only store the computed constants. */
3275 if (src_str
3276 && CONST_INT_P (len_rtx)
3277 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3278 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3279 CONST_CAST (char *, src_str),
3280 dest_align, false))
3281 {
3282 dest_mem = get_memory_rtx (dest, len);
3283 set_mem_align (dest_mem, dest_align);
3284 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3285 builtin_memcpy_read_str,
3286 CONST_CAST (char *, src_str),
3287 dest_align, false, endp);
3288 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3289 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3290 return dest_mem;
3291 }
3292
3293 if (CONST_INT_P (len_rtx)
3294 && can_move_by_pieces (INTVAL (len_rtx),
3295 MIN (dest_align, src_align)))
3296 {
3297 dest_mem = get_memory_rtx (dest, len);
3298 set_mem_align (dest_mem, dest_align);
3299 src_mem = get_memory_rtx (src, len);
3300 set_mem_align (src_mem, src_align);
3301 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3302 MIN (dest_align, src_align), endp);
3303 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3304 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3305 return dest_mem;
3306 }
3307
3308 return NULL_RTX;
3309 }
3310 }
3311
3312 #ifndef HAVE_movstr
3313 # define HAVE_movstr 0
3314 # define CODE_FOR_movstr CODE_FOR_nothing
3315 #endif
3316
3317 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3318 we failed; the caller should emit a normal call, otherwise try to
3319 get the result in TARGET, if convenient. If ENDP is 0 return the
3320 destination pointer, if ENDP is 1 return the end pointer ala
3321 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3322 stpcpy. */
3323
3324 static rtx
3325 expand_movstr (tree dest, tree src, rtx target, int endp)
3326 {
3327 struct expand_operand ops[3];
3328 rtx dest_mem;
3329 rtx src_mem;
3330
3331 if (!HAVE_movstr)
3332 return NULL_RTX;
3333
3334 dest_mem = get_memory_rtx (dest, NULL);
3335 src_mem = get_memory_rtx (src, NULL);
3336 if (!endp)
3337 {
3338 target = force_reg (Pmode, XEXP (dest_mem, 0));
3339 dest_mem = replace_equiv_address (dest_mem, target);
3340 }
3341
3342 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3343 create_fixed_operand (&ops[1], dest_mem);
3344 create_fixed_operand (&ops[2], src_mem);
3345 expand_insn (CODE_FOR_movstr, 3, ops);
3346
3347 if (endp && target != const0_rtx)
3348 {
3349 target = ops[0].value;
3350 /* movstr is supposed to set end to the address of the NUL
3351 terminator. If the caller requested a mempcpy-like return value,
3352 adjust it. */
3353 if (endp == 1)
3354 {
3355 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), target), 1);
3356 emit_move_insn (target, force_operand (tem, NULL_RTX));
3357 }
3358 }
3359 return target;
3360 }
3361
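/* A minimal sketch (not compiled) of the three ENDP conventions
   described above, for copying the 3-byte string "hi" (NUL included)
   into a buffer.  Assumes <string.h> with mempcpy and stpcpy
   available.  */
#if 0
static void
endp_convention_sketch (void)
{
  char buf[8];
  char *p0 = strcpy (buf, "hi");      /* ENDP == 0: p0 == buf */
  char *p1 = mempcpy (buf, "hi", 3);  /* ENDP == 1: p1 == buf + 3, past the NUL */
  char *p2 = stpcpy (buf, "hi");      /* ENDP == 2: p2 == buf + 2, at the NUL */
}
#endif
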
3362 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3363 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3364 try to get the result in TARGET, if convenient (and in mode MODE if that's
3365 convenient). */
3366
3367 static rtx
3368 expand_builtin_strcpy (tree exp, rtx target)
3369 {
3370 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3371 {
3372 tree dest = CALL_EXPR_ARG (exp, 0);
3373 tree src = CALL_EXPR_ARG (exp, 1);
3374 return expand_builtin_strcpy_args (dest, src, target);
3375 }
3376 return NULL_RTX;
3377 }
3378
3379 /* Helper function to do the actual work for expand_builtin_strcpy. The
3380 arguments to the builtin_strcpy call DEST and SRC are broken out
3381 so that this can also be called without constructing an actual CALL_EXPR.
3382 The other arguments and return value are the same as for
3383 expand_builtin_strcpy. */
3384
3385 static rtx
3386 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3387 {
3388 return expand_movstr (dest, src, target, /*endp=*/0);
3389 }
3390
3391 /* Expand a call EXP to the stpcpy builtin.
3392 Return NULL_RTX if we failed; the caller should emit a normal call.
3393 Otherwise try to get the result in TARGET, if convenient (and in
3394 mode MODE if that's convenient). */
3395
3396 static rtx
3397 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3398 {
3399 tree dst, src;
3400 location_t loc = EXPR_LOCATION (exp);
3401
3402 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3403 return NULL_RTX;
3404
3405 dst = CALL_EXPR_ARG (exp, 0);
3406 src = CALL_EXPR_ARG (exp, 1);
3407
3408 /* If return value is ignored, transform stpcpy into strcpy. */
3409 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3410 {
3411 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3412 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3413 return expand_expr (result, target, mode, EXPAND_NORMAL);
3414 }
3415 else
3416 {
3417 tree len, lenp1;
3418 rtx ret;
3419
3420 /* Ensure we get an actual string whose length can be evaluated at
3421 compile-time, not an expression containing a string. This is
3422 because the latter will potentially produce pessimized code
3423 when used to compute the return value. */
3424 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3425 return expand_movstr (dst, src, target, /*endp=*/2);
3426
3427 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3428 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3429 target, mode, /*endp=*/2);
3430
3431 if (ret)
3432 return ret;
3433
3434 if (TREE_CODE (len) == INTEGER_CST)
3435 {
3436 rtx len_rtx = expand_normal (len);
3437
3438 if (CONST_INT_P (len_rtx))
3439 {
3440 ret = expand_builtin_strcpy_args (dst, src, target);
3441
3442 if (ret)
3443 {
3444 if (! target)
3445 {
3446 if (mode != VOIDmode)
3447 target = gen_reg_rtx (mode);
3448 else
3449 target = gen_reg_rtx (GET_MODE (ret));
3450 }
3451 if (GET_MODE (target) != GET_MODE (ret))
3452 ret = gen_lowpart (GET_MODE (target), ret);
3453
3454 ret = plus_constant (ret, INTVAL (len_rtx));
3455 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3456 gcc_assert (ret);
3457
3458 return target;
3459 }
3460 }
3461 }
3462
3463 return expand_movstr (dst, src, target, /*endp=*/2);
3464 }
3465 }
3466
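/* A minimal sketch (not compiled) of the transformation used above:
   when strlen (SRC) is a compile-time constant, stpcpy is mempcpy of
   strlen + 1 bytes with the end pointer stepped back onto the NUL,
   i.e. the ENDP == 2 convention.  Assumes <string.h>.  */
#if 0
static char *
stpcpy_via_mempcpy (char *dst, const char *src)
{
  size_t lenp1 = strlen (src) + 1;                 /* bytes incl. the NUL */
  return (char *) mempcpy (dst, src, lenp1) - 1;   /* address of the NUL */
}
#endif
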
3467 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3468 bytes from constant string DATA + OFFSET and return it as target
3469 constant. */
3470
3471 rtx
3472 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3473 enum machine_mode mode)
3474 {
3475 const char *str = (const char *) data;
3476
3477 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3478 return const0_rtx;
3479
3480 return c_readstr (str + offset, mode);
3481 }
3482
3483 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3484 NULL_RTX if we failed; the caller should emit a normal call. */
3485
3486 static rtx
3487 expand_builtin_strncpy (tree exp, rtx target)
3488 {
3489 location_t loc = EXPR_LOCATION (exp);
3490
3491 if (validate_arglist (exp,
3492 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3493 {
3494 tree dest = CALL_EXPR_ARG (exp, 0);
3495 tree src = CALL_EXPR_ARG (exp, 1);
3496 tree len = CALL_EXPR_ARG (exp, 2);
3497 tree slen = c_strlen (src, 1);
3498
3499 /* Both LEN and the length of SRC must be known compile-time constants. */
3500 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3501 return NULL_RTX;
3502
3503 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3504
3505 /* We're required to pad with trailing zeros if the requested
3506 len is greater than strlen(s2)+1. In that case try to
3507 use store_by_pieces; if that fails, punt. */
3508 if (tree_int_cst_lt (slen, len))
3509 {
3510 unsigned int dest_align = get_pointer_alignment (dest);
3511 const char *p = c_getstr (src);
3512 rtx dest_mem;
3513
3514 if (!p || dest_align == 0 || !host_integerp (len, 1)
3515 || !can_store_by_pieces (tree_low_cst (len, 1),
3516 builtin_strncpy_read_str,
3517 CONST_CAST (char *, p),
3518 dest_align, false))
3519 return NULL_RTX;
3520
3521 dest_mem = get_memory_rtx (dest, len);
3522 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3523 builtin_strncpy_read_str,
3524 CONST_CAST (char *, p), dest_align, false, 0);
3525 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3526 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3527 return dest_mem;
3528 }
3529 }
3530 return NULL_RTX;
3531 }
3532
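/* A minimal sketch (not compiled) of the padding rule handled above:
   when the requested length exceeds strlen (SRC) + 1, strncpy must
   zero-fill the remainder, which is what the store_by_pieces expansion
   emits.  Assumes <string.h>.  */
#if 0
static void
strncpy_padding_sketch (void)
{
  char buf[5];
  strncpy (buf, "ab", sizeof buf);
  /* buf == { 'a', 'b', '\0', '\0', '\0' }: two copied bytes plus
     three bytes of zero padding.  */
}
#endif
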
3533 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3534 bytes from constant string DATA + OFFSET and return it as target
3535 constant. */
3536
3537 rtx
3538 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3539 enum machine_mode mode)
3540 {
3541 const char *c = (const char *) data;
3542 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3543
3544 memset (p, *c, GET_MODE_SIZE (mode));
3545
3546 return c_readstr (p, mode);
3547 }
3548
3549 /* Callback routine for store_by_pieces. Return the RTL of a register
3550 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3551 char value given in the RTL register data. For example, if mode is
3552 4 bytes wide, return the RTL for 0x01010101*data. */
3553
3554 static rtx
3555 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3556 enum machine_mode mode)
3557 {
3558 rtx target, coeff;
3559 size_t size;
3560 char *p;
3561
3562 size = GET_MODE_SIZE (mode);
3563 if (size == 1)
3564 return (rtx) data;
3565
3566 p = XALLOCAVEC (char, size);
3567 memset (p, 1, size);
3568 coeff = c_readstr (p, mode);
3569
3570 target = convert_to_mode (mode, (rtx) data, 1);
3571 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3572 return force_reg (mode, target);
3573 }
3574
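/* A minimal sketch (not compiled) of the multiplication trick above,
   shown for a 4-byte mode: multiplying an unsigned byte by 0x01010101
   replicates it into every byte of the word.  Assumes <stdint.h>.  */
#if 0
static uint32_t
replicate_byte_32 (unsigned char c)
{
  return (uint32_t) c * 0x01010101u;   /* c == 0xAB yields 0xABABABAB */
}
#endif
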
3575 /* Expand expression EXP, which is a call to the memset builtin. Return
3576 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3577 try to get the result in TARGET, if convenient (and in mode MODE if that's
3578 convenient). */
3579
3580 static rtx
3581 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3582 {
3583 if (!validate_arglist (exp,
3584 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3585 return NULL_RTX;
3586 else
3587 {
3588 tree dest = CALL_EXPR_ARG (exp, 0);
3589 tree val = CALL_EXPR_ARG (exp, 1);
3590 tree len = CALL_EXPR_ARG (exp, 2);
3591 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3592 }
3593 }
3594
3595 /* Helper function to do the actual work for expand_builtin_memset. The
3596 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3597 so that this can also be called without constructing an actual CALL_EXPR.
3598 The other arguments and return value are the same as for
3599 expand_builtin_memset. */
3600
3601 static rtx
3602 expand_builtin_memset_args (tree dest, tree val, tree len,
3603 rtx target, enum machine_mode mode, tree orig_exp)
3604 {
3605 tree fndecl, fn;
3606 enum built_in_function fcode;
3607 enum machine_mode val_mode;
3608 char c;
3609 unsigned int dest_align;
3610 rtx dest_mem, dest_addr, len_rtx;
3611 HOST_WIDE_INT expected_size = -1;
3612 unsigned int expected_align = 0;
3613
3614 dest_align = get_pointer_alignment (dest);
3615
3616 /* If DEST is not a pointer type, don't do this operation in-line. */
3617 if (dest_align == 0)
3618 return NULL_RTX;
3619
3620 if (currently_expanding_gimple_stmt)
3621 stringop_block_profile (currently_expanding_gimple_stmt,
3622 &expected_align, &expected_size);
3623
3624 if (expected_align < dest_align)
3625 expected_align = dest_align;
3626
3627 /* If the LEN parameter is zero, return DEST. */
3628 if (integer_zerop (len))
3629 {
3630 /* Evaluate and ignore VAL in case it has side-effects. */
3631 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3632 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3633 }
3634
3635 /* Stabilize the arguments in case we fail. */
3636 dest = builtin_save_expr (dest);
3637 val = builtin_save_expr (val);
3638 len = builtin_save_expr (len);
3639
3640 len_rtx = expand_normal (len);
3641 dest_mem = get_memory_rtx (dest, len);
3642 val_mode = TYPE_MODE (unsigned_char_type_node);
3643
3644 if (TREE_CODE (val) != INTEGER_CST)
3645 {
3646 rtx val_rtx;
3647
3648 val_rtx = expand_normal (val);
3649 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3650
3651 /* Assume that we can memset by pieces if we can store
3652 the coefficients by pieces (in the required modes).
3653 We can't pass builtin_memset_gen_str as that emits RTL. */
3654 c = 1;
3655 if (host_integerp (len, 1)
3656 && can_store_by_pieces (tree_low_cst (len, 1),
3657 builtin_memset_read_str, &c, dest_align,
3658 true))
3659 {
3660 val_rtx = force_reg (val_mode, val_rtx);
3661 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3662 builtin_memset_gen_str, val_rtx, dest_align,
3663 true, 0);
3664 }
3665 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3666 dest_align, expected_align,
3667 expected_size))
3668 goto do_libcall;
3669
3670 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3671 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3672 return dest_mem;
3673 }
3674
3675 if (target_char_cast (val, &c))
3676 goto do_libcall;
3677
3678 if (c)
3679 {
3680 if (host_integerp (len, 1)
3681 && can_store_by_pieces (tree_low_cst (len, 1),
3682 builtin_memset_read_str, &c, dest_align,
3683 true))
3684 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3685 builtin_memset_read_str, &c, dest_align, true, 0);
3686 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3687 gen_int_mode (c, val_mode),
3688 dest_align, expected_align,
3689 expected_size))
3690 goto do_libcall;
3691
3692 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3693 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3694 return dest_mem;
3695 }
3696
3697 set_mem_align (dest_mem, dest_align);
3698 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3699 CALL_EXPR_TAILCALL (orig_exp)
3700 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3701 expected_align, expected_size);
3702
3703 if (dest_addr == 0)
3704 {
3705 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3706 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3707 }
3708
3709 return dest_addr;
3710
3711 do_libcall:
3712 fndecl = get_callee_fndecl (orig_exp);
3713 fcode = DECL_FUNCTION_CODE (fndecl);
3714 if (fcode == BUILT_IN_MEMSET)
3715 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3716 dest, val, len);
3717 else if (fcode == BUILT_IN_BZERO)
3718 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3719 dest, len);
3720 else
3721 gcc_unreachable ();
3722 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3723 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3724 return expand_call (fn, target, target == const0_rtx);
3725 }
3726
3727 /* Expand expression EXP, which is a call to the bzero builtin. Return
3728 NULL_RTX if we failed; the caller should emit a normal call. */
3729
3730 static rtx
3731 expand_builtin_bzero (tree exp)
3732 {
3733 tree dest, size;
3734 location_t loc = EXPR_LOCATION (exp);
3735
3736 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3737 return NULL_RTX;
3738
3739 dest = CALL_EXPR_ARG (exp, 0);
3740 size = CALL_EXPR_ARG (exp, 1);
3741
3742 /* Build a new argument list, transforming bzero(ptr x, int y) to
3743 memset(ptr x, int 0, size_t y). It is done this way
3744 so that if it isn't expanded inline, we fall back to
3745 calling bzero instead of memset. */
3746
3747 return expand_builtin_memset_args (dest, integer_zero_node,
3748 fold_convert_loc (loc,
3749 size_type_node, size),
3750 const0_rtx, VOIDmode, exp);
3751 }
3752
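/* A minimal sketch (not compiled) of the equivalence relied on above;
   assumes <string.h> and <stddef.h>.  */
#if 0
static void
bzero_reference (void *ptr, size_t len)
{
  memset (ptr, 0, len);   /* bzero (p, n) behaves as memset (p, 0, n) */
}
#endif
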
3753 /* Expand expression EXP, which is a call to the memcmp built-in function.
3754 Return NULL_RTX if we failed and the caller should emit a normal call,
3755 otherwise try to get the result in TARGET, if convenient (and in mode
3756 MODE, if that's convenient). */
3757
3758 static rtx
3759 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3760 ATTRIBUTE_UNUSED enum machine_mode mode)
3761 {
3762 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3763
3764 if (!validate_arglist (exp,
3765 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3766 return NULL_RTX;
3767
3768 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3769 implementing memcmp because it will stop if it encounters two
3770 zero bytes. */
3771 #if defined HAVE_cmpmemsi
3772 {
3773 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3774 rtx result;
3775 rtx insn;
3776 tree arg1 = CALL_EXPR_ARG (exp, 0);
3777 tree arg2 = CALL_EXPR_ARG (exp, 1);
3778 tree len = CALL_EXPR_ARG (exp, 2);
3779
3780 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3781 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3782 enum machine_mode insn_mode;
3783
3784 if (HAVE_cmpmemsi)
3785 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3786 else
3787 return NULL_RTX;
3788
3789 /* If either argument is not a pointer type, call the function. */
3790 if (arg1_align == 0 || arg2_align == 0)
3791 return NULL_RTX;
3792
3793 /* Make a place to write the result of the instruction. */
3794 result = target;
3795 if (! (result != 0
3796 && REG_P (result) && GET_MODE (result) == insn_mode
3797 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3798 result = gen_reg_rtx (insn_mode);
3799
3800 arg1_rtx = get_memory_rtx (arg1, len);
3801 arg2_rtx = get_memory_rtx (arg2, len);
3802 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3803
3804 /* Set MEM_SIZE as appropriate. */
3805 if (CONST_INT_P (arg3_rtx))
3806 {
3807 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3808 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3809 }
3810
3811 if (HAVE_cmpmemsi)
3812 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3813 GEN_INT (MIN (arg1_align, arg2_align)));
3814 else
3815 gcc_unreachable ();
3816
3817 if (insn)
3818 emit_insn (insn);
3819 else
3820 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3821 TYPE_MODE (integer_type_node), 3,
3822 XEXP (arg1_rtx, 0), Pmode,
3823 XEXP (arg2_rtx, 0), Pmode,
3824 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3825 TYPE_UNSIGNED (sizetype)),
3826 TYPE_MODE (sizetype));
3827
3828 /* Return the value in the proper mode for this function. */
3829 mode = TYPE_MODE (TREE_TYPE (exp));
3830 if (GET_MODE (result) == mode)
3831 return result;
3832 else if (target != 0)
3833 {
3834 convert_move (target, result, 0);
3835 return target;
3836 }
3837 else
3838 return convert_to_mode (mode, result, 0);
3839 }
3840 #endif /* HAVE_cmpmemsi. */
3841
3842 return NULL_RTX;
3843 }
3844
3845 /* Expand expression EXP, which is a call to the strcmp builtin. Return
3846 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3847 try to get the result in TARGET, if convenient. */
3848
3849 static rtx
3850 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3851 {
3852 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3853 return NULL_RTX;
3854
3855 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3856 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3857 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3858 {
3859 rtx arg1_rtx, arg2_rtx;
3860 rtx result, insn = NULL_RTX;
3861 tree fndecl, fn;
3862 tree arg1 = CALL_EXPR_ARG (exp, 0);
3863 tree arg2 = CALL_EXPR_ARG (exp, 1);
3864
3865 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3866 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3867
3868 /* If either argument is not a pointer type, call the function. */
3869 if (arg1_align == 0 || arg2_align == 0)
3870 return NULL_RTX;
3871
3872 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3873 arg1 = builtin_save_expr (arg1);
3874 arg2 = builtin_save_expr (arg2);
3875
3876 arg1_rtx = get_memory_rtx (arg1, NULL);
3877 arg2_rtx = get_memory_rtx (arg2, NULL);
3878
3879 #ifdef HAVE_cmpstrsi
3880 /* Try to call cmpstrsi. */
3881 if (HAVE_cmpstrsi)
3882 {
3883 enum machine_mode insn_mode
3884 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3885
3886 /* Make a place to write the result of the instruction. */
3887 result = target;
3888 if (! (result != 0
3889 && REG_P (result) && GET_MODE (result) == insn_mode
3890 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3891 result = gen_reg_rtx (insn_mode);
3892
3893 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3894 GEN_INT (MIN (arg1_align, arg2_align)));
3895 }
3896 #endif
3897 #ifdef HAVE_cmpstrnsi
3898 /* Try to determine at least one length and call cmpstrnsi. */
3899 if (!insn && HAVE_cmpstrnsi)
3900 {
3901 tree len;
3902 rtx arg3_rtx;
3903
3904 enum machine_mode insn_mode
3905 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3906 tree len1 = c_strlen (arg1, 1);
3907 tree len2 = c_strlen (arg2, 1);
3908
3909 if (len1)
3910 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3911 if (len2)
3912 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3913
3914 /* If we don't have a constant length for the first, use the length
3915 of the second, if we know it. We don't require a constant for
3916 this case; some cost analysis could be done if both are available
3917 but neither is constant. For now, assume they're equally cheap,
3918 unless one has side effects. If both strings have constant lengths,
3919 use the smaller. */
3920
3921 if (!len1)
3922 len = len2;
3923 else if (!len2)
3924 len = len1;
3925 else if (TREE_SIDE_EFFECTS (len1))
3926 len = len2;
3927 else if (TREE_SIDE_EFFECTS (len2))
3928 len = len1;
3929 else if (TREE_CODE (len1) != INTEGER_CST)
3930 len = len2;
3931 else if (TREE_CODE (len2) != INTEGER_CST)
3932 len = len1;
3933 else if (tree_int_cst_lt (len1, len2))
3934 len = len1;
3935 else
3936 len = len2;
3937
3938 /* If both arguments have side effects, we cannot optimize. */
3939 if (!len || TREE_SIDE_EFFECTS (len))
3940 goto do_libcall;
3941
3942 arg3_rtx = expand_normal (len);
3943
3944 /* Make a place to write the result of the instruction. */
3945 result = target;
3946 if (! (result != 0
3947 && REG_P (result) && GET_MODE (result) == insn_mode
3948 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3949 result = gen_reg_rtx (insn_mode);
3950
3951 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3952 GEN_INT (MIN (arg1_align, arg2_align)));
3953 }
3954 #endif
3955
3956 if (insn)
3957 {
3958 enum machine_mode mode;
3959 emit_insn (insn);
3960
3961 /* Return the value in the proper mode for this function. */
3962 mode = TYPE_MODE (TREE_TYPE (exp));
3963 if (GET_MODE (result) == mode)
3964 return result;
3965 if (target == 0)
3966 return convert_to_mode (mode, result, 0);
3967 convert_move (target, result, 0);
3968 return target;
3969 }
3970
3971 /* Expand the library call ourselves using a stabilized argument
3972 list to avoid evaluating the function's arguments twice. */
3973 #ifdef HAVE_cmpstrnsi
3974 do_libcall:
3975 #endif
3976 fndecl = get_callee_fndecl (exp);
3977 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3978 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3979 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3980 return expand_call (fn, target, target == const0_rtx);
3981 }
3982 #endif
3983 return NULL_RTX;
3984 }
3985
3986 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3987 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3988 try to get the result in TARGET, if convenient. */
3989
3990 static rtx
3991 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3992 ATTRIBUTE_UNUSED enum machine_mode mode)
3993 {
3994 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3995
3996 if (!validate_arglist (exp,
3997 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3998 return NULL_RTX;
3999
4000 /* If c_strlen can determine an expression for one of the string
4001 lengths, and it doesn't have side effects, then emit cmpstrnsi
4002 using length MIN(strlen(string)+1, arg3). */
4003 #ifdef HAVE_cmpstrnsi
4004 if (HAVE_cmpstrnsi)
4005 {
4006 tree len, len1, len2;
4007 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4008 rtx result, insn;
4009 tree fndecl, fn;
4010 tree arg1 = CALL_EXPR_ARG (exp, 0);
4011 tree arg2 = CALL_EXPR_ARG (exp, 1);
4012 tree arg3 = CALL_EXPR_ARG (exp, 2);
4013
4014 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4015 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4016 enum machine_mode insn_mode
4017 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4018
4019 len1 = c_strlen (arg1, 1);
4020 len2 = c_strlen (arg2, 1);
4021
4022 if (len1)
4023 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4024 if (len2)
4025 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4026
4027 /* If we don't have a constant length for the first, use the length
4028 of the second, if we know it. We don't require a constant for
4029 this case; some cost analysis could be done if both are available
4030 but neither is constant. For now, assume they're equally cheap,
4031 unless one has side effects. If both strings have constant lengths,
4032 use the smaller. */
4033
4034 if (!len1)
4035 len = len2;
4036 else if (!len2)
4037 len = len1;
4038 else if (TREE_SIDE_EFFECTS (len1))
4039 len = len2;
4040 else if (TREE_SIDE_EFFECTS (len2))
4041 len = len1;
4042 else if (TREE_CODE (len1) != INTEGER_CST)
4043 len = len2;
4044 else if (TREE_CODE (len2) != INTEGER_CST)
4045 len = len1;
4046 else if (tree_int_cst_lt (len1, len2))
4047 len = len1;
4048 else
4049 len = len2;
4050
4051 /* If both arguments have side effects, we cannot optimize. */
4052 if (!len || TREE_SIDE_EFFECTS (len))
4053 return NULL_RTX;
4054
4055 /* The actual new length parameter is MIN(len,arg3). */
4056 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4057 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4058
4059 /* If either argument is not a pointer type, call the function. */
4060 if (arg1_align == 0 || arg2_align == 0)
4061 return NULL_RTX;
4062
4063 /* Make a place to write the result of the instruction. */
4064 result = target;
4065 if (! (result != 0
4066 && REG_P (result) && GET_MODE (result) == insn_mode
4067 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4068 result = gen_reg_rtx (insn_mode);
4069
4070 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4071 arg1 = builtin_save_expr (arg1);
4072 arg2 = builtin_save_expr (arg2);
4073 len = builtin_save_expr (len);
4074
4075 arg1_rtx = get_memory_rtx (arg1, len);
4076 arg2_rtx = get_memory_rtx (arg2, len);
4077 arg3_rtx = expand_normal (len);
4078 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4079 GEN_INT (MIN (arg1_align, arg2_align)));
4080 if (insn)
4081 {
4082 emit_insn (insn);
4083
4084 /* Return the value in the proper mode for this function. */
4085 mode = TYPE_MODE (TREE_TYPE (exp));
4086 if (GET_MODE (result) == mode)
4087 return result;
4088 if (target == 0)
4089 return convert_to_mode (mode, result, 0);
4090 convert_move (target, result, 0);
4091 return target;
4092 }
4093
4094 /* Expand the library call ourselves using a stabilized argument
4095 list to avoid evaluating the function's arguments twice. */
4096 fndecl = get_callee_fndecl (exp);
4097 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4098 arg1, arg2, len);
4099 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4100 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4101 return expand_call (fn, target, target == const0_rtx);
4102 }
4103 #endif
4104 return NULL_RTX;
4105 }
4106
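/* A minimal sketch (not compiled) of the MIN (strlen + 1, arg3) bound
   computed above; OTHER stands for an arbitrary non-constant string.
   Assumes <string.h>.  */
#if 0
static int
strncmp_bound_sketch (const char *other)
{
  /* strlen ("hi") == 2 is known at compile time, so at most
     MIN (2 + 1, 100) == 3 bytes can be inspected: the comparison
     cannot continue past the NUL of "hi".  That bound becomes the
     length operand of cmpstrnsi.  */
  return strncmp ("hi", other, 100);
}
#endif
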
4107 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4108 if that's convenient. */
4109
4110 rtx
4111 expand_builtin_saveregs (void)
4112 {
4113 rtx val, seq;
4114
4115 /* Don't do __builtin_saveregs more than once in a function.
4116 Save the result of the first call and reuse it. */
4117 if (saveregs_value != 0)
4118 return saveregs_value;
4119
4120 /* When this function is called, it means that registers must be
4121 saved on entry to this function. So we migrate the call to the
4122 first insn of this function. */
4123
4124 start_sequence ();
4125
4126 /* Do whatever the machine needs done in this case. */
4127 val = targetm.calls.expand_builtin_saveregs ();
4128
4129 seq = get_insns ();
4130 end_sequence ();
4131
4132 saveregs_value = val;
4133
4134 /* Put the insns after the NOTE that starts the function. If this
4135 is inside a start_sequence, make the outer-level insn chain current, so
4136 the code is placed at the start of the function. */
4137 push_topmost_sequence ();
4138 emit_insn_after (seq, entry_of_function ());
4139 pop_topmost_sequence ();
4140
4141 return val;
4142 }
4143
4144 /* Expand a call to __builtin_next_arg. */
4145
4146 static rtx
4147 expand_builtin_next_arg (void)
4148 {
4149 /* Argument checking is already done in fold_builtin_next_arg,
4150 which must be called before this function. */
4151 return expand_binop (ptr_mode, add_optab,
4152 crtl->args.internal_arg_pointer,
4153 crtl->args.arg_offset_rtx,
4154 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4155 }
4156
4157 /* Make it easier for the backends by protecting the valist argument
4158 from multiple evaluations. */
4159
4160 static tree
4161 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4162 {
4163 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4164
4165 /* The current way of determining the type of valist is completely
4166 bogus. We should have the information on the va builtin instead. */
4167 if (!vatype)
4168 vatype = targetm.fn_abi_va_list (cfun->decl);
4169
4170 if (TREE_CODE (vatype) == ARRAY_TYPE)
4171 {
4172 if (TREE_SIDE_EFFECTS (valist))
4173 valist = save_expr (valist);
4174
4175 /* For this case, the backends will be expecting a pointer to
4176 vatype, but it's possible we've actually been given an array
4177 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4178 So fix it. */
4179 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4180 {
4181 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4182 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4183 }
4184 }
4185 else
4186 {
4187 tree pt = build_pointer_type (vatype);
4188
4189 if (! needs_lvalue)
4190 {
4191 if (! TREE_SIDE_EFFECTS (valist))
4192 return valist;
4193
4194 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4195 TREE_SIDE_EFFECTS (valist) = 1;
4196 }
4197
4198 if (TREE_SIDE_EFFECTS (valist))
4199 valist = save_expr (valist);
4200 valist = fold_build2_loc (loc, MEM_REF,
4201 vatype, valist, build_int_cst (pt, 0));
4202 }
4203
4204 return valist;
4205 }
4206
4207 /* The "standard" definition of va_list is void*. */
4208
4209 tree
4210 std_build_builtin_va_list (void)
4211 {
4212 return ptr_type_node;
4213 }
4214
4215 /* The "standard" abi va_list is va_list_type_node. */
4216
4217 tree
4218 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4219 {
4220 return va_list_type_node;
4221 }
4222
4223 /* The "standard" type of va_list is va_list_type_node. */
4224
4225 tree
4226 std_canonical_va_list_type (tree type)
4227 {
4228 tree wtype, htype;
4229
4230 if (INDIRECT_REF_P (type))
4231 type = TREE_TYPE (type);
4232 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4233 type = TREE_TYPE (type);
4234 wtype = va_list_type_node;
4235 htype = type;
4236 /* Treat structure va_list types. */
4237 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4238 htype = TREE_TYPE (htype);
4239 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4240 {
4241 /* If va_list is an array type, the argument may have decayed
4242 to a pointer type, e.g. by being passed to another function.
4243 In that case, unwrap both types so that we can compare the
4244 underlying records. */
4245 if (TREE_CODE (htype) == ARRAY_TYPE
4246 || POINTER_TYPE_P (htype))
4247 {
4248 wtype = TREE_TYPE (wtype);
4249 htype = TREE_TYPE (htype);
4250 }
4251 }
4252 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4253 return va_list_type_node;
4254
4255 return NULL_TREE;
4256 }
4257
4258 /* The "standard" implementation of va_start: just assign `nextarg' to
4259 the variable. */
4260
4261 void
4262 std_expand_builtin_va_start (tree valist, rtx nextarg)
4263 {
4264 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4265 convert_move (va_r, nextarg, 0);
4266 }
4267
4268 /* Expand EXP, a call to __builtin_va_start. */
4269
4270 static rtx
4271 expand_builtin_va_start (tree exp)
4272 {
4273 rtx nextarg;
4274 tree valist;
4275 location_t loc = EXPR_LOCATION (exp);
4276
4277 if (call_expr_nargs (exp) < 2)
4278 {
4279 error_at (loc, "too few arguments to function %<va_start%>");
4280 return const0_rtx;
4281 }
4282
4283 if (fold_builtin_next_arg (exp, true))
4284 return const0_rtx;
4285
4286 nextarg = expand_builtin_next_arg ();
4287 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4288
4289 if (targetm.expand_builtin_va_start)
4290 targetm.expand_builtin_va_start (valist, nextarg);
4291 else
4292 std_expand_builtin_va_start (valist, nextarg);
4293
4294 return const0_rtx;
4295 }
4296
4297 /* The "standard" implementation of va_arg: read the value from the
4298 current (padded) address and increment by the (padded) size. */
4299
4300 tree
4301 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4302 gimple_seq *post_p)
4303 {
4304 tree addr, t, type_size, rounded_size, valist_tmp;
4305 unsigned HOST_WIDE_INT align, boundary;
4306 bool indirect;
4307
4308 #ifdef ARGS_GROW_DOWNWARD
4309 /* All of the alignment and movement below is for args-grow-up machines.
4310 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4311 implement their own specialized gimplify_va_arg_expr routines. */
4312 gcc_unreachable ();
4313 #endif
4314
4315 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4316 if (indirect)
4317 type = build_pointer_type (type);
4318
4319 align = PARM_BOUNDARY / BITS_PER_UNIT;
4320 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4321
4322 /* When the caller aligns a parameter on the stack, an alignment
4323 beyond MAX_SUPPORTED_STACK_ALIGNMENT is capped at
4324 MAX_SUPPORTED_STACK_ALIGNMENT. Match the caller's behavior
4325 here in the callee. */
4326 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4327 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4328
4329 boundary /= BITS_PER_UNIT;
4330
4331 /* Hoist the valist value into a temporary for the moment. */
4332 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4333
4334 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4335 requires greater alignment, we must perform dynamic alignment. */
4336 if (boundary > align
4337 && !integer_zerop (TYPE_SIZE (type)))
4338 {
4339 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4340 fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
4341 gimplify_and_add (t, pre_p);
4342
4343 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4344 fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
4345 valist_tmp,
4346 build_int_cst (TREE_TYPE (valist), -boundary)));
4347 gimplify_and_add (t, pre_p);
4348 }
4349 else
4350 boundary = align;
4351
4352 /* If the actual alignment is less than the alignment of the type,
4353 adjust the type accordingly so that we don't assume strict alignment
4354 when dereferencing the pointer. */
4355 boundary *= BITS_PER_UNIT;
4356 if (boundary < TYPE_ALIGN (type))
4357 {
4358 type = build_variant_type_copy (type);
4359 TYPE_ALIGN (type) = boundary;
4360 }
4361
4362 /* Compute the rounded size of the type. */
4363 type_size = size_in_bytes (type);
4364 rounded_size = round_up (type_size, align);
4365
4366 /* Reduce rounded_size so it's sharable with the postqueue. */
4367 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4368
4369 /* Get AP. */
4370 addr = valist_tmp;
4371 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4372 {
4373 /* Small args are padded downward. */
4374 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4375 rounded_size, size_int (align));
4376 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4377 size_binop (MINUS_EXPR, rounded_size, type_size));
4378 addr = fold_build_pointer_plus (addr, t);
4379 }
4380
4381 /* Compute new value for AP. */
4382 t = fold_build_pointer_plus (valist_tmp, rounded_size);
4383 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4384 gimplify_and_add (t, pre_p);
4385
4386 addr = fold_convert (build_pointer_type (type), addr);
4387
4388 if (indirect)
4389 addr = build_va_arg_indirect_ref (addr);
4390
4391 return build_va_arg_indirect_ref (addr);
4392 }
4393
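/* A minimal sketch (not compiled) of the pointer arithmetic gimplified
   above, for an args-grow-up target with the PAD_VARARGS_DOWN
   adjustment omitted.  ALIGN is PARM_BOUNDARY in bytes, BOUNDARY the
   argument's (possibly larger) alignment; both are powers of two.
   Assumes <stddef.h> and <stdint.h>.  */
#if 0
static void *
std_va_arg_sketch (char **ap, size_t size, size_t align, size_t boundary)
{
  uintptr_t p = (uintptr_t) *ap;
  size_t rounded = (size + align - 1) & ~(align - 1);

  if (boundary > align)   /* dynamic realignment for overaligned types */
    p = (p + boundary - 1) & ~(uintptr_t) (boundary - 1);
  *ap = (char *) p + rounded;   /* new value of the va_list */
  return (void *) p;            /* address the argument is read from */
}
#endif
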
4394 /* Build an indirect-ref expression over the given TREE, which represents a
4395 piece of a va_arg() expansion. */
4396 tree
4397 build_va_arg_indirect_ref (tree addr)
4398 {
4399 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4400
4401 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4402 mf_mark (addr);
4403
4404 return addr;
4405 }
4406
4407 /* Return a dummy expression of type TYPE in order to keep going after an
4408 error. */
4409
4410 static tree
4411 dummy_object (tree type)
4412 {
4413 tree t = build_int_cst (build_pointer_type (type), 0);
4414 return build2 (MEM_REF, type, t, t);
4415 }
4416
4417 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4418 builtin function, but a very special sort of operator. */
4419
4420 enum gimplify_status
4421 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4422 {
4423 tree promoted_type, have_va_type;
4424 tree valist = TREE_OPERAND (*expr_p, 0);
4425 tree type = TREE_TYPE (*expr_p);
4426 tree t;
4427 location_t loc = EXPR_LOCATION (*expr_p);
4428
4429 /* Verify that valist is of the proper type. */
4430 have_va_type = TREE_TYPE (valist);
4431 if (have_va_type == error_mark_node)
4432 return GS_ERROR;
4433 have_va_type = targetm.canonical_va_list_type (have_va_type);
4434
4435 if (have_va_type == NULL_TREE)
4436 {
4437 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4438 return GS_ERROR;
4439 }
4440
4441 /* Generate a diagnostic for requesting data of a type that cannot
4442 be passed through `...' due to type promotion at the call site. */
4443 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4444 != type)
4445 {
4446 static bool gave_help;
4447 bool warned;
4448
4449 /* Unfortunately, this is merely undefined, rather than a constraint
4450 violation, so we cannot make this an error. If this call is never
4451 executed, the program is still strictly conforming. */
4452 warned = warning_at (loc, 0,
4453 "%qT is promoted to %qT when passed through %<...%>",
4454 type, promoted_type);
4455 if (!gave_help && warned)
4456 {
4457 gave_help = true;
4458 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4459 promoted_type, type);
4460 }
4461
4462 /* We can, however, treat "undefined" any way we please.
4463 Call abort to encourage the user to fix the program. */
4464 if (warned)
4465 inform (loc, "if this code is reached, the program will abort");
4466 /* Before the abort, allow the evaluation of the va_list
4467 expression to exit or longjmp. */
4468 gimplify_and_add (valist, pre_p);
4469 t = build_call_expr_loc (loc,
4470 builtin_decl_implicit (BUILT_IN_TRAP), 0);
4471 gimplify_and_add (t, pre_p);
4472
4473 /* This is dead code, but go ahead and finish so that the
4474 mode of the result comes out right. */
4475 *expr_p = dummy_object (type);
4476 return GS_ALL_DONE;
4477 }
4478 else
4479 {
4480 /* Make it easier for the backends by protecting the valist argument
4481 from multiple evaluations. */
4482 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4483 {
4484 /* For this case, the backends will be expecting a pointer to
4485 TREE_TYPE (abi), but it's possible we've
4486 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4487 So fix it. */
4488 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4489 {
4490 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4491 valist = fold_convert_loc (loc, p1,
4492 build_fold_addr_expr_loc (loc, valist));
4493 }
4494
4495 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4496 }
4497 else
4498 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4499
4500 if (!targetm.gimplify_va_arg_expr)
4501 /* FIXME: Once most targets are converted we should merely
4502 assert this is non-null. */
4503 return GS_ALL_DONE;
4504
4505 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4506 return GS_OK;
4507 }
4508 }
4509
4510 /* Expand EXP, a call to __builtin_va_end. */
4511
4512 static rtx
4513 expand_builtin_va_end (tree exp)
4514 {
4515 tree valist = CALL_EXPR_ARG (exp, 0);
4516
4517 /* Evaluate for side effects, if needed. I hate macros that don't
4518 do that. */
4519 if (TREE_SIDE_EFFECTS (valist))
4520 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4521
4522 return const0_rtx;
4523 }
4524
4525 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4526 builtin rather than just as an assignment in stdarg.h because of the
4527 nastiness of array-type va_list types. */
4528
4529 static rtx
4530 expand_builtin_va_copy (tree exp)
4531 {
4532 tree dst, src, t;
4533 location_t loc = EXPR_LOCATION (exp);
4534
4535 dst = CALL_EXPR_ARG (exp, 0);
4536 src = CALL_EXPR_ARG (exp, 1);
4537
4538 dst = stabilize_va_list_loc (loc, dst, 1);
4539 src = stabilize_va_list_loc (loc, src, 0);
4540
4541 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4542
4543 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4544 {
4545 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4546 TREE_SIDE_EFFECTS (t) = 1;
4547 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4548 }
4549 else
4550 {
4551 rtx dstb, srcb, size;
4552
4553 /* Evaluate to pointers. */
4554 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4555 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4556 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4557 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4558
4559 dstb = convert_memory_address (Pmode, dstb);
4560 srcb = convert_memory_address (Pmode, srcb);
4561
4562 /* "Dereference" to BLKmode memories. */
4563 dstb = gen_rtx_MEM (BLKmode, dstb);
4564 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4565 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4566 srcb = gen_rtx_MEM (BLKmode, srcb);
4567 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4568 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4569
4570 /* Copy. */
4571 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4572 }
4573
4574 return const0_rtx;
4575 }
4576
4577 /* Expand a call to one of the builtin functions __builtin_frame_address or
4578 __builtin_return_address. */
4579
4580 static rtx
4581 expand_builtin_frame_address (tree fndecl, tree exp)
4582 {
4583 /* The argument must be a nonnegative integer constant.
4584 It counts the number of frames to scan up the stack.
4585 The value is the return address (or frame address) saved in that frame. */
4586 if (call_expr_nargs (exp) == 0)
4587 /* Warning about missing arg was already issued. */
4588 return const0_rtx;
4589 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4590 {
4591 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4592 error ("invalid argument to %<__builtin_frame_address%>");
4593 else
4594 error ("invalid argument to %<__builtin_return_address%>");
4595 return const0_rtx;
4596 }
4597 else
4598 {
4599 rtx tem
4600 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4601 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4602
4603 /* Some ports cannot access arbitrary stack frames. */
4604 if (tem == NULL)
4605 {
4606 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4607 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4608 else
4609 warning (0, "unsupported argument to %<__builtin_return_address%>");
4610 return const0_rtx;
4611 }
4612
4613 /* For __builtin_frame_address, return what we've got. */
4614 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4615 return tem;
4616
4617 if (!REG_P (tem)
4618 && ! CONSTANT_P (tem))
4619 tem = copy_addr_to_reg (tem);
4620 return tem;
4621 }
4622 }
4623
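/* A minimal usage sketch (not compiled) of the builtins expanded
   above: the argument is a nonnegative constant frame count, with 0
   meaning the current function.  */
#if 0
static void *
frame_address_sketch (void)
{
  return __builtin_frame_address (0);    /* this function's frame */
}

static void *
return_address_sketch (void)
{
  return __builtin_return_address (0);   /* this function's return address */
}
#endif
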
4624 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4625 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4626 is the same as for allocate_dynamic_stack_space. */
4627
4628 static rtx
4629 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4630 {
4631 rtx op0;
4632 rtx result;
4633 bool valid_arglist;
4634 unsigned int align;
4635 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4636 == BUILT_IN_ALLOCA_WITH_ALIGN);
4637
4638 /* Emit normal call if we use mudflap. */
4639 if (flag_mudflap)
4640 return NULL_RTX;
4641
4642 valid_arglist
4643 = (alloca_with_align
4644 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4645 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4646
4647 if (!valid_arglist)
4648 return NULL_RTX;
4649
4650 /* Compute the argument. */
4651 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4652
4653 /* Compute the alignment. */
4654 align = (alloca_with_align
4655 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4656 : BIGGEST_ALIGNMENT);
4657
4658 /* Allocate the desired space. */
4659 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4660 result = convert_memory_address (ptr_mode, result);
4661
4662 return result;
4663 }
4664
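/* A minimal usage sketch (not compiled) of the two forms handled
   above.  Note that __builtin_alloca_with_align takes its alignment
   argument in bits (hence the raw TREE_INT_CST_LOW above), while plain
   alloca gets BIGGEST_ALIGNMENT.  */
#if 0
static void
alloca_sketch (void)
{
  void *p = __builtin_alloca (128);                  /* default alignment */
  void *q = __builtin_alloca_with_align (128, 256);  /* 256 bits = 32 bytes */
}
#endif
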
4665 /* Expand a call to bswap builtin in EXP.
4666 Return NULL_RTX if a normal call should be emitted rather than expanding the
4667 function in-line. If convenient, the result should be placed in TARGET.
4668 SUBTARGET may be used as the target for computing one of EXP's operands. */
4669
4670 static rtx
4671 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4672 rtx subtarget)
4673 {
4674 tree arg;
4675 rtx op0;
4676
4677 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4678 return NULL_RTX;
4679
4680 arg = CALL_EXPR_ARG (exp, 0);
4681 op0 = expand_expr (arg,
4682 subtarget && GET_MODE (subtarget) == target_mode
4683 ? subtarget : NULL_RTX,
4684 target_mode, EXPAND_NORMAL);
4685 if (GET_MODE (op0) != target_mode)
4686 op0 = convert_to_mode (target_mode, op0, 1);
4687
4688 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4689
4690 gcc_assert (target);
4691
4692 return convert_to_mode (target_mode, target, 1);
4693 }
4694
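/* A minimal sketch (not compiled) of the byte-swap semantics expanded
   above via bswap_optab.  Assumes <stdint.h>.  */
#if 0
static uint32_t
bswap_sketch (void)
{
  return __builtin_bswap32 (0x11223344);   /* yields 0x44332211 */
}
#endif
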
4695 /* Expand a call to a unary builtin in EXP.
4696 Return NULL_RTX if a normal call should be emitted rather than expanding the
4697 function in-line. If convenient, the result should be placed in TARGET.
4698 SUBTARGET may be used as the target for computing one of EXP's operands. */
4699
4700 static rtx
4701 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4702 rtx subtarget, optab op_optab)
4703 {
4704 rtx op0;
4705
4706 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4707 return NULL_RTX;
4708
4709 /* Compute the argument. */
4710 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4711 (subtarget
4712 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4713 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4714 VOIDmode, EXPAND_NORMAL);
4715 /* Compute op, into TARGET if possible.
4716 Set TARGET to wherever the result comes back. */
4717 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4718 op_optab, op0, target, op_optab != clrsb_optab);
4719 gcc_assert (target);
4720
4721 return convert_to_mode (target_mode, target, 0);
4722 }
4723
4724 /* Expand a call to __builtin_expect. We just return our argument,
4725 as the builtin_expect semantics should already have been handled
4726 by the tree branch prediction pass. */
4727
4728 static rtx
4729 expand_builtin_expect (tree exp, rtx target)
4730 {
4731 tree arg;
4732
4733 if (call_expr_nargs (exp) < 2)
4734 return const0_rtx;
4735 arg = CALL_EXPR_ARG (exp, 0);
4736
4737 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4738 /* When guessing was done, the hints should already have been stripped away. */
4739 gcc_assert (!flag_guess_branch_prob
4740 || optimize == 0 || seen_error ());
4741 return target;
4742 }
4743
4744 /* Expand a call to __builtin_assume_aligned. We just return our first
4745 argument, as the builtin_assume_aligned semantics should already have
4746 been handled by CCP. */
4747
4748 static rtx
4749 expand_builtin_assume_aligned (tree exp, rtx target)
4750 {
4751 if (call_expr_nargs (exp) < 2)
4752 return const0_rtx;
4753 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4754 EXPAND_NORMAL);
4755 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4756 && (call_expr_nargs (exp) < 3
4757 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4758 return target;
4759 }
4760
4761 void
4762 expand_builtin_trap (void)
4763 {
4764 #ifdef HAVE_trap
4765 if (HAVE_trap)
4766 emit_insn (gen_trap ());
4767 else
4768 #endif
4769 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4770 emit_barrier ();
4771 }
4772
4773 /* Expand a call to __builtin_unreachable. We do nothing except emit
4774 a barrier saying that control flow will not pass here.
4775
4776 It is the responsibility of the program being compiled to ensure
4777 that control flow never reaches __builtin_unreachable. */
4778 static void
4779 expand_builtin_unreachable (void)
4780 {
4781 emit_barrier ();
4782 }
4783
4784 /* Expand EXP, a call to fabs, fabsf or fabsl.
4785 Return NULL_RTX if a normal call should be emitted rather than expanding
4786 the function inline. If convenient, the result should be placed
4787 in TARGET. SUBTARGET may be used as the target for computing
4788 the operand. */
4789
4790 static rtx
4791 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4792 {
4793 enum machine_mode mode;
4794 tree arg;
4795 rtx op0;
4796
4797 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4798 return NULL_RTX;
4799
4800 arg = CALL_EXPR_ARG (exp, 0);
4801 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4802 mode = TYPE_MODE (TREE_TYPE (arg));
4803 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4804 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4805 }
4806
4807 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4808 Return NULL if a normal call should be emitted rather than expanding the
4809 function inline. If convenient, the result should be placed in TARGET.
4810 SUBTARGET may be used as the target for computing the operand. */
4811
4812 static rtx
4813 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4814 {
4815 rtx op0, op1;
4816 tree arg;
4817
4818 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4819 return NULL_RTX;
4820
4821 arg = CALL_EXPR_ARG (exp, 0);
4822 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4823
4824 arg = CALL_EXPR_ARG (exp, 1);
4825 op1 = expand_normal (arg);
4826
4827 return expand_copysign (op0, op1, target);
4828 }
4829
4830 /* Create a new constant string literal and return a char* pointer to it.
4831 The STRING_CST value is the LEN characters at STR. */
4832 tree
4833 build_string_literal (int len, const char *str)
4834 {
4835 tree t, elem, index, type;
4836
4837 t = build_string (len, str);
4838 elem = build_type_variant (char_type_node, 1, 0);
4839 index = build_index_type (size_int (len - 1));
4840 type = build_array_type (elem, index);
4841 TREE_TYPE (t) = type;
4842 TREE_CONSTANT (t) = 1;
4843 TREE_READONLY (t) = 1;
4844 TREE_STATIC (t) = 1;
4845
4846 type = build_pointer_type (elem);
4847 t = build1 (ADDR_EXPR, type,
4848 build4 (ARRAY_REF, elem,
4849 t, integer_zero_node, NULL_TREE, NULL_TREE));
4850 return t;
4851 }
4852
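/* A minimal usage sketch (not compiled): LEN counts the terminating
   NUL, so sizeof of a literal is the natural argument.  The result is
   effectively &"hi"[0], with the backing array constant and static.  */
#if 0
static tree
string_literal_sketch (void)
{
  return build_string_literal (sizeof "hi", "hi");   /* LEN == 3 */
}
#endif
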
4853 /* Expand a call to __builtin___clear_cache. */
4854
4855 static rtx
4856 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4857 {
4858 #ifndef HAVE_clear_cache
4859 #ifdef CLEAR_INSN_CACHE
4860 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4861 does something. Just do the default expansion to a call to
4862 __clear_cache(). */
4863 return NULL_RTX;
4864 #else
4865 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4866 does nothing. There is no need to call it. Do nothing. */
4867 return const0_rtx;
4868 #endif /* CLEAR_INSN_CACHE */
4869 #else
4870 /* We have a "clear_cache" insn, and it will handle everything. */
4871 tree begin, end;
4872 rtx begin_rtx, end_rtx;
4873
4874 /* We must not expand to a library call. If we did, any
4875 fallback library function in libgcc that might contain a call to
4876 __builtin___clear_cache() would recurse infinitely. */
4877 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4878 {
4879 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4880 return const0_rtx;
4881 }
4882
4883 if (HAVE_clear_cache)
4884 {
4885 struct expand_operand ops[2];
4886
4887 begin = CALL_EXPR_ARG (exp, 0);
4888 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4889
4890 end = CALL_EXPR_ARG (exp, 1);
4891 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4892
4893 create_address_operand (&ops[0], begin_rtx);
4894 create_address_operand (&ops[1], end_rtx);
4895 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4896 return const0_rtx;
4897 }
4898 return const0_rtx;
4899 #endif /* HAVE_clear_cache */
4900 }
4901
4902 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4903
4904 static rtx
4905 round_trampoline_addr (rtx tramp)
4906 {
4907 rtx temp, addend, mask;
4908
4909 /* If we don't need too much alignment, we'll have been guaranteed
4910 proper alignment by get_trampoline_type. */
4911 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4912 return tramp;
4913
4914 /* Round address up to desired boundary. */
4915 temp = gen_reg_rtx (Pmode);
4916 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4917 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4918
4919 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4920 temp, 0, OPTAB_LIB_WIDEN);
4921 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4922 temp, 0, OPTAB_LIB_WIDEN);
4923
4924 return tramp;
4925 }
4926
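/* A minimal sketch (not compiled) of the round-up identity computed in
   RTL above: add ALIGN - 1, then mask away the low bits.  ALIGN must
   be a power of two.  Assumes <stdint.h>.  */
#if 0
static uintptr_t
round_up_sketch (uintptr_t addr, uintptr_t align)
{
  return (addr + align - 1) & -align;   /* e.g. (13 + 7) & -8 == 16 */
}
#endif
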
4927 static rtx
4928 expand_builtin_init_trampoline (tree exp, bool onstack)
4929 {
4930 tree t_tramp, t_func, t_chain;
4931 rtx m_tramp, r_tramp, r_chain, tmp;
4932
4933 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4934 POINTER_TYPE, VOID_TYPE))
4935 return NULL_RTX;
4936
4937 t_tramp = CALL_EXPR_ARG (exp, 0);
4938 t_func = CALL_EXPR_ARG (exp, 1);
4939 t_chain = CALL_EXPR_ARG (exp, 2);
4940
4941 r_tramp = expand_normal (t_tramp);
4942 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4943 MEM_NOTRAP_P (m_tramp) = 1;
4944
4945 /* If ONSTACK, the TRAMP argument should be the address of a field
4946 within the local function's FRAME decl. Either way, let's see if
4947 we can fill in the MEM_ATTRs for this memory. */
4948 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4949 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
4950 true, 0);
4951
4952 /* The creator of a heap trampoline is responsible for making sure the
4953 address is aligned to at least STACK_BOUNDARY. Normally malloc
4954 will ensure this anyhow. */
4955 tmp = round_trampoline_addr (r_tramp);
4956 if (tmp != r_tramp)
4957 {
4958 m_tramp = change_address (m_tramp, BLKmode, tmp);
4959 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4960 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4961 }
4962
4963 /* The FUNC argument should be the address of the nested function.
4964 Extract the actual function decl to pass to the hook. */
4965 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4966 t_func = TREE_OPERAND (t_func, 0);
4967 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4968
4969 r_chain = expand_normal (t_chain);
4970
4971 /* Generate insns to initialize the trampoline. */
4972 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4973
4974 if (onstack)
4975 {
4976 trampolines_created = 1;
4977
4978 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4979 "trampoline generated for nested function %qD", t_func);
4980 }
4981
4982 return const0_rtx;
4983 }
4984
4985 static rtx
4986 expand_builtin_adjust_trampoline (tree exp)
4987 {
4988 rtx tramp;
4989
4990 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4991 return NULL_RTX;
4992
4993 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4994 tramp = round_trampoline_addr (tramp);
4995 if (targetm.calls.trampoline_adjust_address)
4996 tramp = targetm.calls.trampoline_adjust_address (tramp);
4997
4998 return tramp;
4999 }
5000
5001 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5002 function. The function first checks whether the back end provides
5003 an insn to implement signbit for the respective mode. If not, it
5004 checks whether the floating point format of the value is such that
5005 the sign bit can be extracted. If that is not the case, the
5006 function returns NULL_RTX to indicate that a normal call should be
5007 emitted rather than expanding the function in-line. EXP is the
5008 expression that is a call to the builtin function; if convenient,
5009 the result should be placed in TARGET. */
5010 static rtx
5011 expand_builtin_signbit (tree exp, rtx target)
5012 {
5013 const struct real_format *fmt;
5014 enum machine_mode fmode, imode, rmode;
5015 tree arg;
5016 int word, bitpos;
5017 enum insn_code icode;
5018 rtx temp;
5019 location_t loc = EXPR_LOCATION (exp);
5020
5021 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5022 return NULL_RTX;
5023
5024 arg = CALL_EXPR_ARG (exp, 0);
5025 fmode = TYPE_MODE (TREE_TYPE (arg));
5026 rmode = TYPE_MODE (TREE_TYPE (exp));
5027 fmt = REAL_MODE_FORMAT (fmode);
5028
5029 arg = builtin_save_expr (arg);
5030
5031 /* Expand the argument yielding a RTX expression. */
5032 temp = expand_normal (arg);
5033
5034 /* Check if the back end provides an insn that handles signbit for the
5035 argument's mode. */
5036 icode = optab_handler (signbit_optab, fmode);
5037 if (icode != CODE_FOR_nothing)
5038 {
5039 rtx last = get_last_insn ();
5040 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5041 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5042 return target;
5043 delete_insns_since (last);
5044 }
5045
5046 /* For floating point formats without a sign bit, implement signbit
5047 as "ARG < 0.0". */
5048 bitpos = fmt->signbit_ro;
5049 if (bitpos < 0)
5050 {
5051 /* But we can't do this if the format supports signed zero. */
5052 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5053 return NULL_RTX;
5054
5055 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5056 build_real (TREE_TYPE (arg), dconst0));
5057 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5058 }
5059
5060 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5061 {
5062 imode = int_mode_for_mode (fmode);
5063 if (imode == BLKmode)
5064 return NULL_RTX;
5065 temp = gen_lowpart (imode, temp);
5066 }
5067 else
5068 {
5069 imode = word_mode;
5070 /* Handle targets with different FP word orders. */
5071 if (FLOAT_WORDS_BIG_ENDIAN)
5072 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5073 else
5074 word = bitpos / BITS_PER_WORD;
5075 temp = operand_subword_force (temp, word, fmode);
5076 bitpos = bitpos % BITS_PER_WORD;
5077 }
5078
5079 /* Force the intermediate word_mode (or narrower) result into a
5080 register. This avoids attempting to create paradoxical SUBREGs
5081 of floating point modes below. */
5082 temp = force_reg (imode, temp);
5083
5084 /* If the bitpos is within the "result mode" lowpart, the operation
5085 can be implemented with a single bitwise AND. Otherwise, we need
5086 a right shift and an AND. */
5087
5088 if (bitpos < GET_MODE_BITSIZE (rmode))
5089 {
5090 double_int mask = double_int_setbit (double_int_zero, bitpos);
5091
5092 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5093 temp = gen_lowpart (rmode, temp);
5094 temp = expand_binop (rmode, and_optab, temp,
5095 immed_double_int_const (mask, rmode),
5096 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5097 }
5098 else
5099 {
5100 /* Perform a logical right shift to place the signbit in the least
5101 significant bit, then truncate the result to the desired mode
5102 and mask just this bit. */
5103 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5104 temp = gen_lowpart (rmode, temp);
5105 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5106 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5107 }
5108
5109 return temp;
5110 }
5111
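/* A minimal user-level sketch (illustration only, compiled out) of the
   shift-and-AND path above: extracting the sign of a 32-bit IEEE float,
   where signbit_ro is bit 31.  The helper name and the assumption that
   float and unsigned int are both 32 bits wide are hypothetical.  */
#if 0
#include <string.h>

static int
example_signbitf (float x)
{
  unsigned int bits;
  /* Reinterpret the float's storage without conversion.  */
  memcpy (&bits, &x, sizeof bits);
  /* Shift the sign bit to the least significant position and mask it.  */
  return (bits >> 31) & 1;
}
#endif
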
5112 /* Expand fork or exec calls. TARGET is the desired target of the
5113 call. EXP is the call. FN is the decl of the
5114 function actually being called. IGNORE is nonzero if the
5115 value is to be ignored. */
5116
5117 static rtx
5118 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5119 {
5120 tree id, decl;
5121 tree call;
5122
5123 /* If we are not profiling, just call the function. */
5124 if (!profile_arc_flag)
5125 return NULL_RTX;
5126
5127 /* Otherwise call the wrapper. This should be equivalent for the rest of
5128 the compiler, so the code does not diverge, and the wrapper may run the
5129 code necessary for keeping the profiling sane. */
5130
5131 switch (DECL_FUNCTION_CODE (fn))
5132 {
5133 case BUILT_IN_FORK:
5134 id = get_identifier ("__gcov_fork");
5135 break;
5136
5137 case BUILT_IN_EXECL:
5138 id = get_identifier ("__gcov_execl");
5139 break;
5140
5141 case BUILT_IN_EXECV:
5142 id = get_identifier ("__gcov_execv");
5143 break;
5144
5145 case BUILT_IN_EXECLP:
5146 id = get_identifier ("__gcov_execlp");
5147 break;
5148
5149 case BUILT_IN_EXECLE:
5150 id = get_identifier ("__gcov_execle");
5151 break;
5152
5153 case BUILT_IN_EXECVP:
5154 id = get_identifier ("__gcov_execvp");
5155 break;
5156
5157 case BUILT_IN_EXECVE:
5158 id = get_identifier ("__gcov_execve");
5159 break;
5160
5161 default:
5162 gcc_unreachable ();
5163 }
5164
5165 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5166 FUNCTION_DECL, id, TREE_TYPE (fn));
5167 DECL_EXTERNAL (decl) = 1;
5168 TREE_PUBLIC (decl) = 1;
5169 DECL_ARTIFICIAL (decl) = 1;
5170 TREE_NOTHROW (decl) = 1;
5171 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5172 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5173 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5174 return expand_call (call, target, ignore);
5175 }
5176
5177
5178 \f
5179 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5180 the pointer in these functions is void*, the tree optimizers may remove
5181 casts. The mode computed in expand_builtin isn't reliable either, due
5182 to __sync_bool_compare_and_swap.
5183
5184 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5185 group of builtins. This gives us log2 of the mode size. */
5186
5187 static inline enum machine_mode
5188 get_builtin_sync_mode (int fcode_diff)
5189 {
5190 /* The size is not negotiable, so ask not to get BLKmode in return
5191 if the target indicates that a smaller size would be better. */
5192 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5193 }
5194
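/* Illustration only (compiled out): the FOO_1/_2/_4/_8/_16 builtins are
   consecutive enumerators, so FCODE_DIFF is log2 of the access size in
   bytes.  For example, the _8 variant is 3 away from _1, yielding the
   8-byte (64-bit) integer mode.  */
#if 0
static enum machine_mode
example_mode_for_fetch_and_add_8 (void)
{
  return get_builtin_sync_mode (BUILT_IN_SYNC_FETCH_AND_ADD_8
				- BUILT_IN_SYNC_FETCH_AND_ADD_1);
}
#endif
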
5195 /* Expand the memory expression LOC and return the appropriate memory operand
5196 for the builtin_sync operations. */
5197
5198 static rtx
5199 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5200 {
5201 rtx addr, mem;
5202
5203 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5204 addr = convert_memory_address (Pmode, addr);
5205
5206 /* Note that we explicitly do not want any alias information for this
5207 memory, so that we kill all other live memories. Otherwise we don't
5208 satisfy the full barrier semantics of the intrinsic. */
5209 mem = validize_mem (gen_rtx_MEM (mode, addr));
5210
5211 /* The alignment needs to be at least that of the mode. */
5212 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5213 get_pointer_alignment (loc)));
5214 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5215 MEM_VOLATILE_P (mem) = 1;
5216
5217 return mem;
5218 }
5219
5220 /* Make sure an argument is in the right mode.
5221 EXP is the tree argument.
5222 MODE is the mode it should be in. */
5223
5224 static rtx
5225 expand_expr_force_mode (tree exp, enum machine_mode mode)
5226 {
5227 rtx val;
5228 enum machine_mode old_mode;
5229
5230 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5231 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5232 of CONST_INTs, where we know the old_mode only from the call argument. */
5233
5234 old_mode = GET_MODE (val);
5235 if (old_mode == VOIDmode)
5236 old_mode = TYPE_MODE (TREE_TYPE (exp));
5237 val = convert_modes (mode, old_mode, val, 1);
5238 return val;
5239 }
5240
5241
5242 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5243 EXP is the CALL_EXPR. CODE is the rtx code
5244 that corresponds to the arithmetic or logical operation from the name;
5245 an exception here is that NOT actually means NAND. TARGET is an optional
5246 place for us to store the results; AFTER is true if this is the
5247 fetch_and_xxx form. */
5248
5249 static rtx
5250 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5251 enum rtx_code code, bool after,
5252 rtx target)
5253 {
5254 rtx val, mem;
5255 location_t loc = EXPR_LOCATION (exp);
5256
5257 if (code == NOT && warn_sync_nand)
5258 {
5259 tree fndecl = get_callee_fndecl (exp);
5260 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5261
5262 static bool warned_f_a_n, warned_n_a_f;
5263
5264 switch (fcode)
5265 {
5266 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5267 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5268 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5269 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5270 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5271 if (warned_f_a_n)
5272 break;
5273
5274 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5275 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5276 warned_f_a_n = true;
5277 break;
5278
5279 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5280 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5281 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5282 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5283 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5284 if (warned_n_a_f)
5285 break;
5286
5287 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5288 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5289 warned_n_a_f = true;
5290 break;
5291
5292 default:
5293 gcc_unreachable ();
5294 }
5295 }
5296
5297 /* Expand the operands. */
5298 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5299 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5300
5301 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5302 after);
5303 }
5304
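/* Illustration only (compiled out): the NAND semantics the warning above
   refers to, written as plain single-threaded C.  Since GCC 4.4 the
   builtins compute ~(old & val); before 4.4 they computed ~old & val.
   The helper name is hypothetical.  */
#if 0
static unsigned int
example_fetch_and_nand (unsigned int *p, unsigned int val)
{
  unsigned int old = *p;	/* The fetch_and_xxx forms return this.  */
  *p = ~(old & val);		/* NOT really means NAND here.  */
  return old;
}
#endif
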
5305 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5306 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5307 true if this is the boolean form. TARGET is a place for us to store the
5308 results; this is NOT optional if IS_BOOL is true. */
5309
5310 static rtx
5311 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5312 bool is_bool, rtx target)
5313 {
5314 rtx old_val, new_val, mem;
5315 rtx *pbool, *poval;
5316
5317 /* Expand the operands. */
5318 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5319 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5320 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5321
5322 pbool = poval = NULL;
5323 if (target != const0_rtx)
5324 {
5325 if (is_bool)
5326 pbool = &target;
5327 else
5328 poval = &target;
5329 }
5330 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5331 false, MEMMODEL_SEQ_CST,
5332 MEMMODEL_SEQ_CST))
5333 return NULL_RTX;
5334
5335 return target;
5336 }
5337
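/* Illustration only (compiled out): the two user-visible forms expanded
   above.  The bool form merely reports whether the swap happened; the
   val form returns the prior contents so callers can retry with it.  */
#if 0
static void
example_sync_cas (int *p)
{
  int prev = __sync_val_compare_and_swap (p, 0, 1);	/* Old *p.  */
  int done = __sync_bool_compare_and_swap (p, 1, 2);	/* 0 or 1.  */
  (void) prev; (void) done;
}
#endif
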
5338 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5339 general form is actually an atomic exchange, and some targets only
5340 support a reduced form with the second argument being a constant 1.
5341 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5342 the results. */
5343
5344 static rtx
5345 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5346 rtx target)
5347 {
5348 rtx val, mem;
5349
5350 /* Expand the operands. */
5351 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5352 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5353
5354 return expand_sync_lock_test_and_set (target, mem, val);
5355 }
5356
5357 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5358
5359 static void
5360 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5361 {
5362 rtx mem;
5363
5364 /* Expand the operands. */
5365 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5366
5367 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5368 }
5369
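/* Illustration only (compiled out): the classic pairing of the two
   builtins above as a spinlock.  Note that test_and_set is only an
   acquire barrier and may degenerate to storing the constant 1 on some
   targets, while lock_release is a release barrier storing 0.  */
#if 0
static int example_lock;

static void
example_spin_lock (void)
{
  while (__sync_lock_test_and_set (&example_lock, 1))
    ;	/* Spin until the previous value was 0.  */
}

static void
example_spin_unlock (void)
{
  __sync_lock_release (&example_lock);
}
#endif
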
5370 /* Given an integer representing an ``enum memmodel'', verify its
5371 correctness and return the memory model enum. */
5372
5373 static enum memmodel
5374 get_memmodel (tree exp)
5375 {
5376 rtx op;
5377 unsigned HOST_WIDE_INT val;
5378
5379 /* If the parameter is not a constant, it's a run time value so we'll just
5380 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5381 if (TREE_CODE (exp) != INTEGER_CST)
5382 return MEMMODEL_SEQ_CST;
5383
5384 op = expand_normal (exp);
5385
5386 val = INTVAL (op);
5387 if (targetm.memmodel_check)
5388 val = targetm.memmodel_check (val);
5389 else if (val & ~MEMMODEL_MASK)
5390 {
5391 warning (OPT_Winvalid_memory_model,
5392 "Unknown architecture specifier in memory model to builtin.");
5393 return MEMMODEL_SEQ_CST;
5394 }
5395
5396 if ((val & MEMMODEL_MASK) >= MEMMODEL_LAST)
5397 {
5398 warning (OPT_Winvalid_memory_model,
5399 "invalid memory model argument to builtin");
5400 return MEMMODEL_SEQ_CST;
5401 }
5402
5403 return (enum memmodel) val;
5404 }
5405
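/* Illustration only (compiled out): what the constant check above means
   for users.  A literal memory model is validated at compile time; a
   run-time value is silently treated as __ATOMIC_SEQ_CST, the strongest
   model.  */
#if 0
static int
example_load (int *p, int runtime_model)
{
  int a = __atomic_load_n (p, __ATOMIC_ACQUIRE);	/* Checked statically.  */
  int b = __atomic_load_n (p, runtime_model);		/* Treated as SEQ_CST.  */
  return a + b;
}
#endif
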
5406 /* Expand the __atomic_exchange intrinsic:
5407 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5408 EXP is the CALL_EXPR.
5409 TARGET is an optional place for us to store the results. */
5410
5411 static rtx
5412 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5413 {
5414 rtx val, mem;
5415 enum memmodel model;
5416
5417 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5418 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5419 {
5420 error ("invalid memory model for %<__atomic_exchange%>");
5421 return NULL_RTX;
5422 }
5423
5424 if (!flag_inline_atomics)
5425 return NULL_RTX;
5426
5427 /* Expand the operands. */
5428 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5429 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5430
5431 return expand_atomic_exchange (target, mem, val, model);
5432 }
5433
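/* Illustration only (compiled out): the user-level form of the exchange
   expanded above -- atomically store a new value and return the previous
   one.  As checked above, MEMMODEL_CONSUME is rejected for it.  */
#if 0
static int
example_exchange (int *p)
{
  return __atomic_exchange_n (p, 42, __ATOMIC_ACQ_REL);
}
#endif
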
5434 /* Expand the __atomic_compare_exchange intrinsic:
5435 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5436 TYPE desired, BOOL weak,
5437 enum memmodel success,
5438 enum memmodel failure)
5439 EXP is the CALL_EXPR.
5440 TARGET is an optional place for us to store the results. */
5441
5442 static rtx
5443 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5444 rtx target)
5445 {
5446 rtx expect, desired, mem, oldval;
5447 enum memmodel success, failure;
5448 tree weak;
5449 bool is_weak;
5450
5451 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5452 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5453
5454 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5455 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5456 {
5457 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5458 return NULL_RTX;
5459 }
5460
5461 if (failure > success)
5462 {
5463 error ("failure memory model cannot be stronger than success "
5464 "memory model for %<__atomic_compare_exchange%>");
5465 return NULL_RTX;
5466 }
5467
5468 if (!flag_inline_atomics)
5469 return NULL_RTX;
5470
5471 /* Expand the operands. */
5472 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5473
5474 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5475 expect = convert_memory_address (Pmode, expect);
5476 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5477
5478 weak = CALL_EXPR_ARG (exp, 3);
5479 is_weak = false;
5480 if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
5481 is_weak = true;
5482
5483 oldval = copy_to_reg (gen_rtx_MEM (mode, expect));
5484
5485 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5486 &oldval, mem, oldval, desired,
5487 is_weak, success, failure))
5488 return NULL_RTX;
5489
5490 emit_move_insn (gen_rtx_MEM (mode, expect), oldval);
5491 return target;
5492 }
5493
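/* Illustration only (compiled out): the typical retry loop built on the
   builtin expanded above.  On failure the builtin writes the observed
   value back through EXPECT -- which is why the expander stores OLDVAL
   through the expect pointer -- so the loop retries with it.  */
#if 0
static void
example_atomic_increment (int *p)
{
  int expected = __atomic_load_n (p, __ATOMIC_RELAXED);
  while (!__atomic_compare_exchange_n (p, &expected, expected + 1,
				       0 /* strong */,
				       __ATOMIC_SEQ_CST, __ATOMIC_RELAXED))
    ;	/* EXPECTED now holds the current value.  */
}
#endif
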
5494 /* Expand the __atomic_load intrinsic:
5495 TYPE __atomic_load (TYPE *object, enum memmodel)
5496 EXP is the CALL_EXPR.
5497 TARGET is an optional place for us to store the results. */
5498
5499 static rtx
5500 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5501 {
5502 rtx mem;
5503 enum memmodel model;
5504
5505 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5506 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5507 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5508 {
5509 error ("invalid memory model for %<__atomic_load%>");
5510 return NULL_RTX;
5511 }
5512
5513 if (!flag_inline_atomics)
5514 return NULL_RTX;
5515
5516 /* Expand the operand. */
5517 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5518
5519 return expand_atomic_load (target, mem, model);
5520 }
5521
5522
5523 /* Expand the __atomic_store intrinsic:
5524 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5525 EXP is the CALL_EXPR.
5526 TARGET is an optional place for us to store the results. */
5527
5528 static rtx
5529 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5530 {
5531 rtx mem, val;
5532 enum memmodel model;
5533
5534 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5535 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5536 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5537 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5538 {
5539 error ("invalid memory model for %<__atomic_store%>");
5540 return NULL_RTX;
5541 }
5542
5543 if (!flag_inline_atomics)
5544 return NULL_RTX;
5545
5546 /* Expand the operands. */
5547 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5548 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5549
5550 return expand_atomic_store (mem, val, model, false);
5551 }
5552
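/* Illustration only (compiled out): the model restrictions enforced by
   the two expanders above, from the user's side.  Loads may not use
   RELEASE or ACQ_REL; stores may only use RELAXED, RELEASE or SEQ_CST.  */
#if 0
static void
example_load_store (int *p, int v)
{
  int x = __atomic_load_n (p, __ATOMIC_ACQUIRE);	/* Valid for loads.  */
  __atomic_store_n (p, v + x, __ATOMIC_RELEASE);	/* Valid for stores.  */
}
#endif
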
5553 /* Expand the __atomic_fetch_XXX intrinsic:
5554 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5555 EXP is the CALL_EXPR.
5556 TARGET is an optional place for us to store the results.
5557 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
5558 FETCH_AFTER is true if the result of the operation is returned,
5559 false if the value before the operation is returned.
5560 IGNORE is true if the result is not used.
5561 EXT_CALL is the correct builtin for an external call if this cannot be
5562 resolved to an instruction sequence. */
5563
5564 static rtx
5565 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5566 enum rtx_code code, bool fetch_after,
5567 bool ignore, enum built_in_function ext_call)
5568 {
5569 rtx val, mem, ret;
5570 enum memmodel model;
5571 tree fndecl;
5572 tree addr;
5573
5574 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5575
5576 /* Expand the operands. */
5577 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5578 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5579
5580 /* Only try generating instructions if inlining is turned on. */
5581 if (flag_inline_atomics)
5582 {
5583 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5584 if (ret)
5585 return ret;
5586 }
5587
5588 /* If there is no substitute library routine, return and let the call
5588 be expanded normally. */
5589 if (ext_call == BUILT_IN_NONE)
5590 return NULL_RTX;
5591
5592 /* Change the call to the specified function. */
5593 fndecl = get_callee_fndecl (exp);
5594 addr = CALL_EXPR_FN (exp);
5595 STRIP_NOPS (addr);
5596
5597 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5598 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5599
5600 /* Expand the call here so we can emit trailing code. */
5601 ret = expand_call (exp, target, ignore);
5602
5603 /* Replace the original function just in case it matters. */
5604 TREE_OPERAND (addr, 0) = fndecl;
5605
5606 /* Then issue the arithmetic correction to return the right result. */
5607 if (!ignore)
5608 {
5609 if (code == NOT)
5610 {
5611 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5612 OPTAB_LIB_WIDEN);
5613 ret = expand_simple_unop (mode, NOT, ret, target, true);
5614 }
5615 else
5616 ret = expand_simple_binop (mode, code, ret, val, target, true,
5617 OPTAB_LIB_WIDEN);
5618 }
5619 return ret;
5620 }
5621
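/* Illustration only (compiled out): the trailing arithmetic correction
   above in plain C.  When only the fetch-before library routine is
   available, the fetch-after result is recomputed from the returned
   value, with NAND again special-cased as AND-then-invert.  */
#if 0
static unsigned int
example_correct_after (unsigned int before, unsigned int val, int is_nand)
{
  if (is_nand)
    return ~(before & val);	/* code == NOT.  */
  return before + val;		/* e.g. code == PLUS.  */
}
#endif
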
5622
5623 #ifndef HAVE_atomic_clear
5624 # define HAVE_atomic_clear 0
5625 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5626 #endif
5627
5628 /* Expand an atomic clear operation.
5629 void _atomic_clear (BOOL *obj, enum memmodel)
5630 EXP is the call expression. */
5631
5632 static rtx
5633 expand_builtin_atomic_clear (tree exp)
5634 {
5635 enum machine_mode mode;
5636 rtx mem, ret;
5637 enum memmodel model;
5638
5639 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5640 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5641 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5642
5643 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5644 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5645 {
5646 error ("invalid memory model for %<__atomic_clear%>");
5647 return const0_rtx;
5648 }
5649
5650 if (HAVE_atomic_clear)
5651 {
5652 emit_insn (gen_atomic_clear (mem, model));
5653 return const0_rtx;
5654 }
5655
5656 /* Try issuing an atomic store, allowing fallback to a
5657 __sync_lock_release libcall; failing that, issue a plain store. The
5658 only way this can fail is if the bool type is larger than a word size;
5659 unlikely, but handle it anyway for completeness. Assume a single threaded
5660 model since there is no atomic support in this case, and no barriers are required. */
5661 ret = expand_atomic_store (mem, const0_rtx, model, true);
5662 if (!ret)
5663 emit_move_insn (mem, const0_rtx);
5664 return const0_rtx;
5665 }
5666
5667 /* Expand an atomic test_and_set operation.
5668 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5669 EXP is the call expression. */
5670
5671 static rtx
5672 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5673 {
5674 rtx mem;
5675 enum memmodel model;
5676 enum machine_mode mode;
5677
5678 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5679 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5680 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5681
5682 return expand_atomic_test_and_set (target, mem, model);
5683 }
5684
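/* Illustration only (compiled out): the C11-style flag protocol served
   by the two expanders above, here used as a tiny lock on a bool-sized
   object.  */
#if 0
static unsigned char example_flag;

static void
example_flag_lock (void)
{
  /* test_and_set returns the previous value; nonzero means taken.  */
  while (__atomic_test_and_set (&example_flag, __ATOMIC_ACQUIRE))
    ;
}

static void
example_flag_unlock (void)
{
  __atomic_clear (&example_flag, __ATOMIC_RELEASE);
}
#endif
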
5685
5686 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5687 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5688
5689 static tree
5690 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5691 {
5692 int size;
5693 enum machine_mode mode;
5694 unsigned int mode_align, type_align;
5695
5696 if (TREE_CODE (arg0) != INTEGER_CST)
5697 return NULL_TREE;
5698
5699 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5700 mode = mode_for_size (size, MODE_INT, 0);
5701 mode_align = GET_MODE_ALIGNMENT (mode);
5702
5703 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5704 type_align = mode_align;
5705 else
5706 {
5707 tree ttype = TREE_TYPE (arg1);
5708
5709 /* This function is usually invoked and folded immediately by the front
5710 end before anything else has a chance to look at it. The pointer
5711 parameter at this point is usually cast to a void *, so check for that
5712 and look past the cast. */
5713 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5714 && VOID_TYPE_P (TREE_TYPE (ttype)))
5715 arg1 = TREE_OPERAND (arg1, 0);
5716
5717 ttype = TREE_TYPE (arg1);
5718 gcc_assert (POINTER_TYPE_P (ttype));
5719
5720 /* Get the underlying type of the object. */
5721 ttype = TREE_TYPE (ttype);
5722 type_align = TYPE_ALIGN (ttype);
5723 }
5724
5725 /* If the object has smaller alignment, the lock free routines cannot
5726 be used. */
5727 if (type_align < mode_align)
5728 return boolean_false_node;
5729
5730 /* Check if a compare_and_swap pattern exists for the mode which represents
5731 the required size. The pattern is not allowed to fail, so the existence
5732 of the pattern indicates support is present. */
5733 if (can_compare_and_swap_p (mode, true))
5734 return boolean_true_node;
5735 else
5736 return boolean_false_node;
5737 }
5738
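/* Illustration only (compiled out): the folding above is what lets this
   query collapse to a compile-time constant.  The size must be a
   literal, and a null object pointer means "assume the typical (mode)
   alignment for that size".  */
#if 0
static int
example_query (int *p)
{
  int a = __atomic_always_lock_free (sizeof (int), 0);	/* Mode alignment.  */
  int b = __atomic_always_lock_free (sizeof (int), p);	/* Object alignment.  */
  return a && b;
}
#endif
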
5739 /* Return true if the parameters to call EXP represent an object which will
5740 always generate lock free instructions. The first argument represents the
5741 size of the object, and the second parameter is a pointer to the object
5742 itself. If NULL is passed for the object, then the result is based on
5743 typical alignment for an object of the specified size. Otherwise return
5744 false. */
5745
5746 static rtx
5747 expand_builtin_atomic_always_lock_free (tree exp)
5748 {
5749 tree size;
5750 tree arg0 = CALL_EXPR_ARG (exp, 0);
5751 tree arg1 = CALL_EXPR_ARG (exp, 1);
5752
5753 if (TREE_CODE (arg0) != INTEGER_CST)
5754 {
5755 error ("non-constant argument 1 to %<__atomic_always_lock_free%>");
5756 return const0_rtx;
5757 }
5758
5759 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5760 if (size == boolean_true_node)
5761 return const1_rtx;
5762 return const0_rtx;
5763 }
5764
5765 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5766 is lock free on this architecture. */
5767
5768 static tree
5769 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5770 {
5771 if (!flag_inline_atomics)
5772 return NULL_TREE;
5773
5774 /* If it isn't always lock free, don't generate a result. */
5775 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5776 return boolean_true_node;
5777
5778 return NULL_TREE;
5779 }
5780
5781 /* Return one if it can be determined that the object described by the
5782 parameters to call EXP is lock free. The first argument represents the
5783 size of the object, and the second parameter is a pointer to the object
5784 itself. If NULL is passed for the object, then the result is based on
5785 typical alignment for an object of the specified size. Otherwise return
5786 NULL_RTX. */
5787
5788 static rtx
5789 expand_builtin_atomic_is_lock_free (tree exp)
5790 {
5791 tree size;
5792 tree arg0 = CALL_EXPR_ARG (exp, 0);
5793 tree arg1 = CALL_EXPR_ARG (exp, 1);
5794
5795 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5796 {
5797 error ("non-integer argument 1 to %<__atomic_is_lock_free%>");
5798 return NULL_RTX;
5799 }
5800
5801 if (!flag_inline_atomics)
5802 return NULL_RTX;
5803
5804 /* If the value is known at compile time, return the RTX for it. */
5805 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5806 if (size == boolean_true_node)
5807 return const1_rtx;
5808
5809 return NULL_RTX;
5810 }
5811
5812 /* Expand the __atomic_thread_fence intrinsic:
5813 void __atomic_thread_fence (enum memmodel)
5814 EXP is the CALL_EXPR. */
5815
5816 static void
5817 expand_builtin_atomic_thread_fence (tree exp)
5818 {
5819 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5820 expand_mem_thread_fence (model);
5821 }
5822
5823 /* Expand the __atomic_signal_fence intrinsic:
5824 void __atomic_signal_fence (enum memmodel)
5825 EXP is the CALL_EXPR. */
5826
5827 static void
5828 expand_builtin_atomic_signal_fence (tree exp)
5829 {
5830 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5831 expand_mem_signal_fence (model);
5832 }
5833
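/* Illustration only (compiled out): the distinction between the two
   fences expanded above.  The thread fence orders memory against other
   threads and may emit a hardware barrier; the signal fence only
   constrains the compiler, for code synchronizing with a signal handler
   on the same thread.  */
#if 0
static void
example_fences (void)
{
  __atomic_thread_fence (__ATOMIC_SEQ_CST);	/* Inter-thread barrier.  */
  __atomic_signal_fence (__ATOMIC_SEQ_CST);	/* Compiler-only barrier.  */
}
#endif
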
5834 /* Expand the __sync_synchronize intrinsic. */
5835
5836 static void
5837 expand_builtin_sync_synchronize (void)
5838 {
5839 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5840 }
5841
5842 \f
5843 /* Expand an expression EXP that calls a built-in function,
5844 with result going to TARGET if that's convenient
5845 (and in mode MODE if that's convenient).
5846 SUBTARGET may be used as the target for computing one of EXP's operands.
5847 IGNORE is nonzero if the value is to be ignored. */
5848
5849 rtx
5850 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5851 int ignore)
5852 {
5853 tree fndecl = get_callee_fndecl (exp);
5854 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5855 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5856 int flags;
5857
5858 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5859 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5860
5861 /* When not optimizing, generate calls to library functions for a certain
5862 set of builtins. */
5863 if (!optimize
5864 && !called_as_built_in (fndecl)
5865 && fcode != BUILT_IN_ALLOCA
5866 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5867 && fcode != BUILT_IN_FREE)
5868 return expand_call (exp, target, ignore);
5869
5870 /* The built-in function expanders test for target == const0_rtx
5871 to determine whether the function's result will be ignored. */
5872 if (ignore)
5873 target = const0_rtx;
5874
5875 /* If the result of a pure or const built-in function is ignored, and
5876 none of its arguments are volatile, we can avoid expanding the
5877 built-in call and just evaluate the arguments for side-effects. */
5878 if (target == const0_rtx
5879 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5880 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5881 {
5882 bool volatilep = false;
5883 tree arg;
5884 call_expr_arg_iterator iter;
5885
5886 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5887 if (TREE_THIS_VOLATILE (arg))
5888 {
5889 volatilep = true;
5890 break;
5891 }
5892
5893 if (! volatilep)
5894 {
5895 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5896 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5897 return const0_rtx;
5898 }
5899 }
5900
5901 switch (fcode)
5902 {
5903 CASE_FLT_FN (BUILT_IN_FABS):
5904 target = expand_builtin_fabs (exp, target, subtarget);
5905 if (target)
5906 return target;
5907 break;
5908
5909 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5910 target = expand_builtin_copysign (exp, target, subtarget);
5911 if (target)
5912 return target;
5913 break;
5914
5915 /* Just do a normal library call if we were unable to fold
5916 the values. */
5917 CASE_FLT_FN (BUILT_IN_CABS):
5918 break;
5919
5920 CASE_FLT_FN (BUILT_IN_EXP):
5921 CASE_FLT_FN (BUILT_IN_EXP10):
5922 CASE_FLT_FN (BUILT_IN_POW10):
5923 CASE_FLT_FN (BUILT_IN_EXP2):
5924 CASE_FLT_FN (BUILT_IN_EXPM1):
5925 CASE_FLT_FN (BUILT_IN_LOGB):
5926 CASE_FLT_FN (BUILT_IN_LOG):
5927 CASE_FLT_FN (BUILT_IN_LOG10):
5928 CASE_FLT_FN (BUILT_IN_LOG2):
5929 CASE_FLT_FN (BUILT_IN_LOG1P):
5930 CASE_FLT_FN (BUILT_IN_TAN):
5931 CASE_FLT_FN (BUILT_IN_ASIN):
5932 CASE_FLT_FN (BUILT_IN_ACOS):
5933 CASE_FLT_FN (BUILT_IN_ATAN):
5934 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5935 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5936 because of possible accuracy problems. */
5937 if (! flag_unsafe_math_optimizations)
5938 break;
5939 CASE_FLT_FN (BUILT_IN_SQRT):
5940 CASE_FLT_FN (BUILT_IN_FLOOR):
5941 CASE_FLT_FN (BUILT_IN_CEIL):
5942 CASE_FLT_FN (BUILT_IN_TRUNC):
5943 CASE_FLT_FN (BUILT_IN_ROUND):
5944 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5945 CASE_FLT_FN (BUILT_IN_RINT):
5946 target = expand_builtin_mathfn (exp, target, subtarget);
5947 if (target)
5948 return target;
5949 break;
5950
5951 CASE_FLT_FN (BUILT_IN_FMA):
5952 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5953 if (target)
5954 return target;
5955 break;
5956
5957 CASE_FLT_FN (BUILT_IN_ILOGB):
5958 if (! flag_unsafe_math_optimizations)
5959 break;
5960 CASE_FLT_FN (BUILT_IN_ISINF):
5961 CASE_FLT_FN (BUILT_IN_FINITE):
5962 case BUILT_IN_ISFINITE:
5963 case BUILT_IN_ISNORMAL:
5964 target = expand_builtin_interclass_mathfn (exp, target);
5965 if (target)
5966 return target;
5967 break;
5968
5969 CASE_FLT_FN (BUILT_IN_ICEIL):
5970 CASE_FLT_FN (BUILT_IN_LCEIL):
5971 CASE_FLT_FN (BUILT_IN_LLCEIL):
5972 CASE_FLT_FN (BUILT_IN_LFLOOR):
5973 CASE_FLT_FN (BUILT_IN_IFLOOR):
5974 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5975 target = expand_builtin_int_roundingfn (exp, target);
5976 if (target)
5977 return target;
5978 break;
5979
5980 CASE_FLT_FN (BUILT_IN_IRINT):
5981 CASE_FLT_FN (BUILT_IN_LRINT):
5982 CASE_FLT_FN (BUILT_IN_LLRINT):
5983 CASE_FLT_FN (BUILT_IN_IROUND):
5984 CASE_FLT_FN (BUILT_IN_LROUND):
5985 CASE_FLT_FN (BUILT_IN_LLROUND):
5986 target = expand_builtin_int_roundingfn_2 (exp, target);
5987 if (target)
5988 return target;
5989 break;
5990
5991 CASE_FLT_FN (BUILT_IN_POWI):
5992 target = expand_builtin_powi (exp, target);
5993 if (target)
5994 return target;
5995 break;
5996
5997 CASE_FLT_FN (BUILT_IN_ATAN2):
5998 CASE_FLT_FN (BUILT_IN_LDEXP):
5999 CASE_FLT_FN (BUILT_IN_SCALB):
6000 CASE_FLT_FN (BUILT_IN_SCALBN):
6001 CASE_FLT_FN (BUILT_IN_SCALBLN):
6002 if (! flag_unsafe_math_optimizations)
6003 break;
6004
6005 CASE_FLT_FN (BUILT_IN_FMOD):
6006 CASE_FLT_FN (BUILT_IN_REMAINDER):
6007 CASE_FLT_FN (BUILT_IN_DREM):
6008 CASE_FLT_FN (BUILT_IN_POW):
6009 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6010 if (target)
6011 return target;
6012 break;
6013
6014 CASE_FLT_FN (BUILT_IN_CEXPI):
6015 target = expand_builtin_cexpi (exp, target);
6016 gcc_assert (target);
6017 return target;
6018
6019 CASE_FLT_FN (BUILT_IN_SIN):
6020 CASE_FLT_FN (BUILT_IN_COS):
6021 if (! flag_unsafe_math_optimizations)
6022 break;
6023 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6024 if (target)
6025 return target;
6026 break;
6027
6028 CASE_FLT_FN (BUILT_IN_SINCOS):
6029 if (! flag_unsafe_math_optimizations)
6030 break;
6031 target = expand_builtin_sincos (exp);
6032 if (target)
6033 return target;
6034 break;
6035
6036 case BUILT_IN_APPLY_ARGS:
6037 return expand_builtin_apply_args ();
6038
6039 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6040 FUNCTION with a copy of the parameters described by
6041 ARGUMENTS, and ARGSIZE. It returns a block of memory
6042 allocated on the stack into which is stored all the registers
6043 that might possibly be used for returning the result of a
6044 function. ARGUMENTS is the value returned by
6045 __builtin_apply_args. ARGSIZE is the number of bytes of
6046 arguments that must be copied. ??? How should this value be
6047 computed? We'll also need a safe worst case value for varargs
6048 functions. */
6049 case BUILT_IN_APPLY:
6050 if (!validate_arglist (exp, POINTER_TYPE,
6051 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6052 && !validate_arglist (exp, REFERENCE_TYPE,
6053 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6054 return const0_rtx;
6055 else
6056 {
6057 rtx ops[3];
6058
6059 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6060 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6061 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6062
6063 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6064 }
6065
6066 /* __builtin_return (RESULT) causes the function to return the
6067 value described by RESULT. RESULT is address of the block of
6068 memory returned by __builtin_apply. */
6069 case BUILT_IN_RETURN:
6070 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6071 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6072 return const0_rtx;
6073
6074 case BUILT_IN_SAVEREGS:
6075 return expand_builtin_saveregs ();
6076
6077 case BUILT_IN_VA_ARG_PACK:
6078 /* All valid uses of __builtin_va_arg_pack () are removed during
6079 inlining. */
6080 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6081 return const0_rtx;
6082
6083 case BUILT_IN_VA_ARG_PACK_LEN:
6084 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6085 inlining. */
6086 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6087 return const0_rtx;
6088
6089 /* Return the address of the first anonymous stack arg. */
6090 case BUILT_IN_NEXT_ARG:
6091 if (fold_builtin_next_arg (exp, false))
6092 return const0_rtx;
6093 return expand_builtin_next_arg ();
6094
6095 case BUILT_IN_CLEAR_CACHE:
6096 target = expand_builtin___clear_cache (exp);
6097 if (target)
6098 return target;
6099 break;
6100
6101 case BUILT_IN_CLASSIFY_TYPE:
6102 return expand_builtin_classify_type (exp);
6103
6104 case BUILT_IN_CONSTANT_P:
6105 return const0_rtx;
6106
6107 case BUILT_IN_FRAME_ADDRESS:
6108 case BUILT_IN_RETURN_ADDRESS:
6109 return expand_builtin_frame_address (fndecl, exp);
6110
6111 /* Returns the address of the area where the structure is returned.
6112 0 otherwise. */
6113 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6114 if (call_expr_nargs (exp) != 0
6115 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6116 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6117 return const0_rtx;
6118 else
6119 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6120
6121 case BUILT_IN_ALLOCA:
6122 case BUILT_IN_ALLOCA_WITH_ALIGN:
6123 /* If the allocation stems from the declaration of a variable-sized
6124 object, it cannot accumulate. */
6125 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6126 if (target)
6127 return target;
6128 break;
6129
6130 case BUILT_IN_STACK_SAVE:
6131 return expand_stack_save ();
6132
6133 case BUILT_IN_STACK_RESTORE:
6134 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6135 return const0_rtx;
6136
6137 case BUILT_IN_BSWAP16:
6138 case BUILT_IN_BSWAP32:
6139 case BUILT_IN_BSWAP64:
6140 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6141 if (target)
6142 return target;
6143 break;
6144
6145 CASE_INT_FN (BUILT_IN_FFS):
6146 case BUILT_IN_FFSIMAX:
6147 target = expand_builtin_unop (target_mode, exp, target,
6148 subtarget, ffs_optab);
6149 if (target)
6150 return target;
6151 break;
6152
6153 CASE_INT_FN (BUILT_IN_CLZ):
6154 case BUILT_IN_CLZIMAX:
6155 target = expand_builtin_unop (target_mode, exp, target,
6156 subtarget, clz_optab);
6157 if (target)
6158 return target;
6159 break;
6160
6161 CASE_INT_FN (BUILT_IN_CTZ):
6162 case BUILT_IN_CTZIMAX:
6163 target = expand_builtin_unop (target_mode, exp, target,
6164 subtarget, ctz_optab);
6165 if (target)
6166 return target;
6167 break;
6168
6169 CASE_INT_FN (BUILT_IN_CLRSB):
6170 case BUILT_IN_CLRSBIMAX:
6171 target = expand_builtin_unop (target_mode, exp, target,
6172 subtarget, clrsb_optab);
6173 if (target)
6174 return target;
6175 break;
6176
6177 CASE_INT_FN (BUILT_IN_POPCOUNT):
6178 case BUILT_IN_POPCOUNTIMAX:
6179 target = expand_builtin_unop (target_mode, exp, target,
6180 subtarget, popcount_optab);
6181 if (target)
6182 return target;
6183 break;
6184
6185 CASE_INT_FN (BUILT_IN_PARITY):
6186 case BUILT_IN_PARITYIMAX:
6187 target = expand_builtin_unop (target_mode, exp, target,
6188 subtarget, parity_optab);
6189 if (target)
6190 return target;
6191 break;
6192
6193 case BUILT_IN_STRLEN:
6194 target = expand_builtin_strlen (exp, target, target_mode);
6195 if (target)
6196 return target;
6197 break;
6198
6199 case BUILT_IN_STRCPY:
6200 target = expand_builtin_strcpy (exp, target);
6201 if (target)
6202 return target;
6203 break;
6204
6205 case BUILT_IN_STRNCPY:
6206 target = expand_builtin_strncpy (exp, target);
6207 if (target)
6208 return target;
6209 break;
6210
6211 case BUILT_IN_STPCPY:
6212 target = expand_builtin_stpcpy (exp, target, mode);
6213 if (target)
6214 return target;
6215 break;
6216
6217 case BUILT_IN_MEMCPY:
6218 target = expand_builtin_memcpy (exp, target);
6219 if (target)
6220 return target;
6221 break;
6222
6223 case BUILT_IN_MEMPCPY:
6224 target = expand_builtin_mempcpy (exp, target, mode);
6225 if (target)
6226 return target;
6227 break;
6228
6229 case BUILT_IN_MEMSET:
6230 target = expand_builtin_memset (exp, target, mode);
6231 if (target)
6232 return target;
6233 break;
6234
6235 case BUILT_IN_BZERO:
6236 target = expand_builtin_bzero (exp);
6237 if (target)
6238 return target;
6239 break;
6240
6241 case BUILT_IN_STRCMP:
6242 target = expand_builtin_strcmp (exp, target);
6243 if (target)
6244 return target;
6245 break;
6246
6247 case BUILT_IN_STRNCMP:
6248 target = expand_builtin_strncmp (exp, target, mode);
6249 if (target)
6250 return target;
6251 break;
6252
6253 case BUILT_IN_BCMP:
6254 case BUILT_IN_MEMCMP:
6255 target = expand_builtin_memcmp (exp, target, mode);
6256 if (target)
6257 return target;
6258 break;
6259
6260 case BUILT_IN_SETJMP:
6261 /* This should have been lowered to the builtins below. */
6262 gcc_unreachable ();
6263
6264 case BUILT_IN_SETJMP_SETUP:
6265 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6266 and the receiver label. */
6267 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6268 {
6269 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6270 VOIDmode, EXPAND_NORMAL);
6271 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6272 rtx label_r = label_rtx (label);
6273
6274 /* This is copied from the handling of non-local gotos. */
6275 expand_builtin_setjmp_setup (buf_addr, label_r);
6276 nonlocal_goto_handler_labels
6277 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6278 nonlocal_goto_handler_labels);
6279 /* ??? Do not let expand_label treat us as such since we would
6280 not want to be both on the list of non-local labels and on
6281 the list of forced labels. */
6282 FORCED_LABEL (label) = 0;
6283 return const0_rtx;
6284 }
6285 break;
6286
6287 case BUILT_IN_SETJMP_DISPATCHER:
6288 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6289 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6290 {
6291 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6292 rtx label_r = label_rtx (label);
6293
6294 /* Remove the dispatcher label from the list of non-local labels
6295 since the receiver labels have been added to it above. */
6296 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6297 return const0_rtx;
6298 }
6299 break;
6300
6301 case BUILT_IN_SETJMP_RECEIVER:
6302 /* __builtin_setjmp_receiver is passed the receiver label. */
6303 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6304 {
6305 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6306 rtx label_r = label_rtx (label);
6307
6308 expand_builtin_setjmp_receiver (label_r);
6309 return const0_rtx;
6310 }
6311 break;
6312
6313 /* __builtin_longjmp is passed a pointer to an array of five words.
6314 It's similar to the C library longjmp function but works with
6315 __builtin_setjmp above. */
6316 case BUILT_IN_LONGJMP:
6317 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6318 {
6319 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6320 VOIDmode, EXPAND_NORMAL);
6321 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6322
6323 if (value != const1_rtx)
6324 {
6325 error ("%<__builtin_longjmp%> second argument must be 1");
6326 return const0_rtx;
6327 }
6328
6329 expand_builtin_longjmp (buf_addr, value);
6330 return const0_rtx;
6331 }
6332 break;
6333
6334 case BUILT_IN_NONLOCAL_GOTO:
6335 target = expand_builtin_nonlocal_goto (exp);
6336 if (target)
6337 return target;
6338 break;
6339
6340 /* This updates the setjmp buffer that is its argument with the value
6341 of the current stack pointer. */
6342 case BUILT_IN_UPDATE_SETJMP_BUF:
6343 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6344 {
6345 rtx buf_addr
6346 = expand_normal (CALL_EXPR_ARG (exp, 0));
6347
6348 expand_builtin_update_setjmp_buf (buf_addr);
6349 return const0_rtx;
6350 }
6351 break;
6352
6353 case BUILT_IN_TRAP:
6354 expand_builtin_trap ();
6355 return const0_rtx;
6356
6357 case BUILT_IN_UNREACHABLE:
6358 expand_builtin_unreachable ();
6359 return const0_rtx;
6360
6361 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6362 case BUILT_IN_SIGNBITD32:
6363 case BUILT_IN_SIGNBITD64:
6364 case BUILT_IN_SIGNBITD128:
6365 target = expand_builtin_signbit (exp, target);
6366 if (target)
6367 return target;
6368 break;
6369
6370 /* Various hooks for the DWARF 2 __throw routine. */
6371 case BUILT_IN_UNWIND_INIT:
6372 expand_builtin_unwind_init ();
6373 return const0_rtx;
6374 case BUILT_IN_DWARF_CFA:
6375 return virtual_cfa_rtx;
6376 #ifdef DWARF2_UNWIND_INFO
6377 case BUILT_IN_DWARF_SP_COLUMN:
6378 return expand_builtin_dwarf_sp_column ();
6379 case BUILT_IN_INIT_DWARF_REG_SIZES:
6380 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6381 return const0_rtx;
6382 #endif
6383 case BUILT_IN_FROB_RETURN_ADDR:
6384 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6385 case BUILT_IN_EXTRACT_RETURN_ADDR:
6386 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6387 case BUILT_IN_EH_RETURN:
6388 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6389 CALL_EXPR_ARG (exp, 1));
6390 return const0_rtx;
6391 #ifdef EH_RETURN_DATA_REGNO
6392 case BUILT_IN_EH_RETURN_DATA_REGNO:
6393 return expand_builtin_eh_return_data_regno (exp);
6394 #endif
6395 case BUILT_IN_EXTEND_POINTER:
6396 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6397 case BUILT_IN_EH_POINTER:
6398 return expand_builtin_eh_pointer (exp);
6399 case BUILT_IN_EH_FILTER:
6400 return expand_builtin_eh_filter (exp);
6401 case BUILT_IN_EH_COPY_VALUES:
6402 return expand_builtin_eh_copy_values (exp);
6403
6404 case BUILT_IN_VA_START:
6405 return expand_builtin_va_start (exp);
6406 case BUILT_IN_VA_END:
6407 return expand_builtin_va_end (exp);
6408 case BUILT_IN_VA_COPY:
6409 return expand_builtin_va_copy (exp);
6410 case BUILT_IN_EXPECT:
6411 return expand_builtin_expect (exp, target);
6412 case BUILT_IN_ASSUME_ALIGNED:
6413 return expand_builtin_assume_aligned (exp, target);
6414 case BUILT_IN_PREFETCH:
6415 expand_builtin_prefetch (exp);
6416 return const0_rtx;
6417
6418 case BUILT_IN_INIT_TRAMPOLINE:
6419 return expand_builtin_init_trampoline (exp, true);
6420 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6421 return expand_builtin_init_trampoline (exp, false);
6422 case BUILT_IN_ADJUST_TRAMPOLINE:
6423 return expand_builtin_adjust_trampoline (exp);
6424
6425 case BUILT_IN_FORK:
6426 case BUILT_IN_EXECL:
6427 case BUILT_IN_EXECV:
6428 case BUILT_IN_EXECLP:
6429 case BUILT_IN_EXECLE:
6430 case BUILT_IN_EXECVP:
6431 case BUILT_IN_EXECVE:
6432 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6433 if (target)
6434 return target;
6435 break;
6436
6437 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6438 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6439 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6440 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6441 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6442 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6443 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6444 if (target)
6445 return target;
6446 break;
6447
6448 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6449 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6450 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6451 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6452 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6453 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6454 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6455 if (target)
6456 return target;
6457 break;
6458
6459 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6460 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6461 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6462 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6463 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6464 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6465 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6466 if (target)
6467 return target;
6468 break;
6469
6470 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6471 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6472 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6473 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6474 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6475 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6476 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6477 if (target)
6478 return target;
6479 break;
6480
6481 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6482 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6483 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6484 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6485 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6486 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6487 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6488 if (target)
6489 return target;
6490 break;
6491
6492 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6493 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6494 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6495 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6496 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6497 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6498 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6499 if (target)
6500 return target;
6501 break;
6502
6503 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6504 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6505 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6506 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6507 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6508 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6509 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6510 if (target)
6511 return target;
6512 break;
6513
6514 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6515 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6516 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6517 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6518 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6519 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6520 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6521 if (target)
6522 return target;
6523 break;
6524
6525 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6526 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6527 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6528 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6529 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6530 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6531 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6532 if (target)
6533 return target;
6534 break;
6535
6536 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6537 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6538 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6539 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6540 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6541 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6542 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6543 if (target)
6544 return target;
6545 break;
6546
6547 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6548 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6549 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6550 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6551 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6552 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6553 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6554 if (target)
6555 return target;
6556 break;
6557
6558 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6559 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6560 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6561 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6562 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6563 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6564 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6565 if (target)
6566 return target;
6567 break;
6568
6569 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6570 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6571 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6572 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6573 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6574 if (mode == VOIDmode)
6575 mode = TYPE_MODE (boolean_type_node);
6576 if (!target || !register_operand (target, mode))
6577 target = gen_reg_rtx (mode);
6578
6579 mode = get_builtin_sync_mode
6580 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6581 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6582 if (target)
6583 return target;
6584 break;
6585
6586 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6587 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6588 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6589 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6590 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6591 mode = get_builtin_sync_mode
6592 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6593 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6594 if (target)
6595 return target;
6596 break;
6597
6598 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6599 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6600 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6601 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6602 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6603 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6604 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6605 if (target)
6606 return target;
6607 break;
6608
6609 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6610 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6611 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6612 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6613 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6614 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6615 expand_builtin_sync_lock_release (mode, exp);
6616 return const0_rtx;
6617
6618 case BUILT_IN_SYNC_SYNCHRONIZE:
6619 expand_builtin_sync_synchronize ();
6620 return const0_rtx;
6621
6622 case BUILT_IN_ATOMIC_EXCHANGE_1:
6623 case BUILT_IN_ATOMIC_EXCHANGE_2:
6624 case BUILT_IN_ATOMIC_EXCHANGE_4:
6625 case BUILT_IN_ATOMIC_EXCHANGE_8:
6626 case BUILT_IN_ATOMIC_EXCHANGE_16:
6627 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6628 target = expand_builtin_atomic_exchange (mode, exp, target);
6629 if (target)
6630 return target;
6631 break;
6632
6633 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6634 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6635 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6636 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6637 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6638 {
6639 unsigned int nargs, z;
6640 VEC(tree,gc) *vec;
6641
6642 mode =
6643 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6644 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6645 if (target)
6646 return target;
6647
6648 /* If this is turned into an external library call, the weak parameter
6649 must be dropped to match the expected parameter list. */
6650 nargs = call_expr_nargs (exp);
6651 vec = VEC_alloc (tree, gc, nargs - 1);
6652 for (z = 0; z < 3; z++)
6653 VEC_quick_push (tree, vec, CALL_EXPR_ARG (exp, z));
6654 /* Skip the boolean weak parameter. */
6655 for (z = 4; z < 6; z++)
6656 VEC_quick_push (tree, vec, CALL_EXPR_ARG (exp, z));
6657 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6658 break;
6659 }
6660
6661 case BUILT_IN_ATOMIC_LOAD_1:
6662 case BUILT_IN_ATOMIC_LOAD_2:
6663 case BUILT_IN_ATOMIC_LOAD_4:
6664 case BUILT_IN_ATOMIC_LOAD_8:
6665 case BUILT_IN_ATOMIC_LOAD_16:
6666 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6667 target = expand_builtin_atomic_load (mode, exp, target);
6668 if (target)
6669 return target;
6670 break;
6671
6672 case BUILT_IN_ATOMIC_STORE_1:
6673 case BUILT_IN_ATOMIC_STORE_2:
6674 case BUILT_IN_ATOMIC_STORE_4:
6675 case BUILT_IN_ATOMIC_STORE_8:
6676 case BUILT_IN_ATOMIC_STORE_16:
6677 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6678 target = expand_builtin_atomic_store (mode, exp);
6679 if (target)
6680 return const0_rtx;
6681 break;
6682
6683 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6684 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6685 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6686 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6687 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6688 {
6689 enum built_in_function lib;
6690 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6691 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6692 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6693 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6694 ignore, lib);
6695 if (target)
6696 return target;
6697 break;
6698 }
6699 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6700 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6701 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6702 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6703 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6704 {
6705 enum built_in_function lib;
6706 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6707 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6708 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6709 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6710 ignore, lib);
6711 if (target)
6712 return target;
6713 break;
6714 }
6715 case BUILT_IN_ATOMIC_AND_FETCH_1:
6716 case BUILT_IN_ATOMIC_AND_FETCH_2:
6717 case BUILT_IN_ATOMIC_AND_FETCH_4:
6718 case BUILT_IN_ATOMIC_AND_FETCH_8:
6719 case BUILT_IN_ATOMIC_AND_FETCH_16:
6720 {
6721 enum built_in_function lib;
6722 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6723 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6724 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6725 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6726 ignore, lib);
6727 if (target)
6728 return target;
6729 break;
6730 }
6731 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6732 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6733 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6734 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6735 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6736 {
6737 enum built_in_function lib;
6738 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6739 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6740 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6741 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6742 ignore, lib);
6743 if (target)
6744 return target;
6745 break;
6746 }
6747 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6748 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6749 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6750 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6751 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6752 {
6753 enum built_in_function lib;
6754 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6755 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6756 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6757 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6758 ignore, lib);
6759 if (target)
6760 return target;
6761 break;
6762 }
6763 case BUILT_IN_ATOMIC_OR_FETCH_1:
6764 case BUILT_IN_ATOMIC_OR_FETCH_2:
6765 case BUILT_IN_ATOMIC_OR_FETCH_4:
6766 case BUILT_IN_ATOMIC_OR_FETCH_8:
6767 case BUILT_IN_ATOMIC_OR_FETCH_16:
6768 {
6769 enum built_in_function lib;
6770 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6771 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6772 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6773 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6774 ignore, lib);
6775 if (target)
6776 return target;
6777 break;
6778 }
6779 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6780 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6781 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6782 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6783 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6784 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6785 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6786 ignore, BUILT_IN_NONE);
6787 if (target)
6788 return target;
6789 break;
6790
6791 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6792 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6793 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6794 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6795 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6796 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6797 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6798 ignore, BUILT_IN_NONE);
6799 if (target)
6800 return target;
6801 break;
6802
6803 case BUILT_IN_ATOMIC_FETCH_AND_1:
6804 case BUILT_IN_ATOMIC_FETCH_AND_2:
6805 case BUILT_IN_ATOMIC_FETCH_AND_4:
6806 case BUILT_IN_ATOMIC_FETCH_AND_8:
6807 case BUILT_IN_ATOMIC_FETCH_AND_16:
6808 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6809 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6810 ignore, BUILT_IN_NONE);
6811 if (target)
6812 return target;
6813 break;
6814
6815 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6816 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6817 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6818 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6819 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6820 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6821 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6822 ignore, BUILT_IN_NONE);
6823 if (target)
6824 return target;
6825 break;
6826
6827 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6828 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6829 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6830 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6831 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6832 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6833 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6834 ignore, BUILT_IN_NONE);
6835 if (target)
6836 return target;
6837 break;
6838
6839 case BUILT_IN_ATOMIC_FETCH_OR_1:
6840 case BUILT_IN_ATOMIC_FETCH_OR_2:
6841 case BUILT_IN_ATOMIC_FETCH_OR_4:
6842 case BUILT_IN_ATOMIC_FETCH_OR_8:
6843 case BUILT_IN_ATOMIC_FETCH_OR_16:
6844 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6845 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6846 ignore, BUILT_IN_NONE);
6847 if (target)
6848 return target;
6849 break;
6850
6851 case BUILT_IN_ATOMIC_TEST_AND_SET:
6852 return expand_builtin_atomic_test_and_set (exp, target);
6853
6854 case BUILT_IN_ATOMIC_CLEAR:
6855 return expand_builtin_atomic_clear (exp);
6856
6857 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6858 return expand_builtin_atomic_always_lock_free (exp);
6859
6860 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6861 target = expand_builtin_atomic_is_lock_free (exp);
6862 if (target)
6863 return target;
6864 break;
6865
6866 case BUILT_IN_ATOMIC_THREAD_FENCE:
6867 expand_builtin_atomic_thread_fence (exp);
6868 return const0_rtx;
6869
6870 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6871 expand_builtin_atomic_signal_fence (exp);
6872 return const0_rtx;
6873
6874 case BUILT_IN_OBJECT_SIZE:
6875 return expand_builtin_object_size (exp);
6876
6877 case BUILT_IN_MEMCPY_CHK:
6878 case BUILT_IN_MEMPCPY_CHK:
6879 case BUILT_IN_MEMMOVE_CHK:
6880 case BUILT_IN_MEMSET_CHK:
6881 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6882 if (target)
6883 return target;
6884 break;
6885
6886 case BUILT_IN_STRCPY_CHK:
6887 case BUILT_IN_STPCPY_CHK:
6888 case BUILT_IN_STRNCPY_CHK:
6889 case BUILT_IN_STPNCPY_CHK:
6890 case BUILT_IN_STRCAT_CHK:
6891 case BUILT_IN_STRNCAT_CHK:
6892 case BUILT_IN_SNPRINTF_CHK:
6893 case BUILT_IN_VSNPRINTF_CHK:
6894 maybe_emit_chk_warning (exp, fcode);
6895 break;
6896
6897 case BUILT_IN_SPRINTF_CHK:
6898 case BUILT_IN_VSPRINTF_CHK:
6899 maybe_emit_sprintf_chk_warning (exp, fcode);
6900 break;
6901
6902 case BUILT_IN_FREE:
6903 if (warn_free_nonheap_object)
6904 maybe_emit_free_warning (exp);
6905 break;
6906
6907 default: /* Just do a library call if the builtin is unknown. */
6908 break;
6909 }
6910
6911 /* The switch statement above can drop through to cause the function
6912 to be called normally. */
6913 return expand_call (exp, target, ignore);
6914 }
6915
6916 /* Determine whether a tree node represents a call to a built-in
6917 function. If the tree T is a call to a built-in function with
6918 the right number of arguments of the appropriate types, return
6919 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6920 Otherwise the return value is END_BUILTINS. */
6921
6922 enum built_in_function
6923 builtin_mathfn_code (const_tree t)
6924 {
6925 const_tree fndecl, arg, parmlist;
6926 const_tree argtype, parmtype;
6927 const_call_expr_arg_iterator iter;
6928
6929 if (TREE_CODE (t) != CALL_EXPR
6930 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6931 return END_BUILTINS;
6932
6933 fndecl = get_callee_fndecl (t);
6934 if (fndecl == NULL_TREE
6935 || TREE_CODE (fndecl) != FUNCTION_DECL
6936 || ! DECL_BUILT_IN (fndecl)
6937 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6938 return END_BUILTINS;
6939
6940 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6941 init_const_call_expr_arg_iterator (t, &iter);
6942 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6943 {
6944 /* If a function doesn't take a variable number of arguments,
6945 the last element in the list will have type `void'. */
6946 parmtype = TREE_VALUE (parmlist);
6947 if (VOID_TYPE_P (parmtype))
6948 {
6949 if (more_const_call_expr_args_p (&iter))
6950 return END_BUILTINS;
6951 return DECL_FUNCTION_CODE (fndecl);
6952 }
6953
6954 if (! more_const_call_expr_args_p (&iter))
6955 return END_BUILTINS;
6956
6957 arg = next_const_call_expr_arg (&iter);
6958 argtype = TREE_TYPE (arg);
6959
6960 if (SCALAR_FLOAT_TYPE_P (parmtype))
6961 {
6962 if (! SCALAR_FLOAT_TYPE_P (argtype))
6963 return END_BUILTINS;
6964 }
6965 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6966 {
6967 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6968 return END_BUILTINS;
6969 }
6970 else if (POINTER_TYPE_P (parmtype))
6971 {
6972 if (! POINTER_TYPE_P (argtype))
6973 return END_BUILTINS;
6974 }
6975 else if (INTEGRAL_TYPE_P (parmtype))
6976 {
6977 if (! INTEGRAL_TYPE_P (argtype))
6978 return END_BUILTINS;
6979 }
6980 else
6981 return END_BUILTINS;
6982 }
6983
6984 /* Variable-length argument list. */
6985 return DECL_FUNCTION_CODE (fndecl);
6986 }
6987
6988 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6989 evaluate to a constant. */
6990
6991 static tree
6992 fold_builtin_constant_p (tree arg)
6993 {
6994 /* We return 1 for a numeric type that's known to be a constant
6995 value at compile-time or for an aggregate type that's a
6996 literal constant. */
6997 STRIP_NOPS (arg);
6998
6999 /* If we know this is a constant, return the constant one. */
7000 if (CONSTANT_CLASS_P (arg)
7001 || (TREE_CODE (arg) == CONSTRUCTOR
7002 && TREE_CONSTANT (arg)))
7003 return integer_one_node;
7004 if (TREE_CODE (arg) == ADDR_EXPR)
7005 {
7006 tree op = TREE_OPERAND (arg, 0);
7007 if (TREE_CODE (op) == STRING_CST
7008 || (TREE_CODE (op) == ARRAY_REF
7009 && integer_zerop (TREE_OPERAND (op, 1))
7010 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7011 return integer_one_node;
7012 }
7013
7014 /* If this expression has side effects, show we don't know it to be a
7015 constant. Likewise if it's a pointer or aggregate type since in
7016 those cases we only want literals, which are only optimized
7017 when generating RTL, not later.
7018 And finally, if we are compiling an initializer, not code, we
7019 need to return a definite result now; there's not going to be any
7020 more optimization done. */
7021 if (TREE_SIDE_EFFECTS (arg)
7022 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7023 || POINTER_TYPE_P (TREE_TYPE (arg))
7024 || cfun == 0
7025 || folding_initializer)
7026 return integer_zero_node;
7027
7028 return NULL_TREE;
7029 }
7030
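/* As an illustrative sketch (not a verbatim specification of the fold
   above), the behavior at the source level is roughly:

     __builtin_constant_p (42)      -> 1  (a CONSTANT_CLASS_P node)
     __builtin_constant_p ("abc")   -> 1  (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (ptr)     -> 0  (pointer type: only literals count)
     __builtin_constant_p (x + y)   -> NULL_TREE, i.e. the decision is
                                       deferred to later optimization.  */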
7031 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7032 return it as a truthvalue. */
7033
7034 static tree
7035 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
7036 {
7037 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7038
7039 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7040 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7041 ret_type = TREE_TYPE (TREE_TYPE (fn));
7042 pred_type = TREE_VALUE (arg_types);
7043 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7044
7045 pred = fold_convert_loc (loc, pred_type, pred);
7046 expected = fold_convert_loc (loc, expected_type, expected);
7047 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
7048
7049 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7050 build_int_cst (ret_type, 0));
7051 }
7052
7053 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7054 NULL_TREE if no simplification is possible. */
7055
7056 static tree
7057 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
7058 {
7059 tree inner, fndecl, inner_arg0;
7060 enum tree_code code;
7061
7062 /* Distribute the expected value over short-circuiting operators.
7063 See through the cast from truthvalue_type_node to long. */
7064 inner_arg0 = arg0;
7065 while (TREE_CODE (inner_arg0) == NOP_EXPR
7066 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7067 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7068 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7069
7070 /* If this is a builtin_expect within a builtin_expect keep the
7071 inner one. See through a comparison against a constant. It
7072 might have been added to create a truthvalue. */
7073 inner = inner_arg0;
7074
7075 if (COMPARISON_CLASS_P (inner)
7076 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7077 inner = TREE_OPERAND (inner, 0);
7078
7079 if (TREE_CODE (inner) == CALL_EXPR
7080 && (fndecl = get_callee_fndecl (inner))
7081 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7082 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7083 return arg0;
7084
7085 inner = inner_arg0;
7086 code = TREE_CODE (inner);
7087 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7088 {
7089 tree op0 = TREE_OPERAND (inner, 0);
7090 tree op1 = TREE_OPERAND (inner, 1);
7091
7092 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7093 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7094 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7095
7096 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7097 }
7098
7099 /* If the argument isn't invariant then there's nothing else we can do. */
7100 if (!TREE_CONSTANT (inner_arg0))
7101 return NULL_TREE;
7102
7103 /* If we expect that a comparison against the argument will fold to
7104 a constant, return the constant. In practice, this means a true
7105 constant or the address of a non-weak symbol. */
7106 inner = inner_arg0;
7107 STRIP_NOPS (inner);
7108 if (TREE_CODE (inner) == ADDR_EXPR)
7109 {
7110 do
7111 {
7112 inner = TREE_OPERAND (inner, 0);
7113 }
7114 while (TREE_CODE (inner) == COMPONENT_REF
7115 || TREE_CODE (inner) == ARRAY_REF);
7116 if ((TREE_CODE (inner) == VAR_DECL
7117 || TREE_CODE (inner) == FUNCTION_DECL)
7118 && DECL_WEAK (inner))
7119 return NULL_TREE;
7120 }
7121
7122 /* Otherwise, ARG0 already has the proper type for the return value. */
7123 return arg0;
7124 }
7125
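/* An illustrative sketch of the distribution above (example code only):

     __builtin_expect (a && b, 1)

   is folded into the equivalent of

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so the branch-probability hint reaches each short-circuited test.  */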
7126 /* Fold a call to __builtin_classify_type with argument ARG. */
7127
7128 static tree
7129 fold_builtin_classify_type (tree arg)
7130 {
7131 if (arg == 0)
7132 return build_int_cst (integer_type_node, no_type_class);
7133
7134 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7135 }
7136
7137 /* Fold a call to __builtin_strlen with argument ARG. */
7138
7139 static tree
7140 fold_builtin_strlen (location_t loc, tree type, tree arg)
7141 {
7142 if (!validate_arg (arg, POINTER_TYPE))
7143 return NULL_TREE;
7144 else
7145 {
7146 tree len = c_strlen (arg, 0);
7147
7148 if (len)
7149 return fold_convert_loc (loc, type, len);
7150
7151 return NULL_TREE;
7152 }
7153 }
7154
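/* An illustrative example: when c_strlen can measure the argument at
   compile time,

     size_t n = __builtin_strlen ("hello");

   folds directly to the constant 5, converted to TYPE.  */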
7155 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7156
7157 static tree
7158 fold_builtin_inf (location_t loc, tree type, int warn)
7159 {
7160 REAL_VALUE_TYPE real;
7161
7162 /* __builtin_inff is intended to be usable to define INFINITY on all
7163 targets. If an infinity is not available, INFINITY expands "to a
7164 positive constant of type float that overflows at translation
7165 time", footnote "In this case, using INFINITY will violate the
7166 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7167 Thus we pedwarn to ensure this constraint violation is
7168 diagnosed. */
7169 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7170 pedwarn (loc, 0, "target format does not support infinity");
7171
7172 real_inf (&real);
7173 return build_real (type, real);
7174 }
7175
7176 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7177
7178 static tree
7179 fold_builtin_nan (tree arg, tree type, int quiet)
7180 {
7181 REAL_VALUE_TYPE real;
7182 const char *str;
7183
7184 if (!validate_arg (arg, POINTER_TYPE))
7185 return NULL_TREE;
7186 str = c_getstr (arg);
7187 if (!str)
7188 return NULL_TREE;
7189
7190 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7191 return NULL_TREE;
7192
7193 return build_real (type, real);
7194 }
7195
7196 /* Return true if the floating point expression T has an integer value.
7197 We also allow +Inf, -Inf and NaN to be considered integer values. */
7198
7199 static bool
7200 integer_valued_real_p (tree t)
7201 {
7202 switch (TREE_CODE (t))
7203 {
7204 case FLOAT_EXPR:
7205 return true;
7206
7207 case ABS_EXPR:
7208 case SAVE_EXPR:
7209 return integer_valued_real_p (TREE_OPERAND (t, 0));
7210
7211 case COMPOUND_EXPR:
7212 case MODIFY_EXPR:
7213 case BIND_EXPR:
7214 return integer_valued_real_p (TREE_OPERAND (t, 1));
7215
7216 case PLUS_EXPR:
7217 case MINUS_EXPR:
7218 case MULT_EXPR:
7219 case MIN_EXPR:
7220 case MAX_EXPR:
7221 return integer_valued_real_p (TREE_OPERAND (t, 0))
7222 && integer_valued_real_p (TREE_OPERAND (t, 1));
7223
7224 case COND_EXPR:
7225 return integer_valued_real_p (TREE_OPERAND (t, 1))
7226 && integer_valued_real_p (TREE_OPERAND (t, 2));
7227
7228 case REAL_CST:
7229 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7230
7231 case NOP_EXPR:
7232 {
7233 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7234 if (TREE_CODE (type) == INTEGER_TYPE)
7235 return true;
7236 if (TREE_CODE (type) == REAL_TYPE)
7237 return integer_valued_real_p (TREE_OPERAND (t, 0));
7238 break;
7239 }
7240
7241 case CALL_EXPR:
7242 switch (builtin_mathfn_code (t))
7243 {
7244 CASE_FLT_FN (BUILT_IN_CEIL):
7245 CASE_FLT_FN (BUILT_IN_FLOOR):
7246 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7247 CASE_FLT_FN (BUILT_IN_RINT):
7248 CASE_FLT_FN (BUILT_IN_ROUND):
7249 CASE_FLT_FN (BUILT_IN_TRUNC):
7250 return true;
7251
7252 CASE_FLT_FN (BUILT_IN_FMIN):
7253 CASE_FLT_FN (BUILT_IN_FMAX):
7254 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7255 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7256
7257 default:
7258 break;
7259 }
7260 break;
7261
7262 default:
7263 break;
7264 }
7265 return false;
7266 }
7267
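/* A rough sketch of the recursion above, with example expressions:

     floor (x) + trunc (y)  -> true   (PLUS of two rounding calls)
     (double) i             -> true   (FLOAT_EXPR from an integer)
     x * 0.5                -> false  (nothing provable about the product)

   Later folds use this predicate to drop redundant rounding calls.  */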
7268 /* FNDECL is assumed to be a builtin where truncation can be propagated
7269 across (for instance floor((double)f) == (double)floorf (f)).
7270 Do the transformation for a call with argument ARG. */
7271
7272 static tree
7273 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7274 {
7275 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7276
7277 if (!validate_arg (arg, REAL_TYPE))
7278 return NULL_TREE;
7279
7280 /* Integer rounding functions are idempotent. */
7281 if (fcode == builtin_mathfn_code (arg))
7282 return arg;
7283
7284 /* If argument is already integer valued, and we don't need to worry
7285 about setting errno, there's no need to perform rounding. */
7286 if (! flag_errno_math && integer_valued_real_p (arg))
7287 return arg;
7288
7289 if (optimize)
7290 {
7291 tree arg0 = strip_float_extensions (arg);
7292 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7293 tree newtype = TREE_TYPE (arg0);
7294 tree decl;
7295
7296 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7297 && (decl = mathfn_built_in (newtype, fcode)))
7298 return fold_convert_loc (loc, ftype,
7299 build_call_expr_loc (loc, decl, 1,
7300 fold_convert_loc (loc,
7301 newtype,
7302 arg0)));
7303 }
7304 return NULL_TREE;
7305 }
7306
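/* An illustrative sketch of the helper above, assuming optimization is
   enabled and errno handling is not required:

     trunc ((double) f)   -> (double) truncf (f)   for float f
     floor (floor (x))    -> floor (x)             (idempotence)  */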
7307 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7308 the argument, for instance lround((double)f) -> lroundf (f).
7309 Do the transformation for a call with argument ARG. */
7310
7311 static tree
7312 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7313 {
7314 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7315
7316 if (!validate_arg (arg, REAL_TYPE))
7317 return NULL_TREE;
7318
7319 /* If argument is already integer valued, and we don't need to worry
7320 about setting errno, there's no need to perform rounding. */
7321 if (! flag_errno_math && integer_valued_real_p (arg))
7322 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7323 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7324
7325 if (optimize)
7326 {
7327 tree ftype = TREE_TYPE (arg);
7328 tree arg0 = strip_float_extensions (arg);
7329 tree newtype = TREE_TYPE (arg0);
7330 tree decl;
7331
7332 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7333 && (decl = mathfn_built_in (newtype, fcode)))
7334 return build_call_expr_loc (loc, decl, 1,
7335 fold_convert_loc (loc, newtype, arg0));
7336 }
7337
7338 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7339 sizeof (int) == sizeof (long). */
7340 if (TYPE_PRECISION (integer_type_node)
7341 == TYPE_PRECISION (long_integer_type_node))
7342 {
7343 tree newfn = NULL_TREE;
7344 switch (fcode)
7345 {
7346 CASE_FLT_FN (BUILT_IN_ICEIL):
7347 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7348 break;
7349
7350 CASE_FLT_FN (BUILT_IN_IFLOOR):
7351 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7352 break;
7353
7354 CASE_FLT_FN (BUILT_IN_IROUND):
7355 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7356 break;
7357
7358 CASE_FLT_FN (BUILT_IN_IRINT):
7359 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7360 break;
7361
7362 default:
7363 break;
7364 }
7365
7366 if (newfn)
7367 {
7368 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7369 return fold_convert_loc (loc,
7370 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7371 }
7372 }
7373
7374 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7375 sizeof (long long) == sizeof (long). */
7376 if (TYPE_PRECISION (long_long_integer_type_node)
7377 == TYPE_PRECISION (long_integer_type_node))
7378 {
7379 tree newfn = NULL_TREE;
7380 switch (fcode)
7381 {
7382 CASE_FLT_FN (BUILT_IN_LLCEIL):
7383 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7384 break;
7385
7386 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7387 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7388 break;
7389
7390 CASE_FLT_FN (BUILT_IN_LLROUND):
7391 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7392 break;
7393
7394 CASE_FLT_FN (BUILT_IN_LLRINT):
7395 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7396 break;
7397
7398 default:
7399 break;
7400 }
7401
7402 if (newfn)
7403 {
7404 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7405 return fold_convert_loc (loc,
7406 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7407 }
7408 }
7409
7410 return NULL_TREE;
7411 }
7412
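/* A sketch of the narrowing and canonicalization above (illustrative):

     lround ((double) f)  -> lroundf (f)               for float f
     llround (x)          -> (long long) lround (x)    when long and
                             long long have the same precision (LP64)  */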
7413 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7414 return type. Return NULL_TREE if no simplification can be made. */
7415
7416 static tree
7417 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7418 {
7419 tree res;
7420
7421 if (!validate_arg (arg, COMPLEX_TYPE)
7422 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7423 return NULL_TREE;
7424
7425 /* Calculate the result when the argument is a constant. */
7426 if (TREE_CODE (arg) == COMPLEX_CST
7427 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7428 type, mpfr_hypot)))
7429 return res;
7430
7431 if (TREE_CODE (arg) == COMPLEX_EXPR)
7432 {
7433 tree real = TREE_OPERAND (arg, 0);
7434 tree imag = TREE_OPERAND (arg, 1);
7435
7436 /* If either part is zero, cabs is fabs of the other. */
7437 if (real_zerop (real))
7438 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7439 if (real_zerop (imag))
7440 return fold_build1_loc (loc, ABS_EXPR, type, real);
7441
7442 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7443 if (flag_unsafe_math_optimizations
7444 && operand_equal_p (real, imag, OEP_PURE_SAME))
7445 {
7446 const REAL_VALUE_TYPE sqrt2_trunc
7447 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7448 STRIP_NOPS (real);
7449 return fold_build2_loc (loc, MULT_EXPR, type,
7450 fold_build1_loc (loc, ABS_EXPR, type, real),
7451 build_real (type, sqrt2_trunc));
7452 }
7453 }
7454
7455 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7456 if (TREE_CODE (arg) == NEGATE_EXPR
7457 || TREE_CODE (arg) == CONJ_EXPR)
7458 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7459
7460 /* Don't do this when optimizing for size. */
7461 if (flag_unsafe_math_optimizations
7462 && optimize && optimize_function_for_speed_p (cfun))
7463 {
7464 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7465
7466 if (sqrtfn != NULL_TREE)
7467 {
7468 tree rpart, ipart, result;
7469
7470 arg = builtin_save_expr (arg);
7471
7472 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7473 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7474
7475 rpart = builtin_save_expr (rpart);
7476 ipart = builtin_save_expr (ipart);
7477
7478 result = fold_build2_loc (loc, PLUS_EXPR, type,
7479 fold_build2_loc (loc, MULT_EXPR, type,
7480 rpart, rpart),
7481 fold_build2_loc (loc, MULT_EXPR, type,
7482 ipart, ipart));
7483
7484 return build_call_expr_loc (loc, sqrtfn, 1, result);
7485 }
7486 }
7487
7488 return NULL_TREE;
7489 }
7490
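/* Illustrative examples of the cabs folds above:

     cabs (x + 0.0i)  -> fabs (x)
     cabs (-z)        -> cabs (z)
     cabs (z)         -> sqrt (r*r + i*i)   with -funsafe-math-optimizations
                         when optimizing for speed, where r and i stand
                         for the real and imaginary parts of z.  */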
7491 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7492 complex tree type of the result. If NEG is true, the imaginary
7493 zero is negative. */
7494
7495 static tree
7496 build_complex_cproj (tree type, bool neg)
7497 {
7498 REAL_VALUE_TYPE rinf, rzero = dconst0;
7499
7500 real_inf (&rinf);
7501 rzero.sign = neg;
7502 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7503 build_real (TREE_TYPE (type), rzero));
7504 }
7505
7506 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7507 return type. Return NULL_TREE if no simplification can be made. */
7508
7509 static tree
7510 fold_builtin_cproj (location_t loc, tree arg, tree type)
7511 {
7512 if (!validate_arg (arg, COMPLEX_TYPE)
7513 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7514 return NULL_TREE;
7515
7516 /* If there are no infinities, return arg. */
7517 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7518 return non_lvalue_loc (loc, arg);
7519
7520 /* Calculate the result when the argument is a constant. */
7521 if (TREE_CODE (arg) == COMPLEX_CST)
7522 {
7523 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7524 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7525
7526 if (real_isinf (real) || real_isinf (imag))
7527 return build_complex_cproj (type, imag->sign);
7528 else
7529 return arg;
7530 }
7531 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7532 {
7533 tree real = TREE_OPERAND (arg, 0);
7534 tree imag = TREE_OPERAND (arg, 1);
7535
7536 STRIP_NOPS (real);
7537 STRIP_NOPS (imag);
7538
7539 /* If the real part is inf and the imag part is known to be
7540 nonnegative, return (inf + 0i). Remember side-effects are
7541 possible in the imag part. */
7542 if (TREE_CODE (real) == REAL_CST
7543 && real_isinf (TREE_REAL_CST_PTR (real))
7544 && tree_expr_nonnegative_p (imag))
7545 return omit_one_operand_loc (loc, type,
7546 build_complex_cproj (type, false),
7547 arg);
7548
7549 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7550 Remember side-effects are possible in the real part. */
7551 if (TREE_CODE (imag) == REAL_CST
7552 && real_isinf (TREE_REAL_CST_PTR (imag)))
7553 return
7554 omit_one_operand_loc (loc, type,
7555 build_complex_cproj (type, TREE_REAL_CST_PTR
7556 (imag)->sign), arg);
7557 }
7558
7559 return NULL_TREE;
7560 }
7561
7562 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7563 Return NULL_TREE if no simplification can be made. */
7564
7565 static tree
7566 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7567 {
7568
7569 enum built_in_function fcode;
7570 tree res;
7571
7572 if (!validate_arg (arg, REAL_TYPE))
7573 return NULL_TREE;
7574
7575 /* Calculate the result when the argument is a constant. */
7576 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7577 return res;
7578
7579 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7580 fcode = builtin_mathfn_code (arg);
7581 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7582 {
7583 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7584 arg = fold_build2_loc (loc, MULT_EXPR, type,
7585 CALL_EXPR_ARG (arg, 0),
7586 build_real (type, dconsthalf));
7587 return build_call_expr_loc (loc, expfn, 1, arg);
7588 }
7589
7590 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7591 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7592 {
7593 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7594
7595 if (powfn)
7596 {
7597 tree arg0 = CALL_EXPR_ARG (arg, 0);
7598 tree tree_root;
7599 /* The inner root was either sqrt or cbrt. */
7600 /* This was a conditional expression but it triggered a bug
7601 in Sun C 5.5. */
7602 REAL_VALUE_TYPE dconstroot;
7603 if (BUILTIN_SQRT_P (fcode))
7604 dconstroot = dconsthalf;
7605 else
7606 dconstroot = dconst_third ();
7607
7608 /* Adjust for the outer root. */
7609 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7610 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7611 tree_root = build_real (type, dconstroot);
7612 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7613 }
7614 }
7615
7616 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7617 if (flag_unsafe_math_optimizations
7618 && (fcode == BUILT_IN_POW
7619 || fcode == BUILT_IN_POWF
7620 || fcode == BUILT_IN_POWL))
7621 {
7622 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7623 tree arg0 = CALL_EXPR_ARG (arg, 0);
7624 tree arg1 = CALL_EXPR_ARG (arg, 1);
7625 tree narg1;
7626 if (!tree_expr_nonnegative_p (arg0))
7627 arg0 = build1 (ABS_EXPR, type, arg0);
7628 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7629 build_real (type, dconsthalf));
7630 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7631 }
7632
7633 return NULL_TREE;
7634 }
7635
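/* An illustrative sketch of the unsafe-math sqrt folds above:

     sqrt (exp (x))     -> exp (x * 0.5)
     sqrt (cbrt (x))    -> pow (x, 1.0/6.0)
     sqrt (pow (x, y))  -> pow (fabs (x), y * 0.5)

   all of which require -funsafe-math-optimizations.  */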
7636 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7637 Return NULL_TREE if no simplification can be made. */
7638
7639 static tree
7640 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7641 {
7642 const enum built_in_function fcode = builtin_mathfn_code (arg);
7643 tree res;
7644
7645 if (!validate_arg (arg, REAL_TYPE))
7646 return NULL_TREE;
7647
7648 /* Calculate the result when the argument is a constant. */
7649 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7650 return res;
7651
7652 if (flag_unsafe_math_optimizations)
7653 {
7654 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7655 if (BUILTIN_EXPONENT_P (fcode))
7656 {
7657 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7658 const REAL_VALUE_TYPE third_trunc =
7659 real_value_truncate (TYPE_MODE (type), dconst_third ());
7660 arg = fold_build2_loc (loc, MULT_EXPR, type,
7661 CALL_EXPR_ARG (arg, 0),
7662 build_real (type, third_trunc));
7663 return build_call_expr_loc (loc, expfn, 1, arg);
7664 }
7665
7666 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7667 if (BUILTIN_SQRT_P (fcode))
7668 {
7669 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7670
7671 if (powfn)
7672 {
7673 tree arg0 = CALL_EXPR_ARG (arg, 0);
7674 tree tree_root;
7675 REAL_VALUE_TYPE dconstroot = dconst_third ();
7676
7677 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7678 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7679 tree_root = build_real (type, dconstroot);
7680 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7681 }
7682 }
7683
7684 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7685 if (BUILTIN_CBRT_P (fcode))
7686 {
7687 tree arg0 = CALL_EXPR_ARG (arg, 0);
7688 if (tree_expr_nonnegative_p (arg0))
7689 {
7690 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7691
7692 if (powfn)
7693 {
7694 tree tree_root;
7695 REAL_VALUE_TYPE dconstroot;
7696
7697 real_arithmetic (&dconstroot, MULT_EXPR,
7698 dconst_third_ptr (), dconst_third_ptr ());
7699 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7700 tree_root = build_real (type, dconstroot);
7701 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7702 }
7703 }
7704 }
7705
7706 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7707 if (fcode == BUILT_IN_POW
7708 || fcode == BUILT_IN_POWF
7709 || fcode == BUILT_IN_POWL)
7710 {
7711 tree arg00 = CALL_EXPR_ARG (arg, 0);
7712 tree arg01 = CALL_EXPR_ARG (arg, 1);
7713 if (tree_expr_nonnegative_p (arg00))
7714 {
7715 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7716 const REAL_VALUE_TYPE dconstroot
7717 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7718 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7719 build_real (type, dconstroot));
7720 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7721 }
7722 }
7723 }
7724 return NULL_TREE;
7725 }
7726
7727 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7728 TYPE is the type of the return value. Return NULL_TREE if no
7729 simplification can be made. */
7730
7731 static tree
7732 fold_builtin_cos (location_t loc,
7733 tree arg, tree type, tree fndecl)
7734 {
7735 tree res, narg;
7736
7737 if (!validate_arg (arg, REAL_TYPE))
7738 return NULL_TREE;
7739
7740 /* Calculate the result when the argument is a constant. */
7741 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7742 return res;
7743
7744 /* Optimize cos(-x) into cos (x). */
7745 if ((narg = fold_strip_sign_ops (arg)))
7746 return build_call_expr_loc (loc, fndecl, 1, narg);
7747
7748 return NULL_TREE;
7749 }
7750
7751 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7752 Return NULL_TREE if no simplification can be made. */
7753
7754 static tree
7755 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7756 {
7757 if (validate_arg (arg, REAL_TYPE))
7758 {
7759 tree res, narg;
7760
7761 /* Calculate the result when the argument is a constant. */
7762 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7763 return res;
7764
7765 /* Optimize cosh(-x) into cosh (x). */
7766 if ((narg = fold_strip_sign_ops (arg)))
7767 return build_call_expr_loc (loc, fndecl, 1, narg);
7768 }
7769
7770 return NULL_TREE;
7771 }
7772
7773 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7774 argument ARG. TYPE is the type of the return value. Return
7775 NULL_TREE if no simplification can be made. */
7776
7777 static tree
7778 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7779 bool hyper)
7780 {
7781 if (validate_arg (arg, COMPLEX_TYPE)
7782 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7783 {
7784 tree tmp;
7785
7786 /* Calculate the result when the argument is a constant. */
7787 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7788 return tmp;
7789
7790 /* Optimize fn(-x) into fn(x). */
7791 if ((tmp = fold_strip_sign_ops (arg)))
7792 return build_call_expr_loc (loc, fndecl, 1, tmp);
7793 }
7794
7795 return NULL_TREE;
7796 }
7797
7798 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7799 Return NULL_TREE if no simplification can be made. */
7800
7801 static tree
7802 fold_builtin_tan (tree arg, tree type)
7803 {
7804 enum built_in_function fcode;
7805 tree res;
7806
7807 if (!validate_arg (arg, REAL_TYPE))
7808 return NULL_TREE;
7809
7810 /* Calculate the result when the argument is a constant. */
7811 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7812 return res;
7813
7814 /* Optimize tan(atan(x)) = x. */
7815 fcode = builtin_mathfn_code (arg);
7816 if (flag_unsafe_math_optimizations
7817 && (fcode == BUILT_IN_ATAN
7818 || fcode == BUILT_IN_ATANF
7819 || fcode == BUILT_IN_ATANL))
7820 return CALL_EXPR_ARG (arg, 0);
7821
7822 return NULL_TREE;
7823 }
7824
7825 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7826 NULL_TREE if no simplification can be made. */
7827
7828 static tree
7829 fold_builtin_sincos (location_t loc,
7830 tree arg0, tree arg1, tree arg2)
7831 {
7832 tree type;
7833 tree res, fn, call;
7834
7835 if (!validate_arg (arg0, REAL_TYPE)
7836 || !validate_arg (arg1, POINTER_TYPE)
7837 || !validate_arg (arg2, POINTER_TYPE))
7838 return NULL_TREE;
7839
7840 type = TREE_TYPE (arg0);
7841
7842 /* Calculate the result when the argument is a constant. */
7843 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7844 return res;
7845
7846 /* Canonicalize sincos to cexpi. */
7847 if (!TARGET_C99_FUNCTIONS)
7848 return NULL_TREE;
7849 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7850 if (!fn)
7851 return NULL_TREE;
7852
7853 call = build_call_expr_loc (loc, fn, 1, arg0);
7854 call = builtin_save_expr (call);
7855
7856 return build2 (COMPOUND_EXPR, void_type_node,
7857 build2 (MODIFY_EXPR, void_type_node,
7858 build_fold_indirect_ref_loc (loc, arg1),
7859 build1 (IMAGPART_EXPR, type, call)),
7860 build2 (MODIFY_EXPR, void_type_node,
7861 build_fold_indirect_ref_loc (loc, arg2),
7862 build1 (REALPART_EXPR, type, call)));
7863 }
7864
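/* A sketch of the cexpi canonicalization above: on targets with C99
   functions,

     sincos (x, &s, &c);

   becomes the equivalent of

     __complex__ double t = cexpi (x);
     s = __imag__ t;
     c = __real__ t;

   letting later passes share one evaluation of the trigonometric pair.  */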
7865 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7866 NULL_TREE if no simplification can be made. */
7867
7868 static tree
7869 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7870 {
7871 tree rtype;
7872 tree realp, imagp, ifn;
7873 tree res;
7874
7875 if (!validate_arg (arg0, COMPLEX_TYPE)
7876 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7877 return NULL_TREE;
7878
7879 /* Calculate the result when the argument is a constant. */
7880 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7881 return res;
7882
7883 rtype = TREE_TYPE (TREE_TYPE (arg0));
7884
7885 /* If we can figure out the real part of arg0 and it is constant
7886 zero, fold to cexpi. */
7887 if (!TARGET_C99_FUNCTIONS)
7888 return NULL_TREE;
7889 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7890 if (!ifn)
7891 return NULL_TREE;
7892
7893 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7894 && real_zerop (realp))
7895 {
7896 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7897 return build_call_expr_loc (loc, ifn, 1, narg);
7898 }
7899
7900 /* If we can easily decompose the real and imaginary parts, split
7901 cexp into exp (r) * cexpi (i). */
7902 if (flag_unsafe_math_optimizations
7903 && realp)
7904 {
7905 tree rfn, rcall, icall;
7906
7907 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7908 if (!rfn)
7909 return NULL_TREE;
7910
7911 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7912 if (!imagp)
7913 return NULL_TREE;
7914
7915 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7916 icall = builtin_save_expr (icall);
7917 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7918 rcall = builtin_save_expr (rcall);
7919 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7920 fold_build2_loc (loc, MULT_EXPR, rtype,
7921 rcall,
7922 fold_build1_loc (loc, REALPART_EXPR,
7923 rtype, icall)),
7924 fold_build2_loc (loc, MULT_EXPR, rtype,
7925 rcall,
7926 fold_build1_loc (loc, IMAGPART_EXPR,
7927 rtype, icall)));
7928 }
7929
7930 return NULL_TREE;
7931 }
7932
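/* Illustrative examples of the cexp folds above (C99 targets only):

     cexp (0.0 + yi)  -> cexpi (y)
     cexp (x + yi)    -> exp (x) * cexpi (y)   with
                         -funsafe-math-optimizations

   where the product is formed separately on the real and imaginary
   parts of cexpi (y).  */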
7933 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7934 Return NULL_TREE if no simplification can be made. */
7935
7936 static tree
7937 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7938 {
7939 if (!validate_arg (arg, REAL_TYPE))
7940 return NULL_TREE;
7941
7942 /* Optimize trunc of constant value. */
7943 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7944 {
7945 REAL_VALUE_TYPE r, x;
7946 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7947
7948 x = TREE_REAL_CST (arg);
7949 real_trunc (&r, TYPE_MODE (type), &x);
7950 return build_real (type, r);
7951 }
7952
7953 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7954 }
7955
7956 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7957 Return NULL_TREE if no simplification can be made. */
7958
7959 static tree
7960 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7961 {
7962 if (!validate_arg (arg, REAL_TYPE))
7963 return NULL_TREE;
7964
7965 /* Optimize floor of constant value. */
7966 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7967 {
7968 REAL_VALUE_TYPE x;
7969
7970 x = TREE_REAL_CST (arg);
7971 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7972 {
7973 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7974 REAL_VALUE_TYPE r;
7975
7976 real_floor (&r, TYPE_MODE (type), &x);
7977 return build_real (type, r);
7978 }
7979 }
7980
7981 /* Fold floor (x) where x is nonnegative to trunc (x). */
7982 if (tree_expr_nonnegative_p (arg))
7983 {
7984 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7985 if (truncfn)
7986 return build_call_expr_loc (loc, truncfn, 1, arg);
7987 }
7988
7989 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7990 }
7991
7992 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7993 Return NULL_TREE if no simplification can be made. */
7994
7995 static tree
7996 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7997 {
7998 if (!validate_arg (arg, REAL_TYPE))
7999 return NULL_TREE;
8000
8001 /* Optimize ceil of constant value. */
8002 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8003 {
8004 REAL_VALUE_TYPE x;
8005
8006 x = TREE_REAL_CST (arg);
8007 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8008 {
8009 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8010 REAL_VALUE_TYPE r;
8011
8012 real_ceil (&r, TYPE_MODE (type), &x);
8013 return build_real (type, r);
8014 }
8015 }
8016
8017 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8018 }
8019
8020 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8021 Return NULL_TREE if no simplification can be made. */
8022
8023 static tree
8024 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8025 {
8026 if (!validate_arg (arg, REAL_TYPE))
8027 return NULL_TREE;
8028
8029 /* Optimize round of constant value. */
8030 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8031 {
8032 REAL_VALUE_TYPE x;
8033
8034 x = TREE_REAL_CST (arg);
8035 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8036 {
8037 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8038 REAL_VALUE_TYPE r;
8039
8040 real_round (&r, TYPE_MODE (type), &x);
8041 return build_real (type, r);
8042 }
8043 }
8044
8045 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8046 }
8047
8048 /* Fold function call to builtin lround, lroundf or lroundl (or the
8049 corresponding long long versions) and other rounding functions. ARG
8050 is the argument to the call. Return NULL_TREE if no simplification
8051 can be made. */
8052
8053 static tree
8054 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8055 {
8056 if (!validate_arg (arg, REAL_TYPE))
8057 return NULL_TREE;
8058
8059 /* Optimize lround of constant value. */
8060 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8061 {
8062 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8063
8064 if (real_isfinite (&x))
8065 {
8066 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8067 tree ftype = TREE_TYPE (arg);
8068 double_int val;
8069 REAL_VALUE_TYPE r;
8070
8071 switch (DECL_FUNCTION_CODE (fndecl))
8072 {
8073 CASE_FLT_FN (BUILT_IN_IFLOOR):
8074 CASE_FLT_FN (BUILT_IN_LFLOOR):
8075 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8076 real_floor (&r, TYPE_MODE (ftype), &x);
8077 break;
8078
8079 CASE_FLT_FN (BUILT_IN_ICEIL):
8080 CASE_FLT_FN (BUILT_IN_LCEIL):
8081 CASE_FLT_FN (BUILT_IN_LLCEIL):
8082 real_ceil (&r, TYPE_MODE (ftype), &x);
8083 break;
8084
8085 CASE_FLT_FN (BUILT_IN_IROUND):
8086 CASE_FLT_FN (BUILT_IN_LROUND):
8087 CASE_FLT_FN (BUILT_IN_LLROUND):
8088 real_round (&r, TYPE_MODE (ftype), &x);
8089 break;
8090
8091 default:
8092 gcc_unreachable ();
8093 }
8094
8095 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
8096 if (double_int_fits_to_tree_p (itype, val))
8097 return double_int_to_tree (itype, val);
8098 }
8099 }
8100
8101 switch (DECL_FUNCTION_CODE (fndecl))
8102 {
8103 CASE_FLT_FN (BUILT_IN_LFLOOR):
8104 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8105 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8106 if (tree_expr_nonnegative_p (arg))
8107 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8108 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8109 break;
8110 default:;
8111 }
8112
8113 return fold_fixed_mathfn (loc, fndecl, arg);
8114 }
8115
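/* Illustrative constant folds for the function above:

     lround (2.5)  -> 3   (real_round rounds halfway cases away
                           from zero)
     lceil (2.1)   -> 3
     lfloor (x)    -> (long) x   when x is known nonnegative, via
                                 FIX_TRUNC_EXPR.  */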
8116 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8117 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8118 the argument to the call. Return NULL_TREE if no simplification can
8119 be made. */
8120
8121 static tree
8122 fold_builtin_bitop (tree fndecl, tree arg)
8123 {
8124 if (!validate_arg (arg, INTEGER_TYPE))
8125 return NULL_TREE;
8126
8127 /* Optimize for constant argument. */
8128 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8129 {
8130 HOST_WIDE_INT hi, width, result;
8131 unsigned HOST_WIDE_INT lo;
8132 tree type;
8133
8134 type = TREE_TYPE (arg);
8135 width = TYPE_PRECISION (type);
8136 lo = TREE_INT_CST_LOW (arg);
8137
8138 /* Clear all the bits that are beyond the type's precision. */
8139 if (width > HOST_BITS_PER_WIDE_INT)
8140 {
8141 hi = TREE_INT_CST_HIGH (arg);
8142 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8143 hi &= ~((unsigned HOST_WIDE_INT) (-1)
8144 << (width - HOST_BITS_PER_WIDE_INT));
8145 }
8146 else
8147 {
8148 hi = 0;
8149 if (width < HOST_BITS_PER_WIDE_INT)
8150 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8151 }
8152
8153 switch (DECL_FUNCTION_CODE (fndecl))
8154 {
8155 CASE_INT_FN (BUILT_IN_FFS):
8156 if (lo != 0)
8157 result = ffs_hwi (lo);
8158 else if (hi != 0)
8159 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8160 else
8161 result = 0;
8162 break;
8163
8164 CASE_INT_FN (BUILT_IN_CLZ):
8165 if (hi != 0)
8166 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8167 else if (lo != 0)
8168 result = width - floor_log2 (lo) - 1;
8169 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8170 result = width;
8171 break;
8172
8173 CASE_INT_FN (BUILT_IN_CTZ):
8174 if (lo != 0)
8175 result = ctz_hwi (lo);
8176 else if (hi != 0)
8177 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8178 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8179 result = width;
8180 break;
8181
8182 CASE_INT_FN (BUILT_IN_CLRSB):
8183 if (width > HOST_BITS_PER_WIDE_INT
8184 && (hi & ((unsigned HOST_WIDE_INT) 1
8185 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8186 {
8187 hi = ~hi & ~((unsigned HOST_WIDE_INT) (-1)
8188 << (width - HOST_BITS_PER_WIDE_INT - 1));
8189 lo = ~lo;
8190 }
8191 else if (width <= HOST_BITS_PER_WIDE_INT
8192 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8193 lo = ~lo & ~((unsigned HOST_WIDE_INT) (-1) << (width - 1));
8194 if (hi != 0)
8195 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8196 else if (lo != 0)
8197 result = width - floor_log2 (lo) - 2;
8198 else
8199 result = width - 1;
8200 break;
8201
8202 CASE_INT_FN (BUILT_IN_POPCOUNT):
8203 result = 0;
8204 while (lo)
8205 result++, lo &= lo - 1;
8206 while (hi)
8207 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8208 break;
8209
8210 CASE_INT_FN (BUILT_IN_PARITY):
8211 result = 0;
8212 while (lo)
8213 result++, lo &= lo - 1;
8214 while (hi)
8215 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8216 result &= 1;
8217 break;
8218
8219 default:
8220 gcc_unreachable ();
8221 }
8222
8223 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8224 }
8225
8226 return NULL_TREE;
8227 }
8228
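/* The POPCOUNT and PARITY cases above use Kernighan's trick of
   clearing one set bit per iteration.  A standalone equivalent,
   shown here only as an illustrative sketch:

     static int
     popcount_hwi (unsigned long long x)
     {
       int n = 0;
       while (x)
         {
           x &= x - 1;
           n++;
         }
       return n;
     }

   Each "x &= x - 1" clears the least significant set bit, so the loop
   runs once per set bit.  Parity is then just (popcount_hwi (x) & 1).  */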
8229 /* Fold function call to builtin_bswap and the short, long and long long
8230 variants. Return NULL_TREE if no simplification can be made. */
8231 static tree
8232 fold_builtin_bswap (tree fndecl, tree arg)
8233 {
8234 if (! validate_arg (arg, INTEGER_TYPE))
8235 return NULL_TREE;
8236
8237 /* Optimize constant value. */
8238 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8239 {
8240 HOST_WIDE_INT hi, width, r_hi = 0;
8241 unsigned HOST_WIDE_INT lo, r_lo = 0;
8242 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8243
8244 width = TYPE_PRECISION (type);
8245 lo = TREE_INT_CST_LOW (arg);
8246 hi = TREE_INT_CST_HIGH (arg);
8247
8248 switch (DECL_FUNCTION_CODE (fndecl))
8249 {
8250 case BUILT_IN_BSWAP16:
8251 case BUILT_IN_BSWAP32:
8252 case BUILT_IN_BSWAP64:
8253 {
8254 int s;
8255
8256 for (s = 0; s < width; s += 8)
8257 {
8258 int d = width - s - 8;
8259 unsigned HOST_WIDE_INT byte;
8260
8261 if (s < HOST_BITS_PER_WIDE_INT)
8262 byte = (lo >> s) & 0xff;
8263 else
8264 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8265
8266 if (d < HOST_BITS_PER_WIDE_INT)
8267 r_lo |= byte << d;
8268 else
8269 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8270 }
8271 }
8272
8273 break;
8274
8275 default:
8276 gcc_unreachable ();
8277 }
8278
8279 if (width < HOST_BITS_PER_WIDE_INT)
8280 return build_int_cst (type, r_lo);
8281 else
8282 return build_int_cst_wide (type, r_lo, r_hi);
8283 }
8284
8285 return NULL_TREE;
8286 }
8287
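/* An illustrative example: the loop above mirrors the byte at bit
   position S to bit position WIDTH - S - 8, so

     __builtin_bswap32 (0x11223344)  -> 0x44332211
     __builtin_bswap16 (0x1122)      -> 0x2211

   with the result assembled in (r_hi, r_lo) across the host-word
   boundary when needed.  */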
8288 /* A subroutine of fold_builtin to fold the various logarithmic
8289 functions. Return NULL_TREE if no simplification can be made.
8290 FUNC is the corresponding MPFR logarithm function. */
8291
8292 static tree
8293 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8294 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8295 {
8296 if (validate_arg (arg, REAL_TYPE))
8297 {
8298 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8299 tree res;
8300 const enum built_in_function fcode = builtin_mathfn_code (arg);
8301
8302 /* Calculate the result when the argument is a constant. */
8303 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8304 return res;
8305
8306 /* Special case, optimize logN(expN(x)) = x. */
8307 if (flag_unsafe_math_optimizations
8308 && ((func == mpfr_log
8309 && (fcode == BUILT_IN_EXP
8310 || fcode == BUILT_IN_EXPF
8311 || fcode == BUILT_IN_EXPL))
8312 || (func == mpfr_log2
8313 && (fcode == BUILT_IN_EXP2
8314 || fcode == BUILT_IN_EXP2F
8315 || fcode == BUILT_IN_EXP2L))
8316 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8317 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8318
8319 /* Optimize logN(func()) for various exponential functions. We
8320 want to determine the value "x" and the power "exponent" in
8321 order to transform logN(x**exponent) into exponent*logN(x). */
8322 if (flag_unsafe_math_optimizations)
8323 {
8324 tree exponent = 0, x = 0;
8325
8326 switch (fcode)
8327 {
8328 CASE_FLT_FN (BUILT_IN_EXP):
8329 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8330 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8331 dconst_e ()));
8332 exponent = CALL_EXPR_ARG (arg, 0);
8333 break;
8334 CASE_FLT_FN (BUILT_IN_EXP2):
8335 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8336 x = build_real (type, dconst2);
8337 exponent = CALL_EXPR_ARG (arg, 0);
8338 break;
8339 CASE_FLT_FN (BUILT_IN_EXP10):
8340 CASE_FLT_FN (BUILT_IN_POW10):
8341 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8342 {
8343 REAL_VALUE_TYPE dconst10;
8344 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8345 x = build_real (type, dconst10);
8346 }
8347 exponent = CALL_EXPR_ARG (arg, 0);
8348 break;
8349 CASE_FLT_FN (BUILT_IN_SQRT):
8350 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8351 x = CALL_EXPR_ARG (arg, 0);
8352 exponent = build_real (type, dconsthalf);
8353 break;
8354 CASE_FLT_FN (BUILT_IN_CBRT):
8355 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8356 x = CALL_EXPR_ARG (arg, 0);
8357 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8358 dconst_third ()));
8359 break;
8360 CASE_FLT_FN (BUILT_IN_POW):
8361 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8362 x = CALL_EXPR_ARG (arg, 0);
8363 exponent = CALL_EXPR_ARG (arg, 1);
8364 break;
8365 default:
8366 break;
8367 }
8368
8369 /* Now perform the optimization. */
8370 if (x && exponent)
8371 {
8372 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8373 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8374 }
8375 }
8376 }
8377
8378 return NULL_TREE;
8379 }
8380
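/* An illustrative sketch of the unsafe-math logarithm folds above:

     log (exp (x))     -> x
     log2 (sqrt (x))   -> 0.5 * log2 (x)
     log (pow (x, y))  -> y * log (x)

   all guarded by -funsafe-math-optimizations.  */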
8381 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8382 NULL_TREE if no simplification can be made. */
8383
8384 static tree
8385 fold_builtin_hypot (location_t loc, tree fndecl,
8386 tree arg0, tree arg1, tree type)
8387 {
8388 tree res, narg0, narg1;
8389
8390 if (!validate_arg (arg0, REAL_TYPE)
8391 || !validate_arg (arg1, REAL_TYPE))
8392 return NULL_TREE;
8393
8394 /* Calculate the result when the argument is a constant. */
8395 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8396 return res;
8397
8398 /* If either argument to hypot has a negate or abs, strip that off.
8399 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8400 narg0 = fold_strip_sign_ops (arg0);
8401 narg1 = fold_strip_sign_ops (arg1);
8402 if (narg0 || narg1)
8403 {
8404 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8405 narg1 ? narg1 : arg1);
8406 }
8407
8408 /* If either argument is zero, hypot is fabs of the other. */
8409 if (real_zerop (arg0))
8410 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8411 else if (real_zerop (arg1))
8412 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8413
8414 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8415 if (flag_unsafe_math_optimizations
8416 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8417 {
8418 const REAL_VALUE_TYPE sqrt2_trunc
8419 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8420 return fold_build2_loc (loc, MULT_EXPR, type,
8421 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8422 build_real (type, sqrt2_trunc));
8423 }
8424
8425 return NULL_TREE;
8426 }
8427
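/* Illustrative examples of the hypot folds above:

     hypot (-x, fabs (y))  -> hypot (x, y)   (sign stripping)
     hypot (x, 0.0)        -> fabs (x)
     hypot (x, x)          -> fabs (x) * sqrt (2)   with
                              -funsafe-math-optimizations  */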
8428
8429 /* Fold a builtin function call to pow, powf, or powl. Return
8430 NULL_TREE if no simplification can be made. */
8431 static tree
8432 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8433 {
8434 tree res;
8435
8436 if (!validate_arg (arg0, REAL_TYPE)
8437 || !validate_arg (arg1, REAL_TYPE))
8438 return NULL_TREE;
8439
8440 /* Calculate the result when the argument is a constant. */
8441 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8442 return res;
8443
8444 /* Optimize pow(1.0,y) = 1.0. */
8445 if (real_onep (arg0))
8446 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8447
8448 if (TREE_CODE (arg1) == REAL_CST
8449 && !TREE_OVERFLOW (arg1))
8450 {
8451 REAL_VALUE_TYPE cint;
8452 REAL_VALUE_TYPE c;
8453 HOST_WIDE_INT n;
8454
8455 c = TREE_REAL_CST (arg1);
8456
8457 /* Optimize pow(x,0.0) = 1.0. */
8458 if (REAL_VALUES_EQUAL (c, dconst0))
8459 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8460 arg0);
8461
8462 /* Optimize pow(x,1.0) = x. */
8463 if (REAL_VALUES_EQUAL (c, dconst1))
8464 return arg0;
8465
8466 /* Optimize pow(x,-1.0) = 1.0/x. */
8467 if (REAL_VALUES_EQUAL (c, dconstm1))
8468 return fold_build2_loc (loc, RDIV_EXPR, type,
8469 build_real (type, dconst1), arg0);
8470
8471 /* Optimize pow(x,0.5) = sqrt(x). */
8472 if (flag_unsafe_math_optimizations
8473 && REAL_VALUES_EQUAL (c, dconsthalf))
8474 {
8475 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8476
8477 if (sqrtfn != NULL_TREE)
8478 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8479 }
8480
8481 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8482 if (flag_unsafe_math_optimizations)
8483 {
8484 const REAL_VALUE_TYPE dconstroot
8485 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8486
8487 if (REAL_VALUES_EQUAL (c, dconstroot))
8488 {
8489 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8490 if (cbrtfn != NULL_TREE)
8491 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8492 }
8493 }
8494
8495 /* Check for an integer exponent. */
8496 n = real_to_integer (&c);
8497 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8498 if (real_identical (&c, &cint))
8499 {
8500 /* Attempt to evaluate pow at compile-time, unless this should
8501 raise an exception. */
8502 if (TREE_CODE (arg0) == REAL_CST
8503 && !TREE_OVERFLOW (arg0)
8504 && (n > 0
8505 || (!flag_trapping_math && !flag_errno_math)
8506 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8507 {
8508 REAL_VALUE_TYPE x;
8509 bool inexact;
8510
8511 x = TREE_REAL_CST (arg0);
8512 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8513 if (flag_unsafe_math_optimizations || !inexact)
8514 return build_real (type, x);
8515 }
8516
8517 /* Strip sign ops from even integer powers. */
8518 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8519 {
8520 tree narg0 = fold_strip_sign_ops (arg0);
8521 if (narg0)
8522 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8523 }
8524 }
8525 }
8526
8527 if (flag_unsafe_math_optimizations)
8528 {
8529 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8530
8531 /* Optimize pow(expN(x),y) = expN(x*y). */
8532 if (BUILTIN_EXPONENT_P (fcode))
8533 {
8534 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8535 tree arg = CALL_EXPR_ARG (arg0, 0);
8536 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8537 return build_call_expr_loc (loc, expfn, 1, arg);
8538 }
8539
8540 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8541 if (BUILTIN_SQRT_P (fcode))
8542 {
8543 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8544 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8545 build_real (type, dconsthalf));
8546 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8547 }
8548
8549 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8550 if (BUILTIN_CBRT_P (fcode))
8551 {
8552 tree arg = CALL_EXPR_ARG (arg0, 0);
8553 if (tree_expr_nonnegative_p (arg))
8554 {
8555 const REAL_VALUE_TYPE dconstroot
8556 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8557 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8558 build_real (type, dconstroot));
8559 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8560 }
8561 }
8562
8563 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8564 if (fcode == BUILT_IN_POW
8565 || fcode == BUILT_IN_POWF
8566 || fcode == BUILT_IN_POWL)
8567 {
8568 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8569 if (tree_expr_nonnegative_p (arg00))
8570 {
8571 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8572 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8573 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8574 }
8575 }
8576 }
8577
8578 return NULL_TREE;
8579 }
8580
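/* A sketch of the pow folds above, as source-level examples:

     pow (x, 0.0)         -> 1.0
     pow (x, -1.0)        -> 1.0 / x
     pow (x, 0.5)         -> sqrt (x)           (unsafe math)
     pow (sqrt (x), y)    -> pow (x, y * 0.5)   (unsafe math)
     pow (pow (x, y), z)  -> pow (x, y * z)     (unsafe math, x >= 0)  */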
8581 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8582 Return NULL_TREE if no simplification can be made. */
8583 static tree
8584 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8585 tree arg0, tree arg1, tree type)
8586 {
8587 if (!validate_arg (arg0, REAL_TYPE)
8588 || !validate_arg (arg1, INTEGER_TYPE))
8589 return NULL_TREE;
8590
8591 /* Optimize pow(1.0,y) = 1.0. */
8592 if (real_onep (arg0))
8593 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8594
8595 if (host_integerp (arg1, 0))
8596 {
8597 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8598
8599 /* Evaluate powi at compile-time. */
8600 if (TREE_CODE (arg0) == REAL_CST
8601 && !TREE_OVERFLOW (arg0))
8602 {
8603 REAL_VALUE_TYPE x;
8604 x = TREE_REAL_CST (arg0);
8605 real_powi (&x, TYPE_MODE (type), &x, c);
8606 return build_real (type, x);
8607 }
8608
8609 /* Optimize pow(x,0) = 1.0. */
8610 if (c == 0)
8611 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8612 arg0);
8613
8614 /* Optimize pow(x,1) = x. */
8615 if (c == 1)
8616 return arg0;
8617
8618 /* Optimize pow(x,-1) = 1.0/x. */
8619 if (c == -1)
8620 return fold_build2_loc (loc, RDIV_EXPR, type,
8621 build_real (type, dconst1), arg0);
8622 }
8623
8624 return NULL_TREE;
8625 }
8626
8627 /* A subroutine of fold_builtin to fold the various exponent
8628 functions. Return NULL_TREE if no simplification can be made.
8629 FUNC is the corresponding MPFR exponent function. */
8630
8631 static tree
8632 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8633 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8634 {
8635 if (validate_arg (arg, REAL_TYPE))
8636 {
8637 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8638 tree res;
8639
8640 /* Calculate the result when the argument is a constant. */
8641 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8642 return res;
8643
8644 /* Optimize expN(logN(x)) = x. */
8645 if (flag_unsafe_math_optimizations)
8646 {
8647 const enum built_in_function fcode = builtin_mathfn_code (arg);
8648
8649 if ((func == mpfr_exp
8650 && (fcode == BUILT_IN_LOG
8651 || fcode == BUILT_IN_LOGF
8652 || fcode == BUILT_IN_LOGL))
8653 || (func == mpfr_exp2
8654 && (fcode == BUILT_IN_LOG2
8655 || fcode == BUILT_IN_LOG2F
8656 || fcode == BUILT_IN_LOG2L))
8657 || (func == mpfr_exp10
8658 && (fcode == BUILT_IN_LOG10
8659 || fcode == BUILT_IN_LOG10F
8660 || fcode == BUILT_IN_LOG10L)))
8661 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8662 }
8663 }
8664
8665 return NULL_TREE;
8666 }
8667
8668 /* Return true if VAR is a VAR_DECL or a component thereof. */
8669
8670 static bool
8671 var_decl_component_p (tree var)
8672 {
8673 tree inner = var;
8674 while (handled_component_p (inner))
8675 inner = TREE_OPERAND (inner, 0);
8676 return SSA_VAR_P (inner);
8677 }
8678
8679 /* Fold function call to builtin memset. Return
8680 NULL_TREE if no simplification can be made. */
8681
8682 static tree
8683 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8684 tree type, bool ignore)
8685 {
8686 tree var, ret, etype;
8687 unsigned HOST_WIDE_INT length, cval;
8688
8689 if (! validate_arg (dest, POINTER_TYPE)
8690 || ! validate_arg (c, INTEGER_TYPE)
8691 || ! validate_arg (len, INTEGER_TYPE))
8692 return NULL_TREE;
8693
8694 if (! host_integerp (len, 1))
8695 return NULL_TREE;
8696
8697 /* If the LEN parameter is zero, return DEST. */
8698 if (integer_zerop (len))
8699 return omit_one_operand_loc (loc, type, dest, c);
8700
8701 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8702 return NULL_TREE;
8703
8704 var = dest;
8705 STRIP_NOPS (var);
8706 if (TREE_CODE (var) != ADDR_EXPR)
8707 return NULL_TREE;
8708
8709 var = TREE_OPERAND (var, 0);
8710 if (TREE_THIS_VOLATILE (var))
8711 return NULL_TREE;
8712
8713 etype = TREE_TYPE (var);
8714 if (TREE_CODE (etype) == ARRAY_TYPE)
8715 etype = TREE_TYPE (etype);
8716
8717 if (!INTEGRAL_TYPE_P (etype)
8718 && !POINTER_TYPE_P (etype))
8719 return NULL_TREE;
8720
8721 if (! var_decl_component_p (var))
8722 return NULL_TREE;
8723
8724 length = tree_low_cst (len, 1);
8725 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8726 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8727 return NULL_TREE;
8728
8729 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8730 return NULL_TREE;
8731
8732 if (integer_zerop (c))
8733 cval = 0;
8734 else
8735 {
8736 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8737 return NULL_TREE;
8738
8739 cval = TREE_INT_CST_LOW (c);
8740 cval &= 0xff;
8741 cval |= cval << 8;
8742 cval |= cval << 16;
8743 cval |= (cval << 31) << 1;
8744 }
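/* Worked example (editor's note): for c == 0xAB the replication above
   proceeds as

       cval = 0xAB
       cval |= cval << 8;          ->  0xABAB
       cval |= cval << 16;         ->  0xABABABAB
       cval |= (cval << 31) << 1;  ->  0xABABABABABABABAB  (64-bit host)

   The last shift is written as (cval << 31) << 1 rather than
   cval << 32 so that it stays well defined when HOST_WIDE_INT is only
   32 bits wide; there the double shift simply yields zero and the OR
   is a no-op, which is the desired result (the length check above
   already restricts LENGTH to at most 4 bytes in that case).  */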
8745
8746 ret = build_int_cst_type (etype, cval);
8747 var = build_fold_indirect_ref_loc (loc,
8748 fold_convert_loc (loc,
8749 build_pointer_type (etype),
8750 dest));
8751 ret = build2 (MODIFY_EXPR, etype, var, ret);
8752 if (ignore)
8753 return ret;
8754
8755 return omit_one_operand_loc (loc, type, dest, ret);
8756 }
8757
8758 /* Fold function call to builtin bzero. Return
8759 NULL_TREE if no simplification can be made. */
8760
8761 static tree
8762 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8763 {
8764 if (! validate_arg (dest, POINTER_TYPE)
8765 || ! validate_arg (size, INTEGER_TYPE))
8766 return NULL_TREE;
8767
8768 if (!ignore)
8769 return NULL_TREE;
8770
8771 /* New argument list transforming bzero(ptr x, int y) to
8772 memset(ptr x, int 0, size_t y). This is done this way
8773 so that if it isn't expanded inline, we fall back to
8774 calling bzero instead of memset. */
8775
8776 return fold_builtin_memset (loc, dest, integer_zero_node,
8777 fold_convert_loc (loc, size_type_node, size),
8778 void_type_node, ignore);
8779 }
8780
8781 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8782 NULL_TREE if no simplification can be made.
8783 If ENDP is 0, return DEST (like memcpy).
8784 If ENDP is 1, return DEST+LEN (like mempcpy).
8785 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8786 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8787 (memmove). */
8788
8789 static tree
8790 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8791 tree len, tree type, bool ignore, int endp)
8792 {
8793 tree destvar, srcvar, expr;
8794
8795 if (! validate_arg (dest, POINTER_TYPE)
8796 || ! validate_arg (src, POINTER_TYPE)
8797 || ! validate_arg (len, INTEGER_TYPE))
8798 return NULL_TREE;
8799
8800 /* If the LEN parameter is zero, return DEST. */
8801 if (integer_zerop (len))
8802 return omit_one_operand_loc (loc, type, dest, src);
8803
8804 /* If SRC and DEST are the same (and not volatile), return
8805 DEST{,+LEN,+LEN-1}. */
8806 if (operand_equal_p (src, dest, 0))
8807 expr = len;
8808 else
8809 {
8810 tree srctype, desttype;
8811 unsigned int src_align, dest_align;
8812 tree off0;
8813
8814 if (endp == 3)
8815 {
8816 src_align = get_pointer_alignment (src);
8817 dest_align = get_pointer_alignment (dest);
8818
8819 /* Both DEST and SRC must be pointer types.
8820 ??? This is what old code did. Is the testing for pointer types
8821 really mandatory?
8822
8823 If either SRC is readonly or length is 1, we can use memcpy. */
8824 if (!dest_align || !src_align)
8825 return NULL_TREE;
8826 if (readonly_data_expr (src)
8827 || (host_integerp (len, 1)
8828 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8829 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8830 {
8831 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8832 if (!fn)
8833 return NULL_TREE;
8834 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8835 }
8836
8837 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8838 if (TREE_CODE (src) == ADDR_EXPR
8839 && TREE_CODE (dest) == ADDR_EXPR)
8840 {
8841 tree src_base, dest_base, fn;
8842 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8843 HOST_WIDE_INT size = -1;
8844 HOST_WIDE_INT maxsize = -1;
8845
8846 srcvar = TREE_OPERAND (src, 0);
8847 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8848 &size, &maxsize);
8849 destvar = TREE_OPERAND (dest, 0);
8850 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8851 &size, &maxsize);
8852 if (host_integerp (len, 1))
8853 maxsize = tree_low_cst (len, 1);
8854 else
8855 maxsize = -1;
8856 src_offset /= BITS_PER_UNIT;
8857 dest_offset /= BITS_PER_UNIT;
8858 if (SSA_VAR_P (src_base)
8859 && SSA_VAR_P (dest_base))
8860 {
8861 if (operand_equal_p (src_base, dest_base, 0)
8862 && ranges_overlap_p (src_offset, maxsize,
8863 dest_offset, maxsize))
8864 return NULL_TREE;
8865 }
8866 else if (TREE_CODE (src_base) == MEM_REF
8867 && TREE_CODE (dest_base) == MEM_REF)
8868 {
8869 double_int off;
8870 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8871 TREE_OPERAND (dest_base, 0), 0))
8872 return NULL_TREE;
8873 off = double_int_add (mem_ref_offset (src_base),
8874 shwi_to_double_int (src_offset));
8875 if (!double_int_fits_in_shwi_p (off))
8876 return NULL_TREE;
8877 src_offset = off.low;
8878 off = double_int_add (mem_ref_offset (dest_base),
8879 shwi_to_double_int (dest_offset));
8880 if (!double_int_fits_in_shwi_p (off))
8881 return NULL_TREE;
8882 dest_offset = off.low;
8883 if (ranges_overlap_p (src_offset, maxsize,
8884 dest_offset, maxsize))
8885 return NULL_TREE;
8886 }
8887 else
8888 return NULL_TREE;
8889
8890 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8891 if (!fn)
8892 return NULL_TREE;
8893 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8894 }
8895
8896 /* If the destination and source do not alias, optimize into
8897 memcpy as well. */
8898 if ((is_gimple_min_invariant (dest)
8899 || TREE_CODE (dest) == SSA_NAME)
8900 && (is_gimple_min_invariant (src)
8901 || TREE_CODE (src) == SSA_NAME))
8902 {
8903 ao_ref destr, srcr;
8904 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8905 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8906 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8907 {
8908 tree fn;
8909 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8910 if (!fn)
8911 return NULL_TREE;
8912 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8913 }
8914 }
8915
8916 return NULL_TREE;
8917 }
8918
8919 if (!host_integerp (len, 0))
8920 return NULL_TREE;
8921 /* FIXME:
8922 This logic loses for arguments like (type *)malloc (sizeof (type)),
8923 since we strip the casts off the (void *) value returned by malloc.
8924 Perhaps we ought to inherit the type from the non-VOID argument here? */
8925 STRIP_NOPS (src);
8926 STRIP_NOPS (dest);
8927 if (!POINTER_TYPE_P (TREE_TYPE (src))
8928 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8929 return NULL_TREE;
8930 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
8931 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8932 {
8933 tree tem = TREE_OPERAND (src, 0);
8934 STRIP_NOPS (tem);
8935 if (tem != TREE_OPERAND (src, 0))
8936 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8937 }
8938 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8939 {
8940 tree tem = TREE_OPERAND (dest, 0);
8941 STRIP_NOPS (tem);
8942 if (tem != TREE_OPERAND (dest, 0))
8943 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8944 }
8945 srctype = TREE_TYPE (TREE_TYPE (src));
8946 if (TREE_CODE (srctype) == ARRAY_TYPE
8947 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8948 {
8949 srctype = TREE_TYPE (srctype);
8950 STRIP_NOPS (src);
8951 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8952 }
8953 desttype = TREE_TYPE (TREE_TYPE (dest));
8954 if (TREE_CODE (desttype) == ARRAY_TYPE
8955 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8956 {
8957 desttype = TREE_TYPE (desttype);
8958 STRIP_NOPS (dest);
8959 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8960 }
8961 if (TREE_ADDRESSABLE (srctype)
8962 || TREE_ADDRESSABLE (desttype))
8963 return NULL_TREE;
8964
8965 src_align = get_pointer_alignment (src);
8966 dest_align = get_pointer_alignment (dest);
8967 if (dest_align < TYPE_ALIGN (desttype)
8968 || src_align < TYPE_ALIGN (srctype))
8969 return NULL_TREE;
8970
8971 if (!ignore)
8972 dest = builtin_save_expr (dest);
8973
8974 /* Build accesses at offset zero with a ref-all character type. */
8975 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8976 ptr_mode, true), 0);
8977
8978 destvar = dest;
8979 STRIP_NOPS (destvar);
8980 if (TREE_CODE (destvar) == ADDR_EXPR
8981 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8982 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8983 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8984 else
8985 destvar = NULL_TREE;
8986
8987 srcvar = src;
8988 STRIP_NOPS (srcvar);
8989 if (TREE_CODE (srcvar) == ADDR_EXPR
8990 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8991 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8992 {
8993 if (!destvar
8994 || src_align >= TYPE_ALIGN (desttype))
8995 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8996 srcvar, off0);
8997 else if (!STRICT_ALIGNMENT)
8998 {
8999 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
9000 src_align);
9001 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
9002 }
9003 else
9004 srcvar = NULL_TREE;
9005 }
9006 else
9007 srcvar = NULL_TREE;
9008
9009 if (srcvar == NULL_TREE && destvar == NULL_TREE)
9010 return NULL_TREE;
9011
9012 if (srcvar == NULL_TREE)
9013 {
9014 STRIP_NOPS (src);
9015 if (src_align >= TYPE_ALIGN (desttype))
9016 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
9017 else
9018 {
9019 if (STRICT_ALIGNMENT)
9020 return NULL_TREE;
9021 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
9022 src_align);
9023 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
9024 }
9025 }
9026 else if (destvar == NULL_TREE)
9027 {
9028 STRIP_NOPS (dest);
9029 if (dest_align >= TYPE_ALIGN (srctype))
9030 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
9031 else
9032 {
9033 if (STRICT_ALIGNMENT)
9034 return NULL_TREE;
9035 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
9036 dest_align);
9037 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
9038 }
9039 }
9040
9041 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
9042 }
9043
9044 if (ignore)
9045 return expr;
9046
9047 if (endp == 0 || endp == 3)
9048 return omit_one_operand_loc (loc, type, dest, expr);
9049
9050 if (expr == len)
9051 expr = NULL_TREE;
9052
9053 if (endp == 2)
9054 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9055 ssize_int (1));
9056
9057 dest = fold_build_pointer_plus_loc (loc, dest, len);
9058 dest = fold_convert_loc (loc, type, dest);
9059 if (expr)
9060 dest = omit_one_operand_loc (loc, type, dest, expr);
9061 return dest;
9062 }
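/* Illustration (editor's sketch, hypothetical user code): when the
   source and destination are addresses of whole objects whose size
   equals LEN and whose alignment suffices, the code above folds

       struct S a, b;
       memcpy (&a, &b, sizeof (struct S));

   into the aggregate assignment a = b, built as a MODIFY_EXPR between
   two MEM_REFs at offset zero (the ref-all pointer type of OFF0 keeps
   the new accesses from acquiring a too-specific alias set).  Later
   passes can then emit plain moves instead of a library call.  */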
9063
9064 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9065 If LEN is not NULL, it represents the length of the string to be
9066 copied. Return NULL_TREE if no simplification can be made. */
9067
9068 tree
9069 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9070 {
9071 tree fn;
9072
9073 if (!validate_arg (dest, POINTER_TYPE)
9074 || !validate_arg (src, POINTER_TYPE))
9075 return NULL_TREE;
9076
9077 /* If SRC and DEST are the same (and not volatile), return DEST. */
9078 if (operand_equal_p (src, dest, 0))
9079 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9080
9081 if (optimize_function_for_size_p (cfun))
9082 return NULL_TREE;
9083
9084 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9085 if (!fn)
9086 return NULL_TREE;
9087
9088 if (!len)
9089 {
9090 len = c_strlen (src, 1);
9091 if (! len || TREE_SIDE_EFFECTS (len))
9092 return NULL_TREE;
9093 }
9094
9095 len = fold_convert_loc (loc, size_type_node, len);
9096 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9097 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9098 build_call_expr_loc (loc, fn, 3, dest, src, len));
9099 }
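/* Illustration (editor's sketch): with a constant source string the
   fold above rewrites

       strcpy (d, "abc")  ->  memcpy (d, "abc", 4)

   where 4 == strlen ("abc") + 1, so the terminating NUL is copied as
   well, and the result is D converted back to strcpy's return type.
   The transformation is skipped when optimizing for size, since the
   memcpy call carries an extra length argument.  */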
9100
9101 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9102 Return NULL_TREE if no simplification can be made. */
9103
9104 static tree
9105 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9106 {
9107 tree fn, len, lenp1, call, type;
9108
9109 if (!validate_arg (dest, POINTER_TYPE)
9110 || !validate_arg (src, POINTER_TYPE))
9111 return NULL_TREE;
9112
9113 len = c_strlen (src, 1);
9114 if (!len
9115 || TREE_CODE (len) != INTEGER_CST)
9116 return NULL_TREE;
9117
9118 if (optimize_function_for_size_p (cfun)
9119 /* If length is zero it's small enough. */
9120 && !integer_zerop (len))
9121 return NULL_TREE;
9122
9123 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9124 if (!fn)
9125 return NULL_TREE;
9126
9127 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9128 fold_convert_loc (loc, size_type_node, len),
9129 build_int_cst (size_type_node, 1));
9130 /* We use dest twice in building our expression. Save it from
9131 multiple expansions. */
9132 dest = builtin_save_expr (dest);
9133 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9134
9135 type = TREE_TYPE (TREE_TYPE (fndecl));
9136 dest = fold_build_pointer_plus_loc (loc, dest, len);
9137 dest = fold_convert_loc (loc, type, dest);
9138 dest = omit_one_operand_loc (loc, type, dest, call);
9139 return dest;
9140 }
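/* Illustration (editor's sketch): for a source of known constant
   length the fold above gives

       stpcpy (d, "abc")  ->  (memcpy (d, "abc", 4), d + 3)

   i.e. a compound expression built with omit_one_operand_loc, so the
   copy is still performed while the value of the whole expression is
   the end pointer DEST + LEN.  */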
9141
9142 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9143 If SLEN is not NULL, it represents the length of the source string.
9144 Return NULL_TREE if no simplification can be made. */
9145
9146 tree
9147 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9148 tree src, tree len, tree slen)
9149 {
9150 tree fn;
9151
9152 if (!validate_arg (dest, POINTER_TYPE)
9153 || !validate_arg (src, POINTER_TYPE)
9154 || !validate_arg (len, INTEGER_TYPE))
9155 return NULL_TREE;
9156
9157 /* If the LEN parameter is zero, return DEST. */
9158 if (integer_zerop (len))
9159 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9160
9161 /* We can't compare slen with len as constants below if len is not a
9162 constant. */
9163 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9164 return NULL_TREE;
9165
9166 if (!slen)
9167 slen = c_strlen (src, 1);
9168
9169 /* Now, we must be passed a constant src ptr parameter. */
9170 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9171 return NULL_TREE;
9172
9173 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9174
9175 /* We do not support simplification of this case, though we do
9176 support it when expanding trees into RTL. */
9177 /* FIXME: generate a call to __builtin_memset. */
9178 if (tree_int_cst_lt (slen, len))
9179 return NULL_TREE;
9180
9181 /* OK, transform into builtin memcpy. */
9182 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9183 if (!fn)
9184 return NULL_TREE;
9185
9186 len = fold_convert_loc (loc, size_type_node, len);
9187 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9188 build_call_expr_loc (loc, fn, 3, dest, src, len));
9189 }
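/* Illustration (editor's sketch): the fold above fires only when LEN
   does not exceed strlen (SRC) + 1, since otherwise strncpy must also
   zero-pad the destination (the memset FIXME case above):

       strncpy (d, "abc", 3)  ->  memcpy (d, "abc", 3)
       strncpy (d, "abc", 4)  ->  memcpy (d, "abc", 4)
       strncpy (d, "abc", 8)  ->  unchanged (padding needed)  */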
9190
9191 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9192 arguments to the call, and TYPE is its return type.
9193 Return NULL_TREE if no simplification can be made. */
9194
9195 static tree
9196 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9197 {
9198 if (!validate_arg (arg1, POINTER_TYPE)
9199 || !validate_arg (arg2, INTEGER_TYPE)
9200 || !validate_arg (len, INTEGER_TYPE))
9201 return NULL_TREE;
9202 else
9203 {
9204 const char *p1;
9205
9206 if (TREE_CODE (arg2) != INTEGER_CST
9207 || !host_integerp (len, 1))
9208 return NULL_TREE;
9209
9210 p1 = c_getstr (arg1);
9211 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9212 {
9213 char c;
9214 const char *r;
9215 tree tem;
9216
9217 if (target_char_cast (arg2, &c))
9218 return NULL_TREE;
9219
9220 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9221
9222 if (r == NULL)
9223 return build_int_cst (TREE_TYPE (arg1), 0);
9224
9225 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9226 return fold_convert_loc (loc, type, tem);
9227 }
9228 return NULL_TREE;
9229 }
9230 }
9231
9232 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9233 Return NULL_TREE if no simplification can be made. */
9234
9235 static tree
9236 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9237 {
9238 const char *p1, *p2;
9239
9240 if (!validate_arg (arg1, POINTER_TYPE)
9241 || !validate_arg (arg2, POINTER_TYPE)
9242 || !validate_arg (len, INTEGER_TYPE))
9243 return NULL_TREE;
9244
9245 /* If the LEN parameter is zero, return zero. */
9246 if (integer_zerop (len))
9247 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9248 arg1, arg2);
9249
9250 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9251 if (operand_equal_p (arg1, arg2, 0))
9252 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9253
9254 p1 = c_getstr (arg1);
9255 p2 = c_getstr (arg2);
9256
9257 /* If all arguments are constant, and the value of len is not greater
9258 than the lengths of arg1 and arg2, evaluate at compile-time. */
9259 if (host_integerp (len, 1) && p1 && p2
9260 && compare_tree_int (len, strlen (p1) + 1) <= 0
9261 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9262 {
9263 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9264
9265 if (r > 0)
9266 return integer_one_node;
9267 else if (r < 0)
9268 return integer_minus_one_node;
9269 else
9270 return integer_zero_node;
9271 }
9272
9273 /* If the len parameter is one, return an expression corresponding to
9274 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9275 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9276 {
9277 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9278 tree cst_uchar_ptr_node
9279 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9280
9281 tree ind1
9282 = fold_convert_loc (loc, integer_type_node,
9283 build1 (INDIRECT_REF, cst_uchar_node,
9284 fold_convert_loc (loc,
9285 cst_uchar_ptr_node,
9286 arg1)));
9287 tree ind2
9288 = fold_convert_loc (loc, integer_type_node,
9289 build1 (INDIRECT_REF, cst_uchar_node,
9290 fold_convert_loc (loc,
9291 cst_uchar_ptr_node,
9292 arg2)));
9293 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9294 }
9295
9296 return NULL_TREE;
9297 }
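/* Illustration (editor's sketch): examples of the memcmp folds above.

       memcmp (p, p, n)          ->  0
       memcmp ("abc", "abd", 3)  ->  -1   (evaluated at compile time;
                                           the host memcmp result is
                                           normalized to -1/0/1)
       memcmp (p, q, 1)          ->  *(const unsigned char *) p
                                     - *(const unsigned char *) q  */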
9298
9299 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9300 Return NULL_TREE if no simplification can be made. */
9301
9302 static tree
9303 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9304 {
9305 const char *p1, *p2;
9306
9307 if (!validate_arg (arg1, POINTER_TYPE)
9308 || !validate_arg (arg2, POINTER_TYPE))
9309 return NULL_TREE;
9310
9311 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9312 if (operand_equal_p (arg1, arg2, 0))
9313 return integer_zero_node;
9314
9315 p1 = c_getstr (arg1);
9316 p2 = c_getstr (arg2);
9317
9318 if (p1 && p2)
9319 {
9320 const int i = strcmp (p1, p2);
9321 if (i < 0)
9322 return integer_minus_one_node;
9323 else if (i > 0)
9324 return integer_one_node;
9325 else
9326 return integer_zero_node;
9327 }
9328
9329 /* If the second arg is "", return *(const unsigned char*)arg1. */
9330 if (p2 && *p2 == '\0')
9331 {
9332 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9333 tree cst_uchar_ptr_node
9334 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9335
9336 return fold_convert_loc (loc, integer_type_node,
9337 build1 (INDIRECT_REF, cst_uchar_node,
9338 fold_convert_loc (loc,
9339 cst_uchar_ptr_node,
9340 arg1)));
9341 }
9342
9343 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9344 if (p1 && *p1 == '\0')
9345 {
9346 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9347 tree cst_uchar_ptr_node
9348 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9349
9350 tree temp
9351 = fold_convert_loc (loc, integer_type_node,
9352 build1 (INDIRECT_REF, cst_uchar_node,
9353 fold_convert_loc (loc,
9354 cst_uchar_ptr_node,
9355 arg2)));
9356 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9357 }
9358
9359 return NULL_TREE;
9360 }
9361
9362 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9363 Return NULL_TREE if no simplification can be made. */
9364
9365 static tree
9366 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9367 {
9368 const char *p1, *p2;
9369
9370 if (!validate_arg (arg1, POINTER_TYPE)
9371 || !validate_arg (arg2, POINTER_TYPE)
9372 || !validate_arg (len, INTEGER_TYPE))
9373 return NULL_TREE;
9374
9375 /* If the LEN parameter is zero, return zero. */
9376 if (integer_zerop (len))
9377 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9378 arg1, arg2);
9379
9380 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9381 if (operand_equal_p (arg1, arg2, 0))
9382 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9383
9384 p1 = c_getstr (arg1);
9385 p2 = c_getstr (arg2);
9386
9387 if (host_integerp (len, 1) && p1 && p2)
9388 {
9389 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9390 if (i > 0)
9391 return integer_one_node;
9392 else if (i < 0)
9393 return integer_minus_one_node;
9394 else
9395 return integer_zero_node;
9396 }
9397
9398 /* If the second arg is "", and the length is greater than zero,
9399 return *(const unsigned char*)arg1. */
9400 if (p2 && *p2 == '\0'
9401 && TREE_CODE (len) == INTEGER_CST
9402 && tree_int_cst_sgn (len) == 1)
9403 {
9404 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9405 tree cst_uchar_ptr_node
9406 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9407
9408 return fold_convert_loc (loc, integer_type_node,
9409 build1 (INDIRECT_REF, cst_uchar_node,
9410 fold_convert_loc (loc,
9411 cst_uchar_ptr_node,
9412 arg1)));
9413 }
9414
9415 /* If the first arg is "", and the length is greater than zero,
9416 return -*(const unsigned char*)arg2. */
9417 if (p1 && *p1 == '\0'
9418 && TREE_CODE (len) == INTEGER_CST
9419 && tree_int_cst_sgn (len) == 1)
9420 {
9421 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9422 tree cst_uchar_ptr_node
9423 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9424
9425 tree temp = fold_convert_loc (loc, integer_type_node,
9426 build1 (INDIRECT_REF, cst_uchar_node,
9427 fold_convert_loc (loc,
9428 cst_uchar_ptr_node,
9429 arg2)));
9430 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9431 }
9432
9433 /* If the len parameter is one, return an expression corresponding to
9434 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9435 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9436 {
9437 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9438 tree cst_uchar_ptr_node
9439 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9440
9441 tree ind1 = fold_convert_loc (loc, integer_type_node,
9442 build1 (INDIRECT_REF, cst_uchar_node,
9443 fold_convert_loc (loc,
9444 cst_uchar_ptr_node,
9445 arg1)));
9446 tree ind2 = fold_convert_loc (loc, integer_type_node,
9447 build1 (INDIRECT_REF, cst_uchar_node,
9448 fold_convert_loc (loc,
9449 cst_uchar_ptr_node,
9450 arg2)));
9451 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9452 }
9453
9454 return NULL_TREE;
9455 }
9456
9457 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9458 ARG. Return NULL_TREE if no simplification can be made. */
9459
9460 static tree
9461 fold_builtin_signbit (location_t loc, tree arg, tree type)
9462 {
9463 if (!validate_arg (arg, REAL_TYPE))
9464 return NULL_TREE;
9465
9466 /* If ARG is a compile-time constant, determine the result. */
9467 if (TREE_CODE (arg) == REAL_CST
9468 && !TREE_OVERFLOW (arg))
9469 {
9470 REAL_VALUE_TYPE c;
9471
9472 c = TREE_REAL_CST (arg);
9473 return (REAL_VALUE_NEGATIVE (c)
9474 ? build_one_cst (type)
9475 : build_zero_cst (type));
9476 }
9477
9478 /* If ARG is non-negative, the result is always zero. */
9479 if (tree_expr_nonnegative_p (arg))
9480 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9481
9482 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9483 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9484 return fold_convert (type,
9485 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9486 build_real (TREE_TYPE (arg), dconst0)));
9487
9488 return NULL_TREE;
9489 }
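/* Illustration (editor's sketch): examples of the signbit folds above.

       signbit (-2.5)      ->  1
       signbit (2.5)       ->  0
       signbit (fabs (x))  ->  0        (ARG is known non-negative)
       signbit (x)         ->  x < 0.0  (only for formats without
                                         signed zeros: signbit (-0.0)
                                         must be nonzero, yet
                                         -0.0 < 0.0 is false)  */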
9490
9491 /* Fold function call to builtin copysign, copysignf or copysignl with
9492 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9493 be made. */
9494
9495 static tree
9496 fold_builtin_copysign (location_t loc, tree fndecl,
9497 tree arg1, tree arg2, tree type)
9498 {
9499 tree tem;
9500
9501 if (!validate_arg (arg1, REAL_TYPE)
9502 || !validate_arg (arg2, REAL_TYPE))
9503 return NULL_TREE;
9504
9505 /* copysign(X,X) is X. */
9506 if (operand_equal_p (arg1, arg2, 0))
9507 return fold_convert_loc (loc, type, arg1);
9508
9509 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9510 if (TREE_CODE (arg1) == REAL_CST
9511 && TREE_CODE (arg2) == REAL_CST
9512 && !TREE_OVERFLOW (arg1)
9513 && !TREE_OVERFLOW (arg2))
9514 {
9515 REAL_VALUE_TYPE c1, c2;
9516
9517 c1 = TREE_REAL_CST (arg1);
9518 c2 = TREE_REAL_CST (arg2);
9519 /* c1.sign := c2.sign. */
9520 real_copysign (&c1, &c2);
9521 return build_real (type, c1);
9522 }
9523
9524 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9525 Remember to evaluate Y for side-effects. */
9526 if (tree_expr_nonnegative_p (arg2))
9527 return omit_one_operand_loc (loc, type,
9528 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9529 arg2);
9530
9531 /* Strip sign changing operations for the first argument. */
9532 tem = fold_strip_sign_ops (arg1);
9533 if (tem)
9534 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9535
9536 return NULL_TREE;
9537 }
9538
9539 /* Fold a call to builtin isascii with argument ARG. */
9540
9541 static tree
9542 fold_builtin_isascii (location_t loc, tree arg)
9543 {
9544 if (!validate_arg (arg, INTEGER_TYPE))
9545 return NULL_TREE;
9546 else
9547 {
9548 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9549 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9550 build_int_cst (integer_type_node,
9551 ~ (unsigned HOST_WIDE_INT) 0x7f));
9552 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9553 arg, integer_zero_node);
9554 }
9555 }
9556
9557 /* Fold a call to builtin toascii with argument ARG. */
9558
9559 static tree
9560 fold_builtin_toascii (location_t loc, tree arg)
9561 {
9562 if (!validate_arg (arg, INTEGER_TYPE))
9563 return NULL_TREE;
9564
9565 /* Transform toascii(c) -> (c & 0x7f). */
9566 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9567 build_int_cst (integer_type_node, 0x7f));
9568 }
9569
9570 /* Fold a call to builtin isdigit with argument ARG. */
9571
9572 static tree
9573 fold_builtin_isdigit (location_t loc, tree arg)
9574 {
9575 if (!validate_arg (arg, INTEGER_TYPE))
9576 return NULL_TREE;
9577 else
9578 {
9579 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9580 /* According to the C standard, isdigit is unaffected by locale.
9581 However, it definitely is affected by the target character set. */
9582 unsigned HOST_WIDE_INT target_digit0
9583 = lang_hooks.to_target_charset ('0');
9584
9585 if (target_digit0 == 0)
9586 return NULL_TREE;
9587
9588 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9589 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9590 build_int_cst (unsigned_type_node, target_digit0));
9591 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9592 build_int_cst (unsigned_type_node, 9));
9593 }
9594 }
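/* Illustration (editor's note): the single unsigned comparison above
   replaces the two signed ones in c >= '0' && c <= '9'.  For C below
   '0' the subtraction wraps around to a huge unsigned value, e.g.

       c == '5':  (unsigned) '5' - '0' == 5           ->  5 <= 9, true
       c == ' ':  (unsigned) ' ' - '0' == 0xfffffff0  ->  false

   (assuming an ASCII-like charset for the example; the code queries
   lang_hooks.to_target_charset for the target's actual '0').  */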
9595
9596 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9597
9598 static tree
9599 fold_builtin_fabs (location_t loc, tree arg, tree type)
9600 {
9601 if (!validate_arg (arg, REAL_TYPE))
9602 return NULL_TREE;
9603
9604 arg = fold_convert_loc (loc, type, arg);
9605 if (TREE_CODE (arg) == REAL_CST)
9606 return fold_abs_const (arg, type);
9607 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9608 }
9609
9610 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9611
9612 static tree
9613 fold_builtin_abs (location_t loc, tree arg, tree type)
9614 {
9615 if (!validate_arg (arg, INTEGER_TYPE))
9616 return NULL_TREE;
9617
9618 arg = fold_convert_loc (loc, type, arg);
9619 if (TREE_CODE (arg) == INTEGER_CST)
9620 return fold_abs_const (arg, type);
9621 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9622 }
9623
9624 /* Fold a fma operation with arguments ARG[012]. */
9625
9626 tree
9627 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9628 tree type, tree arg0, tree arg1, tree arg2)
9629 {
9630 if (TREE_CODE (arg0) == REAL_CST
9631 && TREE_CODE (arg1) == REAL_CST
9632 && TREE_CODE (arg2) == REAL_CST)
9633 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9634
9635 return NULL_TREE;
9636 }
9637
9638 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9639
9640 static tree
9641 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9642 {
9643 if (validate_arg (arg0, REAL_TYPE)
9644 && validate_arg (arg1, REAL_TYPE)
9645 && validate_arg (arg2, REAL_TYPE))
9646 {
9647 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9648 if (tem)
9649 return tem;
9650
9651 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9652 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9653 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9654 }
9655 return NULL_TREE;
9656 }
9657
9658 /* Fold a call to builtin fmin or fmax. */
9659
9660 static tree
9661 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9662 tree type, bool max)
9663 {
9664 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9665 {
9666 /* Calculate the result when the argument is a constant. */
9667 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9668
9669 if (res)
9670 return res;
9671
9672 /* If either argument is NaN, return the other one. Avoid the
9673 transformation if we get (and honor) a signalling NaN. Using
9674 omit_one_operand() ensures we create a non-lvalue. */
9675 if (TREE_CODE (arg0) == REAL_CST
9676 && real_isnan (&TREE_REAL_CST (arg0))
9677 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9678 || ! TREE_REAL_CST (arg0).signalling))
9679 return omit_one_operand_loc (loc, type, arg1, arg0);
9680 if (TREE_CODE (arg1) == REAL_CST
9681 && real_isnan (&TREE_REAL_CST (arg1))
9682 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9683 || ! TREE_REAL_CST (arg1).signalling))
9684 return omit_one_operand_loc (loc, type, arg0, arg1);
9685
9686 /* Transform fmin/fmax(x,x) -> x. */
9687 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9688 return omit_one_operand_loc (loc, type, arg0, arg1);
9689
9690 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9691 functions to return the numeric arg if the other one is NaN.
9692 These tree codes don't honor that, so only transform if
9693 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9694 handled, so we don't have to worry about it either. */
9695 if (flag_finite_math_only)
9696 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9697 fold_convert_loc (loc, type, arg0),
9698 fold_convert_loc (loc, type, arg1));
9699 }
9700 return NULL_TREE;
9701 }
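/* Illustration (editor's sketch): examples of the fmin/fmax folds.

       fmin (x, x)                   ->  x
       fmax (x, __builtin_nan (""))  ->  x   (a quiet NaN argument is
                                              dropped in favor of the
                                              other operand)

   The MIN_EXPR/MAX_EXPR form, e.g. fmax (x, y) -> MAX_EXPR <x, y>, is
   emitted only under -ffinite-math-only because those tree codes make
   no promise about NaN operands.  */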
9702
9703 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9704
9705 static tree
9706 fold_builtin_carg (location_t loc, tree arg, tree type)
9707 {
9708 if (validate_arg (arg, COMPLEX_TYPE)
9709 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9710 {
9711 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9712
9713 if (atan2_fn)
9714 {
9715 tree new_arg = builtin_save_expr (arg);
9716 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9717 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9718 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9719 }
9720 }
9721
9722 return NULL_TREE;
9723 }
9724
9725 /* Fold a call to builtin logb/ilogb. */
9726
9727 static tree
9728 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9729 {
9730 if (! validate_arg (arg, REAL_TYPE))
9731 return NULL_TREE;
9732
9733 STRIP_NOPS (arg);
9734
9735 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9736 {
9737 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9738
9739 switch (value->cl)
9740 {
9741 case rvc_nan:
9742 case rvc_inf:
9743 /* If arg is Inf or NaN and we're logb, return it. */
9744 if (TREE_CODE (rettype) == REAL_TYPE)
9745 return fold_convert_loc (loc, rettype, arg);
9746 /* Fall through... */
9747 case rvc_zero:
9748 /* Zero may set errno and/or raise an exception for logb; also,
9749 for ilogb we don't know FP_ILOGB0. */
9750 return NULL_TREE;
9751 case rvc_normal:
9752 /* For normal numbers, proceed iff radix == 2. In GCC,
9753 normalized significands are in the range [0.5, 1.0). We
9754 want the exponent as if they were [1.0, 2.0) so get the
9755 exponent and subtract 1. */
9756 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9757 return fold_convert_loc (loc, rettype,
9758 build_int_cst (integer_type_node,
9759 REAL_EXP (value)-1));
9760 break;
9761 }
9762 }
9763
9764 return NULL_TREE;
9765 }
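/* Worked example (editor's note): GCC represents a normal value as
   m * 2^REAL_EXP with m in [0.5, 1.0), so 8.0 is 0.5 * 2^4 and
   REAL_EXP is 4, while logb (8.0) is 3 under the usual [1.0, 2.0)
   convention -- hence the REAL_EXP (value) - 1 above.  */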
9766
9767 /* Fold a call to builtin significand, if radix == 2. */
9768
9769 static tree
9770 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9771 {
9772 if (! validate_arg (arg, REAL_TYPE))
9773 return NULL_TREE;
9774
9775 STRIP_NOPS (arg);
9776
9777 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9778 {
9779 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9780
9781 switch (value->cl)
9782 {
9783 case rvc_zero:
9784 case rvc_nan:
9785 case rvc_inf:
9786 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9787 return fold_convert_loc (loc, rettype, arg);
9788 case rvc_normal:
9789 /* For normal numbers, proceed iff radix == 2. */
9790 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9791 {
9792 REAL_VALUE_TYPE result = *value;
9793 /* In GCC, normalized significands are in the range [0.5,
9794 1.0). We want them to be [1.0, 2.0) so set the
9795 exponent to 1. */
9796 SET_REAL_EXP (&result, 1);
9797 return build_real (rettype, result);
9798 }
9799 break;
9800 }
9801 }
9802
9803 return NULL_TREE;
9804 }
9805
9806 /* Fold a call to builtin frexp, we can assume the base is 2. */
9807
9808 static tree
9809 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9810 {
9811 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9812 return NULL_TREE;
9813
9814 STRIP_NOPS (arg0);
9815
9816 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9817 return NULL_TREE;
9818
9819 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9820
9821 /* Proceed if a valid pointer type was passed in. */
9822 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9823 {
9824 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9825 tree frac, exp;
9826
9827 switch (value->cl)
9828 {
9829 case rvc_zero:
9830 /* For +-0, return (*exp = 0, +-0). */
9831 exp = integer_zero_node;
9832 frac = arg0;
9833 break;
9834 case rvc_nan:
9835 case rvc_inf:
9836 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9837 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9838 case rvc_normal:
9839 {
9840 /* Since the frexp function always expects base 2, and in
9841 GCC normalized significands are already in the range
9842 [0.5, 1.0), we have exactly what frexp wants. */
9843 REAL_VALUE_TYPE frac_rvt = *value;
9844 SET_REAL_EXP (&frac_rvt, 0);
9845 frac = build_real (rettype, frac_rvt);
9846 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9847 }
9848 break;
9849 default:
9850 gcc_unreachable ();
9851 }
9852
9853 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9854 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9855 TREE_SIDE_EFFECTS (arg1) = 1;
9856 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9857 }
9858
9859 return NULL_TREE;
9860 }
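/* Worked example (editor's note): frexp (6.0, &e) must yield 0.75
   with e == 3, since 6.0 == 0.75 * 2^3.  GCC's significand is already
   in frexp's [0.5, 1.0) range, so the code above only has to copy the
   exponent out and then reset it to zero:

       value    = 0.75 * 2^3      (REAL_EXP == 3)
       frac_rvt = 0.75 * 2^0 == 0.75
       exp      = 3  */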
9861
9862 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9863 then we can assume the base is two. If it's false, then we have to
9864 check the mode of the TYPE parameter in certain cases. */
9865
9866 static tree
9867 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9868 tree type, bool ldexp)
9869 {
9870 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9871 {
9872 STRIP_NOPS (arg0);
9873 STRIP_NOPS (arg1);
9874
9875 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9876 if (real_zerop (arg0) || integer_zerop (arg1)
9877 || (TREE_CODE (arg0) == REAL_CST
9878 && !real_isfinite (&TREE_REAL_CST (arg0))))
9879 return omit_one_operand_loc (loc, type, arg0, arg1);
9880
9881 /* If both arguments are constant, then try to evaluate it. */
9882 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9883 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9884 && host_integerp (arg1, 0))
9885 {
9886 /* Bound the maximum adjustment to twice the range of the
9887 mode's valid exponents. Use abs to ensure the range is
9888 positive as a sanity check. */
9889 const long max_exp_adj = 2 *
9890 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9891 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9892
9893 /* Get the user-requested adjustment. */
9894 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9895
9896 /* The requested adjustment must be inside this range. This
9897 is a preliminary cap to avoid things like overflow; we
9898 may still fail to compute the result for other reasons. */
9899 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9900 {
9901 REAL_VALUE_TYPE initial_result;
9902
9903 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9904
9905 /* Ensure we didn't overflow. */
9906 if (! real_isinf (&initial_result))
9907 {
9908 const REAL_VALUE_TYPE trunc_result
9909 = real_value_truncate (TYPE_MODE (type), initial_result);
9910
9911 /* Only proceed if the target mode can hold the
9912 resulting value. */
9913 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9914 return build_real (type, trunc_result);
9915 }
9916 }
9917 }
9918 }
9919
9920 return NULL_TREE;
9921 }
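/* Illustration (editor's sketch): examples of the folds above.

       ldexp (x, 0)    ->  x
       ldexp (0.0, n)  ->  0.0
       ldexp (1.5, 4)  ->  24.0   (via real_ldexp, then checked for
                                   overflow and for exact fit in the
                                   target mode)

   For scalbn/scalbln the same constant folding is done only when the
   type's radix is 2, hence the REAL_MODE_FORMAT (...)->b == 2 test.  */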
9922
9923 /* Fold a call to builtin modf. */
9924
9925 static tree
9926 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9927 {
9928 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9929 return NULL_TREE;
9930
9931 STRIP_NOPS (arg0);
9932
9933 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9934 return NULL_TREE;
9935
9936 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9937
9938 /* Proceed if a valid pointer type was passed in. */
9939 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9940 {
9941 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9942 REAL_VALUE_TYPE trunc, frac;
9943
9944 switch (value->cl)
9945 {
9946 case rvc_nan:
9947 case rvc_zero:
9948 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9949 trunc = frac = *value;
9950 break;
9951 case rvc_inf:
9952 /* For +-Inf, return (*arg1 = arg0, +-0). */
9953 frac = dconst0;
9954 frac.sign = value->sign;
9955 trunc = *value;
9956 break;
9957 case rvc_normal:
9958 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9959 real_trunc (&trunc, VOIDmode, value);
9960 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9961 /* If the original number was negative and already
9962 integral, then the fractional part is -0.0. */
9963 if (value->sign && frac.cl == rvc_zero)
9964 frac.sign = value->sign;
9965 break;
9966 }
9967
9968 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9969 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9970 build_real (rettype, trunc));
9971 TREE_SIDE_EFFECTS (arg1) = 1;
9972 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9973 build_real (rettype, frac));
9974 }
9975
9976 return NULL_TREE;
9977 }
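/* Illustration (editor's sketch): examples of the modf folds above.

       modf (-3.5, &ip)   ->  ip = -3.0, value -0.5
       modf (-2.0, &ip)   ->  ip = -2.0, value -0.0  (the rvc_normal
                                                      branch copies the
                                                      sign onto a zero
                                                      fraction)
       modf (+-Inf, &ip)  ->  ip = +-Inf, value +-0.0  */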
9978
9979 /* Given a location LOC, an interclass builtin function decl FNDECL
9980 and its single argument ARG, return a folded expression computing
9981 the same, or NULL_TREE if we either couldn't or didn't want to fold
9982 (the latter happens if there's an RTL instruction available). */
9983
9984 static tree
9985 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9986 {
9987 enum machine_mode mode;
9988
9989 if (!validate_arg (arg, REAL_TYPE))
9990 return NULL_TREE;
9991
9992 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9993 return NULL_TREE;
9994
9995 mode = TYPE_MODE (TREE_TYPE (arg));
9996
9997 /* If there is no optab, try generic code. */
9998 switch (DECL_FUNCTION_CODE (fndecl))
9999 {
10000 tree result;
10001
10002 CASE_FLT_FN (BUILT_IN_ISINF):
10003 {
10004 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
10005 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
10006 tree const type = TREE_TYPE (arg);
10007 REAL_VALUE_TYPE r;
10008 char buf[128];
10009
10010 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10011 real_from_string (&r, buf);
10012 result = build_call_expr (isgr_fn, 2,
10013 fold_build1_loc (loc, ABS_EXPR, type, arg),
10014 build_real (type, r));
10015 return result;
10016 }
10017 CASE_FLT_FN (BUILT_IN_FINITE):
10018 case BUILT_IN_ISFINITE:
10019 {
10020 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
10021 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10022 tree const type = TREE_TYPE (arg);
10023 REAL_VALUE_TYPE r;
10024 char buf[128];
10025
10026 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10027 real_from_string (&r, buf);
10028 result = build_call_expr (isle_fn, 2,
10029 fold_build1_loc (loc, ABS_EXPR, type, arg),
10030 build_real (type, r));
10031 /*result = fold_build2_loc (loc, UNGT_EXPR,
10032 TREE_TYPE (TREE_TYPE (fndecl)),
10033 fold_build1_loc (loc, ABS_EXPR, type, arg),
10034 build_real (type, r));
10035 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10036 TREE_TYPE (TREE_TYPE (fndecl)),
10037 result);*/
10038 return result;
10039 }
10040 case BUILT_IN_ISNORMAL:
10041 {
10042 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10043 islessequal(fabs(x),DBL_MAX). */
10044 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10045 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10046 tree const type = TREE_TYPE (arg);
10047 REAL_VALUE_TYPE rmax, rmin;
10048 char buf[128];
10049
10050 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10051 real_from_string (&rmax, buf);
10052 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10053 real_from_string (&rmin, buf);
10054 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10055 result = build_call_expr (isle_fn, 2, arg,
10056 build_real (type, rmax));
10057 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
10058 build_call_expr (isge_fn, 2, arg,
10059 build_real (type, rmin)));
10060 return result;
10061 }
10062 default:
10063 break;
10064 }
10065
10066 return NULL_TREE;
10067 }
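/* Illustration (editor's sketch): on a target without a direct optab
   the generic expansions above give, for a double ARG,

       isinf (x)     ->  isgreater (fabs (x), DBL_MAX)
       isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)
       isnormal (x)  ->  isgreaterequal (fabs (x), DBL_MIN)
                         & islessequal (fabs (x), DBL_MAX)

   where the DBL_MAX value comes from get_max_float and the DBL_MIN
   value from the hex string "0x1p<emin-1>".  The unordered comparison
   builtins are used so that a NaN argument compares false instead of
   raising spurious exceptions.  */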
10068
10069 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
10070 ARG is the argument for the call. */
10071
10072 static tree
10073 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10074 {
10075 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10076 REAL_VALUE_TYPE r;
10077
10078 if (!validate_arg (arg, REAL_TYPE))
10079 return NULL_TREE;
10080
10081 switch (builtin_index)
10082 {
10083 case BUILT_IN_ISINF:
10084 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10085 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10086
10087 if (TREE_CODE (arg) == REAL_CST)
10088 {
10089 r = TREE_REAL_CST (arg);
10090 if (real_isinf (&r))
10091 return real_compare (GT_EXPR, &r, &dconst0)
10092 ? integer_one_node : integer_minus_one_node;
10093 else
10094 return integer_zero_node;
10095 }
10096
10097 return NULL_TREE;
10098
10099 case BUILT_IN_ISINF_SIGN:
10100 {
10101 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10102 /* In a boolean context, GCC will fold the inner COND_EXPR to
10103 1. So e.g. "if (isinf_sign(x))" would be folded to just
10104 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10105 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10106 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10107 tree tmp = NULL_TREE;
10108
10109 arg = builtin_save_expr (arg);
10110
10111 if (signbit_fn && isinf_fn)
10112 {
10113 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10114 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10115
10116 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10117 signbit_call, integer_zero_node);
10118 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10119 isinf_call, integer_zero_node);
10120
10121 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10122 integer_minus_one_node, integer_one_node);
10123 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10124 isinf_call, tmp,
10125 integer_zero_node);
10126 }
10127
10128 return tmp;
10129 }
10130
10131 case BUILT_IN_ISFINITE:
10132 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10133 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10134 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10135
10136 if (TREE_CODE (arg) == REAL_CST)
10137 {
10138 r = TREE_REAL_CST (arg);
10139 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10140 }
10141
10142 return NULL_TREE;
10143
10144 case BUILT_IN_ISNAN:
10145 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10146 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10147
10148 if (TREE_CODE (arg) == REAL_CST)
10149 {
10150 r = TREE_REAL_CST (arg);
10151 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10152 }
10153
10154 arg = builtin_save_expr (arg);
10155 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10156
10157 default:
10158 gcc_unreachable ();
10159 }
10160 }
10161
10162 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10163 This builtin will generate code to return the appropriate floating
10164 point classification depending on the value of the floating point
10165 number passed in. The possible return values must be supplied as
10166 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10167 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10168 one floating point argument, which is "type generic". */
10169
10170 static tree
10171 fold_builtin_fpclassify (location_t loc, tree exp)
10172 {
10173 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10174 arg, type, res, tmp;
10175 enum machine_mode mode;
10176 REAL_VALUE_TYPE r;
10177 char buf[128];
10178
10179 /* Verify the required arguments in the original call. */
10180 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10181 INTEGER_TYPE, INTEGER_TYPE,
10182 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10183 return NULL_TREE;
10184
10185 fp_nan = CALL_EXPR_ARG (exp, 0);
10186 fp_infinite = CALL_EXPR_ARG (exp, 1);
10187 fp_normal = CALL_EXPR_ARG (exp, 2);
10188 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10189 fp_zero = CALL_EXPR_ARG (exp, 4);
10190 arg = CALL_EXPR_ARG (exp, 5);
10191 type = TREE_TYPE (arg);
10192 mode = TYPE_MODE (type);
10193 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10194
10195 /* fpclassify(x) ->
10196 isnan(x) ? FP_NAN :
10197 (fabs(x) == Inf ? FP_INFINITE :
10198 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10199 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10200
10201 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10202 build_real (type, dconst0));
10203 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10204 tmp, fp_zero, fp_subnormal);
10205
10206 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10207 real_from_string (&r, buf);
10208 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10209 arg, build_real (type, r));
10210 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10211
10212 if (HONOR_INFINITIES (mode))
10213 {
10214 real_inf (&r);
10215 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10216 build_real (type, r));
10217 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10218 fp_infinite, res);
10219 }
10220
10221 if (HONOR_NANS (mode))
10222 {
10223 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10224 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10225 }
10226
10227 return res;
10228 }
10229
10230 /* Fold a call to an unordered comparison function such as
10231 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10232 being called and ARG0 and ARG1 are the arguments for the call.
10233 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10234 the opposite of the desired result. UNORDERED_CODE is used
10235 for modes that can hold NaNs and ORDERED_CODE is used for
10236 the rest. */
10237
10238 static tree
10239 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10240 enum tree_code unordered_code,
10241 enum tree_code ordered_code)
10242 {
10243 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10244 enum tree_code code;
10245 tree type0, type1;
10246 enum tree_code code0, code1;
10247 tree cmp_type = NULL_TREE;
10248
10249 type0 = TREE_TYPE (arg0);
10250 type1 = TREE_TYPE (arg1);
10251
10252 code0 = TREE_CODE (type0);
10253 code1 = TREE_CODE (type1);
10254
10255 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10256 /* Choose the wider of two real types. */
10257 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10258 ? type0 : type1;
10259 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10260 cmp_type = type0;
10261 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10262 cmp_type = type1;
10263
10264 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10265 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10266
10267 if (unordered_code == UNORDERED_EXPR)
10268 {
10269 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10270 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10271 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10272 }
10273
10274 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10275 : ordered_code;
10276 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10277 fold_build2_loc (loc, code, type, arg0, arg1));
10278 }
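/* Illustration (editor's sketch): the inverted comparison codes mean
   that e.g. isgreater (x, y) is folded to

       !(x <= y)      when the mode cannot hold NaNs
       !(x UNLE y)    otherwise

   With NaNs, UNLE is true whenever either operand is unordered, so
   the negation yields the required quiet "false"; using a plain
   GT_EXPR instead could raise an invalid-operand exception on NaN
   operands.  */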
10279
10280 /* Fold a call to built-in function FNDECL with 0 arguments.
10281 IGNORE is true if the result of the function call is ignored. This
10282 function returns NULL_TREE if no simplification was possible. */
10283
10284 static tree
10285 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10286 {
10287 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10288 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10289 switch (fcode)
10290 {
10291 CASE_FLT_FN (BUILT_IN_INF):
10292 case BUILT_IN_INFD32:
10293 case BUILT_IN_INFD64:
10294 case BUILT_IN_INFD128:
10295 return fold_builtin_inf (loc, type, true);
10296
10297 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10298 return fold_builtin_inf (loc, type, false);
10299
10300 case BUILT_IN_CLASSIFY_TYPE:
10301 return fold_builtin_classify_type (NULL_TREE);
10302
10303 default:
10304 break;
10305 }
10306 return NULL_TREE;
10307 }
10308
10309 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10310 IGNORE is true if the result of the function call is ignored. This
10311 function returns NULL_TREE if no simplification was possible. */
10312
10313 static tree
10314 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10315 {
10316 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10317 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10318 switch (fcode)
10319 {
10320 case BUILT_IN_CONSTANT_P:
10321 {
10322 tree val = fold_builtin_constant_p (arg0);
10323
10324 /* Gimplification will pull the CALL_EXPR for the builtin out of
10325 an if condition. When not optimizing, we'll not CSE it back.
10326 To avoid regressions such as link errors, return false now. */
10327 if (!val && !optimize)
10328 val = integer_zero_node;
10329
10330 return val;
10331 }
10332
10333 case BUILT_IN_CLASSIFY_TYPE:
10334 return fold_builtin_classify_type (arg0);
10335
10336 case BUILT_IN_STRLEN:
10337 return fold_builtin_strlen (loc, type, arg0);
10338
10339 CASE_FLT_FN (BUILT_IN_FABS):
10340 return fold_builtin_fabs (loc, arg0, type);
10341
10342 case BUILT_IN_ABS:
10343 case BUILT_IN_LABS:
10344 case BUILT_IN_LLABS:
10345 case BUILT_IN_IMAXABS:
10346 return fold_builtin_abs (loc, arg0, type);
10347
10348 CASE_FLT_FN (BUILT_IN_CONJ):
10349 if (validate_arg (arg0, COMPLEX_TYPE)
10350 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10351 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10352 break;
10353
10354 CASE_FLT_FN (BUILT_IN_CREAL):
10355 if (validate_arg (arg0, COMPLEX_TYPE)
10356 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10357 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10358 break;
10359
10360 CASE_FLT_FN (BUILT_IN_CIMAG):
10361 if (validate_arg (arg0, COMPLEX_TYPE)
10362 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10363 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10364 break;
10365
10366 CASE_FLT_FN (BUILT_IN_CCOS):
10367 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10368
10369 CASE_FLT_FN (BUILT_IN_CCOSH):
10370 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10371
10372 CASE_FLT_FN (BUILT_IN_CPROJ):
10373 return fold_builtin_cproj (loc, arg0, type);
10374
10375 CASE_FLT_FN (BUILT_IN_CSIN):
10376 if (validate_arg (arg0, COMPLEX_TYPE)
10377 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10378 return do_mpc_arg1 (arg0, type, mpc_sin);
10379 break;
10380
10381 CASE_FLT_FN (BUILT_IN_CSINH):
10382 if (validate_arg (arg0, COMPLEX_TYPE)
10383 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10384 return do_mpc_arg1 (arg0, type, mpc_sinh);
10385 break;
10386
10387 CASE_FLT_FN (BUILT_IN_CTAN):
10388 if (validate_arg (arg0, COMPLEX_TYPE)
10389 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10390 return do_mpc_arg1 (arg0, type, mpc_tan);
10391 break;
10392
10393 CASE_FLT_FN (BUILT_IN_CTANH):
10394 if (validate_arg (arg0, COMPLEX_TYPE)
10395 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10396 return do_mpc_arg1 (arg0, type, mpc_tanh);
10397 break;
10398
10399 CASE_FLT_FN (BUILT_IN_CLOG):
10400 if (validate_arg (arg0, COMPLEX_TYPE)
10401 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10402 return do_mpc_arg1 (arg0, type, mpc_log);
10403 break;
10404
10405 CASE_FLT_FN (BUILT_IN_CSQRT):
10406 if (validate_arg (arg0, COMPLEX_TYPE)
10407 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10408 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10409 break;
10410
10411 CASE_FLT_FN (BUILT_IN_CASIN):
10412 if (validate_arg (arg0, COMPLEX_TYPE)
10413 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10414 return do_mpc_arg1 (arg0, type, mpc_asin);
10415 break;
10416
10417 CASE_FLT_FN (BUILT_IN_CACOS):
10418 if (validate_arg (arg0, COMPLEX_TYPE)
10419 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10420 return do_mpc_arg1 (arg0, type, mpc_acos);
10421 break;
10422
10423 CASE_FLT_FN (BUILT_IN_CATAN):
10424 if (validate_arg (arg0, COMPLEX_TYPE)
10425 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10426 return do_mpc_arg1 (arg0, type, mpc_atan);
10427 break;
10428
10429 CASE_FLT_FN (BUILT_IN_CASINH):
10430 if (validate_arg (arg0, COMPLEX_TYPE)
10431 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10432 return do_mpc_arg1 (arg0, type, mpc_asinh);
10433 break;
10434
10435 CASE_FLT_FN (BUILT_IN_CACOSH):
10436 if (validate_arg (arg0, COMPLEX_TYPE)
10437 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10438 return do_mpc_arg1 (arg0, type, mpc_acosh);
10439 break;
10440
10441 CASE_FLT_FN (BUILT_IN_CATANH):
10442 if (validate_arg (arg0, COMPLEX_TYPE)
10443 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10444 return do_mpc_arg1 (arg0, type, mpc_atanh);
10445 break;
10446
10447 CASE_FLT_FN (BUILT_IN_CABS):
10448 return fold_builtin_cabs (loc, arg0, type, fndecl);
10449
10450 CASE_FLT_FN (BUILT_IN_CARG):
10451 return fold_builtin_carg (loc, arg0, type);
10452
10453 CASE_FLT_FN (BUILT_IN_SQRT):
10454 return fold_builtin_sqrt (loc, arg0, type);
10455
10456 CASE_FLT_FN (BUILT_IN_CBRT):
10457 return fold_builtin_cbrt (loc, arg0, type);
10458
10459 CASE_FLT_FN (BUILT_IN_ASIN):
10460 if (validate_arg (arg0, REAL_TYPE))
10461 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10462 &dconstm1, &dconst1, true);
10463 break;
10464
10465 CASE_FLT_FN (BUILT_IN_ACOS):
10466 if (validate_arg (arg0, REAL_TYPE))
10467 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10468 &dconstm1, &dconst1, true);
10469 break;
10470
10471 CASE_FLT_FN (BUILT_IN_ATAN):
10472 if (validate_arg (arg0, REAL_TYPE))
10473 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10474 break;
10475
10476 CASE_FLT_FN (BUILT_IN_ASINH):
10477 if (validate_arg (arg0, REAL_TYPE))
10478 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10479 break;
10480
10481 CASE_FLT_FN (BUILT_IN_ACOSH):
10482 if (validate_arg (arg0, REAL_TYPE))
10483 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10484 &dconst1, NULL, true);
10485 break;
10486
10487 CASE_FLT_FN (BUILT_IN_ATANH):
10488 if (validate_arg (arg0, REAL_TYPE))
10489 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10490 &dconstm1, &dconst1, false);
10491 break;
10492
10493 CASE_FLT_FN (BUILT_IN_SIN):
10494 if (validate_arg (arg0, REAL_TYPE))
10495 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10496 break;
10497
10498 CASE_FLT_FN (BUILT_IN_COS):
10499 return fold_builtin_cos (loc, arg0, type, fndecl);
10500
10501 CASE_FLT_FN (BUILT_IN_TAN):
10502 return fold_builtin_tan (arg0, type);
10503
10504 CASE_FLT_FN (BUILT_IN_CEXP):
10505 return fold_builtin_cexp (loc, arg0, type);
10506
10507 CASE_FLT_FN (BUILT_IN_CEXPI):
10508 if (validate_arg (arg0, REAL_TYPE))
10509 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10510 break;
10511
10512 CASE_FLT_FN (BUILT_IN_SINH):
10513 if (validate_arg (arg0, REAL_TYPE))
10514 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10515 break;
10516
10517 CASE_FLT_FN (BUILT_IN_COSH):
10518 return fold_builtin_cosh (loc, arg0, type, fndecl);
10519
10520 CASE_FLT_FN (BUILT_IN_TANH):
10521 if (validate_arg (arg0, REAL_TYPE))
10522 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10523 break;
10524
10525 CASE_FLT_FN (BUILT_IN_ERF):
10526 if (validate_arg (arg0, REAL_TYPE))
10527 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10528 break;
10529
10530 CASE_FLT_FN (BUILT_IN_ERFC):
10531 if (validate_arg (arg0, REAL_TYPE))
10532 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10533 break;
10534
10535 CASE_FLT_FN (BUILT_IN_TGAMMA):
10536 if (validate_arg (arg0, REAL_TYPE))
10537 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10538 break;
10539
10540 CASE_FLT_FN (BUILT_IN_EXP):
10541 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10542
10543 CASE_FLT_FN (BUILT_IN_EXP2):
10544 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10545
10546 CASE_FLT_FN (BUILT_IN_EXP10):
10547 CASE_FLT_FN (BUILT_IN_POW10):
10548 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10549
10550 CASE_FLT_FN (BUILT_IN_EXPM1):
10551 if (validate_arg (arg0, REAL_TYPE))
10552 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10553 break;
10554
10555 CASE_FLT_FN (BUILT_IN_LOG):
10556 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10557
10558 CASE_FLT_FN (BUILT_IN_LOG2):
10559 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10560
10561 CASE_FLT_FN (BUILT_IN_LOG10):
10562 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10563
10564 CASE_FLT_FN (BUILT_IN_LOG1P):
10565 if (validate_arg (arg0, REAL_TYPE))
10566 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10567 &dconstm1, NULL, false);
10568 break;
10569
10570 CASE_FLT_FN (BUILT_IN_J0):
10571 if (validate_arg (arg0, REAL_TYPE))
10572 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10573 NULL, NULL, 0);
10574 break;
10575
10576 CASE_FLT_FN (BUILT_IN_J1):
10577 if (validate_arg (arg0, REAL_TYPE))
10578 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10579 NULL, NULL, 0);
10580 break;
10581
10582 CASE_FLT_FN (BUILT_IN_Y0):
10583 if (validate_arg (arg0, REAL_TYPE))
10584 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10585 &dconst0, NULL, false);
10586 break;
10587
10588 CASE_FLT_FN (BUILT_IN_Y1):
10589 if (validate_arg (arg0, REAL_TYPE))
10590 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10591 &dconst0, NULL, false);
10592 break;
10593
10594 CASE_FLT_FN (BUILT_IN_NAN):
10595 case BUILT_IN_NAND32:
10596 case BUILT_IN_NAND64:
10597 case BUILT_IN_NAND128:
10598 return fold_builtin_nan (arg0, type, true);
10599
10600 CASE_FLT_FN (BUILT_IN_NANS):
10601 return fold_builtin_nan (arg0, type, false);
10602
10603 CASE_FLT_FN (BUILT_IN_FLOOR):
10604 return fold_builtin_floor (loc, fndecl, arg0);
10605
10606 CASE_FLT_FN (BUILT_IN_CEIL):
10607 return fold_builtin_ceil (loc, fndecl, arg0);
10608
10609 CASE_FLT_FN (BUILT_IN_TRUNC):
10610 return fold_builtin_trunc (loc, fndecl, arg0);
10611
10612 CASE_FLT_FN (BUILT_IN_ROUND):
10613 return fold_builtin_round (loc, fndecl, arg0);
10614
10615 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10616 CASE_FLT_FN (BUILT_IN_RINT):
10617 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10618
10619 CASE_FLT_FN (BUILT_IN_ICEIL):
10620 CASE_FLT_FN (BUILT_IN_LCEIL):
10621 CASE_FLT_FN (BUILT_IN_LLCEIL):
10622 CASE_FLT_FN (BUILT_IN_LFLOOR):
10623 CASE_FLT_FN (BUILT_IN_IFLOOR):
10624 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10625 CASE_FLT_FN (BUILT_IN_IROUND):
10626 CASE_FLT_FN (BUILT_IN_LROUND):
10627 CASE_FLT_FN (BUILT_IN_LLROUND):
10628 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10629
10630 CASE_FLT_FN (BUILT_IN_IRINT):
10631 CASE_FLT_FN (BUILT_IN_LRINT):
10632 CASE_FLT_FN (BUILT_IN_LLRINT):
10633 return fold_fixed_mathfn (loc, fndecl, arg0);
10634
10635 case BUILT_IN_BSWAP16:
10636 case BUILT_IN_BSWAP32:
10637 case BUILT_IN_BSWAP64:
10638 return fold_builtin_bswap (fndecl, arg0);
10639
10640 CASE_INT_FN (BUILT_IN_FFS):
10641 CASE_INT_FN (BUILT_IN_CLZ):
10642 CASE_INT_FN (BUILT_IN_CTZ):
10643 CASE_INT_FN (BUILT_IN_CLRSB):
10644 CASE_INT_FN (BUILT_IN_POPCOUNT):
10645 CASE_INT_FN (BUILT_IN_PARITY):
10646 return fold_builtin_bitop (fndecl, arg0);
10647
10648 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10649 return fold_builtin_signbit (loc, arg0, type);
10650
10651 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10652 return fold_builtin_significand (loc, arg0, type);
10653
10654 CASE_FLT_FN (BUILT_IN_ILOGB):
10655 CASE_FLT_FN (BUILT_IN_LOGB):
10656 return fold_builtin_logb (loc, arg0, type);
10657
10658 case BUILT_IN_ISASCII:
10659 return fold_builtin_isascii (loc, arg0);
10660
10661 case BUILT_IN_TOASCII:
10662 return fold_builtin_toascii (loc, arg0);
10663
10664 case BUILT_IN_ISDIGIT:
10665 return fold_builtin_isdigit (loc, arg0);
10666
10667 CASE_FLT_FN (BUILT_IN_FINITE):
10668 case BUILT_IN_FINITED32:
10669 case BUILT_IN_FINITED64:
10670 case BUILT_IN_FINITED128:
10671 case BUILT_IN_ISFINITE:
10672 {
10673 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10674 if (ret)
10675 return ret;
10676 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10677 }
10678
10679 CASE_FLT_FN (BUILT_IN_ISINF):
10680 case BUILT_IN_ISINFD32:
10681 case BUILT_IN_ISINFD64:
10682 case BUILT_IN_ISINFD128:
10683 {
10684 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10685 if (ret)
10686 return ret;
10687 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10688 }
10689
10690 case BUILT_IN_ISNORMAL:
10691 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10692
10693 case BUILT_IN_ISINF_SIGN:
10694 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10695
10696 CASE_FLT_FN (BUILT_IN_ISNAN):
10697 case BUILT_IN_ISNAND32:
10698 case BUILT_IN_ISNAND64:
10699 case BUILT_IN_ISNAND128:
10700 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10701
10702 case BUILT_IN_PRINTF:
10703 case BUILT_IN_PRINTF_UNLOCKED:
10704 case BUILT_IN_VPRINTF:
10705 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10706
10707 case BUILT_IN_FREE:
10708 if (integer_zerop (arg0))
10709 return build_empty_stmt (loc);
10710 break;
10711
10712 default:
10713 break;
10714 }
10715
10716 return NULL_TREE;
10717
10718 }
10719
10720 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10721 IGNORE is true if the result of the function call is ignored. This
10722 function returns NULL_TREE if no simplification was possible. */
10723
10724 static tree
10725 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10726 {
10727 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10728 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10729
10730 switch (fcode)
10731 {
10732 CASE_FLT_FN (BUILT_IN_JN):
10733 if (validate_arg (arg0, INTEGER_TYPE)
10734 && validate_arg (arg1, REAL_TYPE))
10735 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10736 break;
10737
10738 CASE_FLT_FN (BUILT_IN_YN):
10739 if (validate_arg (arg0, INTEGER_TYPE)
10740 && validate_arg (arg1, REAL_TYPE))
10741 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10742 &dconst0, false);
10743 break;
10744
10745 CASE_FLT_FN (BUILT_IN_DREM):
10746 CASE_FLT_FN (BUILT_IN_REMAINDER):
10747 if (validate_arg (arg0, REAL_TYPE)
10748 && validate_arg (arg1, REAL_TYPE))
10749 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10750 break;
10751
10752 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10753 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10754 if (validate_arg (arg0, REAL_TYPE)
10755 && validate_arg (arg1, POINTER_TYPE))
10756 return do_mpfr_lgamma_r (arg0, arg1, type);
10757 break;
10758
10759 CASE_FLT_FN (BUILT_IN_ATAN2):
10760 if (validate_arg (arg0, REAL_TYPE)
10761 && validate_arg (arg1, REAL_TYPE))
10762 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10763 break;
10764
10765 CASE_FLT_FN (BUILT_IN_FDIM):
10766 if (validate_arg (arg0, REAL_TYPE)
10767 && validate_arg (arg1, REAL_TYPE))
10768 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10769 break;
10770
10771 CASE_FLT_FN (BUILT_IN_HYPOT):
10772 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10773
10774 CASE_FLT_FN (BUILT_IN_CPOW):
10775 if (validate_arg (arg0, COMPLEX_TYPE)
10776 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10777 && validate_arg (arg1, COMPLEX_TYPE)
10778 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10779 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10780 break;
10781
10782 CASE_FLT_FN (BUILT_IN_LDEXP):
10783 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10784 CASE_FLT_FN (BUILT_IN_SCALBN):
10785 CASE_FLT_FN (BUILT_IN_SCALBLN):
10786 return fold_builtin_load_exponent (loc, arg0, arg1,
10787 type, /*ldexp=*/false);
10788
10789 CASE_FLT_FN (BUILT_IN_FREXP):
10790 return fold_builtin_frexp (loc, arg0, arg1, type);
10791
10792 CASE_FLT_FN (BUILT_IN_MODF):
10793 return fold_builtin_modf (loc, arg0, arg1, type);
10794
10795 case BUILT_IN_BZERO:
10796 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10797
10798 case BUILT_IN_FPUTS:
10799 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10800
10801 case BUILT_IN_FPUTS_UNLOCKED:
10802 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10803
10804 case BUILT_IN_STRSTR:
10805 return fold_builtin_strstr (loc, arg0, arg1, type);
10806
10807 case BUILT_IN_STRCAT:
10808 return fold_builtin_strcat (loc, arg0, arg1);
10809
10810 case BUILT_IN_STRSPN:
10811 return fold_builtin_strspn (loc, arg0, arg1);
10812
10813 case BUILT_IN_STRCSPN:
10814 return fold_builtin_strcspn (loc, arg0, arg1);
10815
10816 case BUILT_IN_STRCHR:
10817 case BUILT_IN_INDEX:
10818 return fold_builtin_strchr (loc, arg0, arg1, type);
10819
10820 case BUILT_IN_STRRCHR:
10821 case BUILT_IN_RINDEX:
10822 return fold_builtin_strrchr (loc, arg0, arg1, type);
10823
10824 case BUILT_IN_STRCPY:
10825 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10826
10827 case BUILT_IN_STPCPY:
10828 if (ignore)
10829 {
10830 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10831 if (!fn)
10832 break;
10833
10834 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10835 }
10836 else
10837 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10838 break;
10839
10840 case BUILT_IN_STRCMP:
10841 return fold_builtin_strcmp (loc, arg0, arg1);
10842
10843 case BUILT_IN_STRPBRK:
10844 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10845
10846 case BUILT_IN_EXPECT:
10847 return fold_builtin_expect (loc, arg0, arg1);
10848
10849 CASE_FLT_FN (BUILT_IN_POW):
10850 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10851
10852 CASE_FLT_FN (BUILT_IN_POWI):
10853 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10854
10855 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10856 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10857
10858 CASE_FLT_FN (BUILT_IN_FMIN):
10859 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10860
10861 CASE_FLT_FN (BUILT_IN_FMAX):
10862 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10863
10864 case BUILT_IN_ISGREATER:
10865 return fold_builtin_unordered_cmp (loc, fndecl,
10866 arg0, arg1, UNLE_EXPR, LE_EXPR);
10867 case BUILT_IN_ISGREATEREQUAL:
10868 return fold_builtin_unordered_cmp (loc, fndecl,
10869 arg0, arg1, UNLT_EXPR, LT_EXPR);
10870 case BUILT_IN_ISLESS:
10871 return fold_builtin_unordered_cmp (loc, fndecl,
10872 arg0, arg1, UNGE_EXPR, GE_EXPR);
10873 case BUILT_IN_ISLESSEQUAL:
10874 return fold_builtin_unordered_cmp (loc, fndecl,
10875 arg0, arg1, UNGT_EXPR, GT_EXPR);
10876 case BUILT_IN_ISLESSGREATER:
10877 return fold_builtin_unordered_cmp (loc, fndecl,
10878 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10879 case BUILT_IN_ISUNORDERED:
10880 return fold_builtin_unordered_cmp (loc, fndecl,
10881 arg0, arg1, UNORDERED_EXPR,
10882 NOP_EXPR);
10883
10884 /* We do the folding for va_start in the expander. */
10885 case BUILT_IN_VA_START:
10886 break;
10887
10888 case BUILT_IN_SPRINTF:
10889 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10890
10891 case BUILT_IN_OBJECT_SIZE:
10892 return fold_builtin_object_size (arg0, arg1);
10893
10894 case BUILT_IN_PRINTF:
10895 case BUILT_IN_PRINTF_UNLOCKED:
10896 case BUILT_IN_VPRINTF:
10897 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10898
10899 case BUILT_IN_PRINTF_CHK:
10900 case BUILT_IN_VPRINTF_CHK:
10901 if (!validate_arg (arg0, INTEGER_TYPE)
10902 || TREE_SIDE_EFFECTS (arg0))
10903 return NULL_TREE;
10904 else
10905 return fold_builtin_printf (loc, fndecl,
10906 arg1, NULL_TREE, ignore, fcode);
10907 break;
10908
10909 case BUILT_IN_FPRINTF:
10910 case BUILT_IN_FPRINTF_UNLOCKED:
10911 case BUILT_IN_VFPRINTF:
10912 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10913 ignore, fcode);
10914
10915 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10916 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10917
10918 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10919 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10920
10921 default:
10922 break;
10923 }
10924 return NULL_TREE;
10925 }
10926
10927 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10928 and ARG2. IGNORE is true if the result of the function call is ignored.
10929 This function returns NULL_TREE if no simplification was possible. */
10930
10931 static tree
10932 fold_builtin_3 (location_t loc, tree fndecl,
10933 tree arg0, tree arg1, tree arg2, bool ignore)
10934 {
10935 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10936 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10937 switch (fcode)
10938 {
10939
10940 CASE_FLT_FN (BUILT_IN_SINCOS):
10941 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10942
10943 CASE_FLT_FN (BUILT_IN_FMA):
10944 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10946
10947 CASE_FLT_FN (BUILT_IN_REMQUO):
10948 if (validate_arg (arg0, REAL_TYPE)
10949 && validate_arg (arg1, REAL_TYPE)
10950 && validate_arg (arg2, POINTER_TYPE))
10951 return do_mpfr_remquo (arg0, arg1, arg2);
10952 break;
10953
10954 case BUILT_IN_MEMSET:
10955 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10956
10957 case BUILT_IN_BCOPY:
10958 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10959 void_type_node, true, /*endp=*/3);
10960
10961 case BUILT_IN_MEMCPY:
10962 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10963 type, ignore, /*endp=*/0);
10964
10965 case BUILT_IN_MEMPCPY:
10966 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10967 type, ignore, /*endp=*/1);
10968
10969 case BUILT_IN_MEMMOVE:
10970 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10971 type, ignore, /*endp=*/3);
10972
10973 case BUILT_IN_STRNCAT:
10974 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10975
10976 case BUILT_IN_STRNCPY:
10977 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10978
10979 case BUILT_IN_STRNCMP:
10980 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10981
10982 case BUILT_IN_MEMCHR:
10983 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10984
10985 case BUILT_IN_BCMP:
10986 case BUILT_IN_MEMCMP:
10987 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10988
10989 case BUILT_IN_SPRINTF:
10990 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10991
10992 case BUILT_IN_SNPRINTF:
10993 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10994
10995 case BUILT_IN_STRCPY_CHK:
10996 case BUILT_IN_STPCPY_CHK:
10997 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10998 ignore, fcode);
10999
11000 case BUILT_IN_STRCAT_CHK:
11001 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
11002
11003 case BUILT_IN_PRINTF_CHK:
11004 case BUILT_IN_VPRINTF_CHK:
11005 if (!validate_arg (arg0, INTEGER_TYPE)
11006 || TREE_SIDE_EFFECTS (arg0))
11007 return NULL_TREE;
11008 else
11009 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
11010 break;
11011
11012 case BUILT_IN_FPRINTF:
11013 case BUILT_IN_FPRINTF_UNLOCKED:
11014 case BUILT_IN_VFPRINTF:
11015 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
11016 ignore, fcode);
11017
11018 case BUILT_IN_FPRINTF_CHK:
11019 case BUILT_IN_VFPRINTF_CHK:
11020 if (!validate_arg (arg1, INTEGER_TYPE)
11021 || TREE_SIDE_EFFECTS (arg1))
11022 return NULL_TREE;
11023 else
11024 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11025 ignore, fcode);
11026
11027 default:
11028 break;
11029 }
11030 return NULL_TREE;
11031 }
11032
11033 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11034 ARG2, and ARG3. IGNORE is true if the result of the function call is
11035 ignored. This function returns NULL_TREE if no simplification was
11036 possible. */
11037
11038 static tree
11039 fold_builtin_4 (location_t loc, tree fndecl,
11040 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
11041 {
11042 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11043
11044 switch (fcode)
11045 {
11046 case BUILT_IN_MEMCPY_CHK:
11047 case BUILT_IN_MEMPCPY_CHK:
11048 case BUILT_IN_MEMMOVE_CHK:
11049 case BUILT_IN_MEMSET_CHK:
11050 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11051 NULL_TREE, ignore,
11052 DECL_FUNCTION_CODE (fndecl));
11053
11054 case BUILT_IN_STRNCPY_CHK:
11055 case BUILT_IN_STPNCPY_CHK:
11056 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
11057 ignore, fcode);
11058
11059 case BUILT_IN_STRNCAT_CHK:
11060 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11061
11062 case BUILT_IN_SNPRINTF:
11063 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
11064
11065 case BUILT_IN_FPRINTF_CHK:
11066 case BUILT_IN_VFPRINTF_CHK:
11067 if (!validate_arg (arg1, INTEGER_TYPE)
11068 || TREE_SIDE_EFFECTS (arg1))
11069 return NULL_TREE;
11070 else
11071 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11072 ignore, fcode);
11073 break;
11074
11075 default:
11076 break;
11077 }
11078 return NULL_TREE;
11079 }
11080
11081 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11082 arguments, where NARGS <= 4. IGNORE is true if the result of the
11083 function call is ignored. This function returns NULL_TREE if no
11084 simplification was possible. Note that this only folds builtins with
11085 fixed argument patterns. Foldings that do varargs-to-varargs
11086 transformations, or that match calls with more than 4 arguments,
11087 need to be handled with fold_builtin_varargs instead. */
11088
11089 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11090
11091 static tree
11092 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11093 {
11094 tree ret = NULL_TREE;
11095
11096 switch (nargs)
11097 {
11098 case 0:
11099 ret = fold_builtin_0 (loc, fndecl, ignore);
11100 break;
11101 case 1:
11102 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11103 break;
11104 case 2:
11105 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11106 break;
11107 case 3:
11108 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11109 break;
11110 case 4:
11111 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11112 ignore);
11113 break;
11114 default:
11115 break;
11116 }
11117 if (ret)
11118 {
11119 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11120 SET_EXPR_LOCATION (ret, loc);
11121 TREE_NO_WARNING (ret) = 1;
11122 return ret;
11123 }
11124 return NULL_TREE;
11125 }
11126
11127 /* Builtins with folding operations that operate on "..." arguments
11128 need special handling; we need to store the arguments in a convenient
11129 data structure before attempting any folding. Fortunately there are
11130 only a few builtins that fall into this category. FNDECL is the
11131 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11132 result of the function call is ignored. */
11133
11134 static tree
11135 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11136 bool ignore ATTRIBUTE_UNUSED)
11137 {
11138 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11139 tree ret = NULL_TREE;
11140
11141 switch (fcode)
11142 {
11143 case BUILT_IN_SPRINTF_CHK:
11144 case BUILT_IN_VSPRINTF_CHK:
11145 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11146 break;
11147
11148 case BUILT_IN_SNPRINTF_CHK:
11149 case BUILT_IN_VSNPRINTF_CHK:
11150 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11151 break;
11152
11153 case BUILT_IN_FPCLASSIFY:
11154 ret = fold_builtin_fpclassify (loc, exp);
11155 break;
11156
11157 default:
11158 break;
11159 }
11160 if (ret)
11161 {
11162 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11163 SET_EXPR_LOCATION (ret, loc);
11164 TREE_NO_WARNING (ret) = 1;
11165 return ret;
11166 }
11167 return NULL_TREE;
11168 }
11169
11170 /* Return true if FNDECL shouldn't be folded right now.
11171 If a built-in function has an inline attribute always_inline
11172 wrapper, defer folding it until after always_inline functions have
11173 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11174 might not be performed. */
11175
11176 bool
11177 avoid_folding_inline_builtin (tree fndecl)
11178 {
11179 return (DECL_DECLARED_INLINE_P (fndecl)
11180 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11181 && cfun
11182 && !cfun->always_inline_functions_inlined
11183 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11184 }
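
/* A sketch of what this guards against, modeled on glibc's fortified
   headers (the wrapper below is illustrative, not part of GCC):

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
                                      __builtin_object_size (__dest, 1));
     }

   Folding a call to this wrapper as if it were the strcpy builtin
   before the wrapper is inlined would bypass the object-size check.  */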
11185
11186 /* A wrapper function for builtin folding that prevents warnings for
11187 "statement without effect" and the like, caused by removing the
11188 call node earlier than the warning is generated. */
11189
11190 tree
11191 fold_call_expr (location_t loc, tree exp, bool ignore)
11192 {
11193 tree ret = NULL_TREE;
11194 tree fndecl = get_callee_fndecl (exp);
11195 if (fndecl
11196 && TREE_CODE (fndecl) == FUNCTION_DECL
11197 && DECL_BUILT_IN (fndecl)
11198 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11199 yet. Defer folding until we see all the arguments
11200 (after inlining). */
11201 && !CALL_EXPR_VA_ARG_PACK (exp))
11202 {
11203 int nargs = call_expr_nargs (exp);
11204
11205 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11206 instead last argument is __builtin_va_arg_pack (). Defer folding
11207 even in that case, until arguments are finalized. */
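/* For instance (illustrative, adapted from the GCC manual), a
   forwarding wrapper such as

     extern inline __attribute__ ((__always_inline__)) int
     my_printf (const char *fmt, ...)
     {
       return printf (fmt, __builtin_va_arg_pack ());
     }

   keeps a trailing __builtin_va_arg_pack () in the printf call until
   the wrapper is inlined into its callers.  */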
11208 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11209 {
11210 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11211 if (fndecl2
11212 && TREE_CODE (fndecl2) == FUNCTION_DECL
11213 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11214 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11215 return NULL_TREE;
11216 }
11217
11218 if (avoid_folding_inline_builtin (fndecl))
11219 return NULL_TREE;
11220
11221 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11222 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11223 CALL_EXPR_ARGP (exp), ignore);
11224 else
11225 {
11226 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11227 {
11228 tree *args = CALL_EXPR_ARGP (exp);
11229 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11230 }
11231 if (!ret)
11232 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11233 if (ret)
11234 return ret;
11235 }
11236 }
11237 return NULL_TREE;
11238 }
11239
11240 /* Conveniently construct a function call expression. FNDECL names the
11241 function to be called and N arguments are passed in the array
11242 ARGARRAY. */
11243
11244 tree
11245 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11246 {
11247 tree fntype = TREE_TYPE (fndecl);
11248 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11249
11250 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11251 }
11252
11253 /* Conveniently construct a function call expression. FNDECL names the
11254 function to be called and the arguments are passed in the vector
11255 VEC. */
11256
11257 tree
11258 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
11259 {
11260 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
11261 VEC_address (tree, vec));
11262 }
11263
11264
11265 /* Conveniently construct a function call expression. FNDECL names the
11266 function to be called, N is the number of arguments, and the "..."
11267 parameters are the argument expressions. */
11268
11269 tree
11270 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11271 {
11272 va_list ap;
11273 tree *argarray = XALLOCAVEC (tree, n);
11274 int i;
11275
11276 va_start (ap, n);
11277 for (i = 0; i < n; i++)
11278 argarray[i] = va_arg (ap, tree);
11279 va_end (ap);
11280 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11281 }
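
/* For example, the strstr simplification later in this file builds
   its strchr replacement as

     build_call_expr_loc (loc, fn, 2, s1,
                          build_int_cst (integer_type_node, p2[0]));

   where FN is the implicit declaration of strchr.  */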
11282
11283 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11284 varargs macros aren't supported by all bootstrap compilers. */
11285
11286 tree
11287 build_call_expr (tree fndecl, int n, ...)
11288 {
11289 va_list ap;
11290 tree *argarray = XALLOCAVEC (tree, n);
11291 int i;
11292
11293 va_start (ap, n);
11294 for (i = 0; i < n; i++)
11295 argarray[i] = va_arg (ap, tree);
11296 va_end (ap);
11297 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11298 }
11299
11300 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11301 N arguments are passed in the array ARGARRAY. */
11302
11303 tree
11304 fold_builtin_call_array (location_t loc, tree type,
11305 tree fn,
11306 int n,
11307 tree *argarray)
11308 {
11309 tree ret = NULL_TREE;
11310 tree exp;
11311
11312 if (TREE_CODE (fn) == ADDR_EXPR)
11313 {
11314 tree fndecl = TREE_OPERAND (fn, 0);
11315 if (TREE_CODE (fndecl) == FUNCTION_DECL
11316 && DECL_BUILT_IN (fndecl))
11317 {
11318 /* If last argument is __builtin_va_arg_pack (), arguments to this
11319 function are not finalized yet. Defer folding until they are. */
11320 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11321 {
11322 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11323 if (fndecl2
11324 && TREE_CODE (fndecl2) == FUNCTION_DECL
11325 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11326 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11327 return build_call_array_loc (loc, type, fn, n, argarray);
11328 }
11329 if (avoid_folding_inline_builtin (fndecl))
11330 return build_call_array_loc (loc, type, fn, n, argarray);
11331 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11332 {
11333 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11334 if (ret)
11335 return ret;
11336
11337 return build_call_array_loc (loc, type, fn, n, argarray);
11338 }
11339 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11340 {
11341 /* First try the transformations that don't require consing up
11342 an exp. */
11343 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11344 if (ret)
11345 return ret;
11346 }
11347
11348 /* If we got this far, we need to build an exp. */
11349 exp = build_call_array_loc (loc, type, fn, n, argarray);
11350 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11351 return ret ? ret : exp;
11352 }
11353 }
11354
11355 return build_call_array_loc (loc, type, fn, n, argarray);
11356 }
11357
11358 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11359 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11360 of arguments in ARGS to be omitted. OLDNARGS is the number of
11361 elements in ARGS. */
11362
11363 static tree
11364 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11365 int skip, tree fndecl, int n, va_list newargs)
11366 {
11367 int nargs = oldnargs - skip + n;
11368 tree *buffer;
11369
11370 if (n > 0)
11371 {
11372 int i, j;
11373
11374 buffer = XALLOCAVEC (tree, nargs);
11375 for (i = 0; i < n; i++)
11376 buffer[i] = va_arg (newargs, tree);
11377 for (j = skip; j < oldnargs; j++, i++)
11378 buffer[i] = args[j];
11379 }
11380 else
11381 buffer = args + skip;
11382
11383 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11384 }
11385
11386 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11387 list ARGS along with N new arguments specified as the "..."
11388 parameters. SKIP is the number of arguments in ARGS to be omitted.
11389 OLDNARGS is the number of elements in ARGS. */
11390
11391 static tree
11392 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11393 int skip, tree fndecl, int n, ...)
11394 {
11395 va_list ap;
11396 tree t;
11397
11398 va_start (ap, n);
11399 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11400 va_end (ap);
11401
11402 return t;
11403 }
11404
11405 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11406 along with N new arguments specified as the "..." parameters. SKIP
11407 is the number of arguments in EXP to be omitted. This function is used
11408 to do varargs-to-varargs transformations. */
11409
11410 static tree
11411 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11412 {
11413 va_list ap;
11414 tree t;
11415
11416 va_start (ap, n);
11417 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11418 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11419 va_end (ap);
11420
11421 return t;
11422 }
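
/* A hedged sketch of a typical use: rewriting the checked call
   __sprintf_chk (dest, flag, size, fmt, ...) into plain
   sprintf (dest, fmt, ...) can skip all four leading arguments and
   re-supply the two that survive:

     rewrite_call_expr (loc, exp, 4, sprintf_decl, 2,
                        CALL_EXPR_ARG (exp, 0), CALL_EXPR_ARG (exp, 3));

   Here SPRINTF_DECL stands for the sprintf FUNCTION_DECL and is
   assumed for the example; the trailing varargs are carried over
   unchanged.  */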
11423
11424 /* Validate a single argument ARG against a tree code CODE representing
11425 a type. */
11426
11427 static bool
11428 validate_arg (const_tree arg, enum tree_code code)
11429 {
11430 if (!arg)
11431 return false;
11432 else if (code == POINTER_TYPE)
11433 return POINTER_TYPE_P (TREE_TYPE (arg));
11434 else if (code == INTEGER_TYPE)
11435 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11436 return code == TREE_CODE (TREE_TYPE (arg));
11437 }
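
/* For example, the string folders below typically begin with

     if (!validate_arg (s1, POINTER_TYPE)
         || !validate_arg (s2, POINTER_TYPE))
       return NULL_TREE;

   Note the deliberate looseness: INTEGER_TYPE accepts any integral
   type and POINTER_TYPE any pointer type.  */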
11438
11439 /* This function validates the types of a function call argument list
11440 against a specified list of tree_codes. If the last specifier is a 0,
11441 that represents an ellipsis; otherwise the last specifier must be a
11442 VOID_TYPE.
11443
11444 This is the GIMPLE version of validate_arglist. Eventually we want to
11445 completely convert builtins.c to work from GIMPLEs and the tree based
11446 validate_arglist will then be removed. */
11447
11448 bool
11449 validate_gimple_arglist (const_gimple call, ...)
11450 {
11451 enum tree_code code;
11452 bool res = false;
11453 va_list ap;
11454 const_tree arg;
11455 size_t i;
11456
11457 va_start (ap, call);
11458 i = 0;
11459
11460 do
11461 {
11462 code = (enum tree_code) va_arg (ap, int);
11463 switch (code)
11464 {
11465 case 0:
11466 /* This signifies an ellipsis; any further arguments are all ok. */
11467 res = true;
11468 goto end;
11469 case VOID_TYPE:
11470 /* This signifies an endlink; if no arguments remain, return
11471 true, otherwise return false. */
11472 res = (i == gimple_call_num_args (call));
11473 goto end;
11474 default:
11475 /* If no parameters remain or the parameter's code does not
11476 match the specified code, return false. Otherwise continue
11477 checking any remaining arguments. */
11478 arg = gimple_call_arg (call, i++);
11479 if (!validate_arg (arg, code))
11480 goto end;
11481 break;
11482 }
11483 }
11484 while (1);
11485
11486 /* We need gotos here since we can only have one VA_CLOSE in a
11487 function. */
11488 end: ;
11489 va_end (ap);
11490
11491 return res;
11492 }
11493
11494 /* This function validates the types of a function call argument list
11495 against a specified list of tree_codes. If the last specifier is a 0,
11496 that represents an ellipsis; otherwise the last specifier must be a
11497 VOID_TYPE. */
11498
11499 bool
11500 validate_arglist (const_tree callexpr, ...)
11501 {
11502 enum tree_code code;
11503 bool res = false;
11504 va_list ap;
11505 const_call_expr_arg_iterator iter;
11506 const_tree arg;
11507
11508 va_start (ap, callexpr);
11509 init_const_call_expr_arg_iterator (callexpr, &iter);
11510
11511 do
11512 {
11513 code = (enum tree_code) va_arg (ap, int);
11514 switch (code)
11515 {
11516 case 0:
11517 /* This signifies an ellipsis; any further arguments are all ok. */
11518 res = true;
11519 goto end;
11520 case VOID_TYPE:
11521 /* This signifies an endlink; if no arguments remain, return
11522 true, otherwise return false. */
11523 res = !more_const_call_expr_args_p (&iter);
11524 goto end;
11525 default:
11526 /* If no parameters remain or the parameter's code does not
11527 match the specified code, return false. Otherwise continue
11528 checking any remaining arguments. */
11529 arg = next_const_call_expr_arg (&iter);
11530 if (!validate_arg (arg, code))
11531 goto end;
11532 break;
11533 }
11534 }
11535 while (1);
11536
11537 /* We need gotos here since we can only have one VA_CLOSE in a
11538 function. */
11539 end: ;
11540 va_end (ap);
11541
11542 return res;
11543 }
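
/* A typical spec list ends in VOID_TYPE for a fixed signature, e.g.

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   for strcpy (char *, const char *), or in 0 when trailing varargs
   are allowed, e.g.

     validate_arglist (exp, POINTER_TYPE, 0)

   for printf (const char *, ...).  */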
11544
11545 /* Default target-specific builtin expander that does nothing. */
11546
11547 rtx
11548 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11549 rtx target ATTRIBUTE_UNUSED,
11550 rtx subtarget ATTRIBUTE_UNUSED,
11551 enum machine_mode mode ATTRIBUTE_UNUSED,
11552 int ignore ATTRIBUTE_UNUSED)
11553 {
11554 return NULL_RTX;
11555 }
11556
11557 /* Returns true if EXP represents data that would potentially reside
11558 in a readonly section. */
11559
11560 static bool
11561 readonly_data_expr (tree exp)
11562 {
11563 STRIP_NOPS (exp);
11564
11565 if (TREE_CODE (exp) != ADDR_EXPR)
11566 return false;
11567
11568 exp = get_base_address (TREE_OPERAND (exp, 0));
11569 if (!exp)
11570 return false;
11571
11572 /* Make sure we call decl_readonly_section only for trees it
11573 can handle (since it returns true for everything it doesn't
11574 understand). */
11575 if (TREE_CODE (exp) == STRING_CST
11576 || TREE_CODE (exp) == CONSTRUCTOR
11577 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11578 return decl_readonly_section (exp, 0);
11579 else
11580 return false;
11581 }
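
/* For instance, given

     static const char msg[] = "hi";
     char buf[4];

   the address of MSG counts as readonly data (assuming the target
   places it in a readonly section) while the address of BUF does
   not.  */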
11582
11583 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11584 to the call, and TYPE is its return type.
11585
11586 Return NULL_TREE if no simplification was possible, otherwise return the
11587 simplified form of the call as a tree.
11588
11589 The simplified form may be a constant or other expression which
11590 computes the same value, but in a more efficient manner (including
11591 calls to other builtin functions).
11592
11593 The call may contain arguments which need to be evaluated, but
11594 which are not useful to determine the result of the call. In
11595 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11596 COMPOUND_EXPR will be an argument which must be evaluated.
11597 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11598 COMPOUND_EXPR in the chain will contain the tree for the simplified
11599 form of the builtin function call. */
11600
11601 static tree
11602 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11603 {
11604 if (!validate_arg (s1, POINTER_TYPE)
11605 || !validate_arg (s2, POINTER_TYPE))
11606 return NULL_TREE;
11607 else
11608 {
11609 tree fn;
11610 const char *p1, *p2;
11611
11612 p2 = c_getstr (s2);
11613 if (p2 == NULL)
11614 return NULL_TREE;
11615
11616 p1 = c_getstr (s1);
11617 if (p1 != NULL)
11618 {
11619 const char *r = strstr (p1, p2);
11620 tree tem;
11621
11622 if (r == NULL)
11623 return build_int_cst (TREE_TYPE (s1), 0);
11624
11625 /* Return an offset into the constant string argument. */
11626 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11627 return fold_convert_loc (loc, type, tem);
11628 }
11629
11630 /* The argument is const char *, and the result is char *, so we need
11631 a type conversion here to avoid a warning. */
11632 if (p2[0] == '\0')
11633 return fold_convert_loc (loc, type, s1);
11634
11635 if (p2[1] != '\0')
11636 return NULL_TREE;
11637
11638 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11639 if (!fn)
11640 return NULL_TREE;
11641
11642 /* New argument list transforming strstr(s1, s2) to
11643 strchr(s1, s2[0]). */
11644 return build_call_expr_loc (loc, fn, 2, s1,
11645 build_int_cst (integer_type_node, p2[0]));
11646 }
11647 }
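
/* Summarizing the cases above:

     strstr (s, "")          ->  (char *) s
     strstr (s, "c")         ->  strchr (s, 'c')
     strstr ("hello", "ll")  ->  "hello" + 2   (both strings constant)
     strstr ("hello", "z")   ->  (char *) 0  */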
11648
11649 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11650 the call, and TYPE is its return type.
11651
11652 Return NULL_TREE if no simplification was possible, otherwise return the
11653 simplified form of the call as a tree.
11654
11655 The simplified form may be a constant or other expression which
11656 computes the same value, but in a more efficient manner (including
11657 calls to other builtin functions).
11658
11659 The call may contain arguments which need to be evaluated, but
11660 which are not useful to determine the result of the call. In
11661 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11662 COMPOUND_EXPR will be an argument which must be evaluated.
11663 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11664 COMPOUND_EXPR in the chain will contain the tree for the simplified
11665 form of the builtin function call. */
11666
11667 static tree
11668 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11669 {
11670 if (!validate_arg (s1, POINTER_TYPE)
11671 || !validate_arg (s2, INTEGER_TYPE))
11672 return NULL_TREE;
11673 else
11674 {
11675 const char *p1;
11676
11677 if (TREE_CODE (s2) != INTEGER_CST)
11678 return NULL_TREE;
11679
11680 p1 = c_getstr (s1);
11681 if (p1 != NULL)
11682 {
11683 char c;
11684 const char *r;
11685 tree tem;
11686
11687 if (target_char_cast (s2, &c))
11688 return NULL_TREE;
11689
11690 r = strchr (p1, c);
11691
11692 if (r == NULL)
11693 return build_int_cst (TREE_TYPE (s1), 0);
11694
11695 /* Return an offset into the constant string argument. */
11696 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11697 return fold_convert_loc (loc, type, tem);
11698 }
11699 return NULL_TREE;
11700 }
11701 }
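
/* Only the fully constant case is handled here, e.g.

     strchr ("hello", 'l')  ->  "hello" + 2
     strchr ("hello", 'z')  ->  (char *) 0  */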
11702
11703 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11704 the call, and TYPE is its return type.
11705
11706 Return NULL_TREE if no simplification was possible, otherwise return the
11707 simplified form of the call as a tree.
11708
11709 The simplified form may be a constant or other expression which
11710 computes the same value, but in a more efficient manner (including
11711 calls to other builtin functions).
11712
11713 The call may contain arguments which need to be evaluated, but
11714 which are not useful to determine the result of the call. In
11715 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11716 COMPOUND_EXPR will be an argument which must be evaluated.
11717 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11718 COMPOUND_EXPR in the chain will contain the tree for the simplified
11719 form of the builtin function call. */
11720
11721 static tree
11722 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11723 {
11724 if (!validate_arg (s1, POINTER_TYPE)
11725 || !validate_arg (s2, INTEGER_TYPE))
11726 return NULL_TREE;
11727 else
11728 {
11729 tree fn;
11730 const char *p1;
11731
11732 if (TREE_CODE (s2) != INTEGER_CST)
11733 return NULL_TREE;
11734
11735 p1 = c_getstr (s1);
11736 if (p1 != NULL)
11737 {
11738 char c;
11739 const char *r;
11740 tree tem;
11741
11742 if (target_char_cast (s2, &c))
11743 return NULL_TREE;
11744
11745 r = strrchr (p1, c);
11746
11747 if (r == NULL)
11748 return build_int_cst (TREE_TYPE (s1), 0);
11749
11750 /* Return an offset into the constant string argument. */
11751 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11752 return fold_convert_loc (loc, type, tem);
11753 }
11754
11755 if (! integer_zerop (s2))
11756 return NULL_TREE;
11757
11758 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11759 if (!fn)
11760 return NULL_TREE;
11761
11762 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11763 return build_call_expr_loc (loc, fn, 2, s1, s2);
11764 }
11765 }
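
/* Summarizing:

     strrchr ("hello", 'l')  ->  "hello" + 3
     strrchr (s, '\0')       ->  strchr (s, '\0')

   the latter because a forward scan for the terminator with strchr
   is typically the cheaper call.  */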
11766
11767 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11768 to the call, and TYPE is its return type.
11769
11770 Return NULL_TREE if no simplification was possible, otherwise return the
11771 simplified form of the call as a tree.
11772
11773 The simplified form may be a constant or other expression which
11774 computes the same value, but in a more efficient manner (including
11775 calls to other builtin functions).
11776
11777 The call may contain arguments which need to be evaluated, but
11778 which are not useful to determine the result of the call. In
11779 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11780 COMPOUND_EXPR will be an argument which must be evaluated.
11781 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11782 COMPOUND_EXPR in the chain will contain the tree for the simplified
11783 form of the builtin function call. */
11784
11785 static tree
11786 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11787 {
11788 if (!validate_arg (s1, POINTER_TYPE)
11789 || !validate_arg (s2, POINTER_TYPE))
11790 return NULL_TREE;
11791 else
11792 {
11793 tree fn;
11794 const char *p1, *p2;
11795
11796 p2 = c_getstr (s2);
11797 if (p2 == NULL)
11798 return NULL_TREE;
11799
11800 p1 = c_getstr (s1);
11801 if (p1 != NULL)
11802 {
11803 const char *r = strpbrk (p1, p2);
11804 tree tem;
11805
11806 if (r == NULL)
11807 return build_int_cst (TREE_TYPE (s1), 0);
11808
11809 /* Return an offset into the constant string argument. */
11810 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11811 return fold_convert_loc (loc, type, tem);
11812 }
11813
11814 if (p2[0] == '\0')
11815 /* strpbrk(x, "") == NULL.
11816 Evaluate and ignore s1 in case it had side-effects. */
11817 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11818
11819 if (p2[1] != '\0')
11820 return NULL_TREE; /* Really call strpbrk. */
11821
11822 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11823 if (!fn)
11824 return NULL_TREE;
11825
11826 /* New argument list transforming strpbrk(s1, s2) to
11827 strchr(s1, s2[0]). */
11828 return build_call_expr_loc (loc, fn, 2, s1,
11829 build_int_cst (integer_type_node, p2[0]));
11830 }
11831 }
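
/* Summarizing:

     strpbrk ("hello", "lo")  ->  "hello" + 2
     strpbrk (s, "")          ->  (s, (char *) 0)   (COMPOUND_EXPR)
     strpbrk (s, "c")         ->  strchr (s, 'c')  */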
11832
11833 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11834 to the call.
11835
11836 Return NULL_TREE if no simplification was possible, otherwise return the
11837 simplified form of the call as a tree.
11838
11839 The simplified form may be a constant or other expression which
11840 computes the same value, but in a more efficient manner (including
11841 calls to other builtin functions).
11842
11843 The call may contain arguments which need to be evaluated, but
11844 which are not useful to determine the result of the call. In
11845 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11846 COMPOUND_EXPR will be an argument which must be evaluated.
11847 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11848 COMPOUND_EXPR in the chain will contain the tree for the simplified
11849 form of the builtin function call. */
11850
11851 static tree
11852 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11853 {
11854 if (!validate_arg (dst, POINTER_TYPE)
11855 || !validate_arg (src, POINTER_TYPE))
11856 return NULL_TREE;
11857 else
11858 {
11859 const char *p = c_getstr (src);
11860
11861 /* If the string length is zero, return the dst parameter. */
11862 if (p && *p == '\0')
11863 return dst;
11864
11865 if (optimize_insn_for_speed_p ())
11866 {
11867 /* See if we can store by pieces into (dst + strlen(dst)). */
11868 tree newdst, call;
11869 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11870 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11871
11872 if (!strlen_fn || !strcpy_fn)
11873 return NULL_TREE;
11874
11875 /* If we don't have a movstr pattern, only emit a strcpy call
11876 when the length of the source string is computable; with a
11877 known length the call can later be expanded via memcpy,
11878 probably as a sequence of mov instructions. If we have
11879 movstr instructions we can emit strcpy calls unconditionally. */
11880 if (!HAVE_movstr)
11881 {
11882 tree len = c_strlen (src, 1);
11883 if (! len || TREE_SIDE_EFFECTS (len))
11884 return NULL_TREE;
11885 }
11886
11887 /* Stabilize the argument list. */
11888 dst = builtin_save_expr (dst);
11889
11890 /* Create strlen (dst). */
11891 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11892 /* Create (dst p+ strlen (dst)). */
11893
11894 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11895 newdst = builtin_save_expr (newdst);
11896
11897 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11898 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11899 }
11900 return NULL_TREE;
11901 }
11902 }
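
/* The transformation above effectively rewrites

     strcat (dst, src)

   into the equivalent of

     (strcpy (dst + strlen (dst), src), dst)

   so that the strcpy, whose source length may be a known constant,
   can in turn be expanded by pieces.  */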
11903
11904 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11905 arguments to the call.
11906
11907 Return NULL_TREE if no simplification was possible, otherwise return the
11908 simplified form of the call as a tree.
11909
11910 The simplified form may be a constant or other expression which
11911 computes the same value, but in a more efficient manner (including
11912 calls to other builtin functions).
11913
11914 The call may contain arguments which need to be evaluated, but
11915 which are not useful to determine the result of the call. In
11916 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11917 COMPOUND_EXPR will be an argument which must be evaluated.
11918 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11919 COMPOUND_EXPR in the chain will contain the tree for the simplified
11920 form of the builtin function call. */
11921
11922 static tree
11923 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11924 {
11925 if (!validate_arg (dst, POINTER_TYPE)
11926 || !validate_arg (src, POINTER_TYPE)
11927 || !validate_arg (len, INTEGER_TYPE))
11928 return NULL_TREE;
11929 else
11930 {
11931 const char *p = c_getstr (src);
11932
11933 /* If the requested length is zero, or the src parameter string
11934 length is zero, return the dst parameter. */
11935 if (integer_zerop (len) || (p && *p == '\0'))
11936 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11937
11938 /* If the requested len is greater than or equal to the string
11939 length, call strcat. */
11940 if (TREE_CODE (len) == INTEGER_CST && p
11941 && compare_tree_int (len, strlen (p)) >= 0)
11942 {
11943 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11944
11945 /* If the replacement _DECL isn't initialized, don't do the
11946 transformation. */
11947 if (!fn)
11948 return NULL_TREE;
11949
11950 return build_call_expr_loc (loc, fn, 2, dst, src);
11951 }
11952 return NULL_TREE;
11953 }
11954 }
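
/* Summarizing:

     strncat (dst, src, 0)   ->  dst
     strncat (dst, "", n)    ->  dst
     strncat (dst, "ab", 5)  ->  strcat (dst, "ab")   (5 >= strlen ("ab"))

   Side effects of the dropped arguments are preserved via
   omit_two_operands_loc.  */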
11955
11956 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11957 to the call.
11958
11959 Return NULL_TREE if no simplification was possible, otherwise return the
11960 simplified form of the call as a tree.
11961
11962 The simplified form may be a constant or other expression which
11963 computes the same value, but in a more efficient manner (including
11964 calls to other builtin functions).
11965
11966 The call may contain arguments which need to be evaluated, but
11967 which are not useful to determine the result of the call. In
11968 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11969 COMPOUND_EXPR will be an argument which must be evaluated.
11970 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11971 COMPOUND_EXPR in the chain will contain the tree for the simplified
11972 form of the builtin function call. */
11973
11974 static tree
11975 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11976 {
11977 if (!validate_arg (s1, POINTER_TYPE)
11978 || !validate_arg (s2, POINTER_TYPE))
11979 return NULL_TREE;
11980 else
11981 {
11982 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11983
11984 /* If both arguments are constants, evaluate at compile-time. */
11985 if (p1 && p2)
11986 {
11987 const size_t r = strspn (p1, p2);
11988 return size_int (r);
11989 }
11990
11991 /* If either argument is "", the result is zero. */
11992 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11993 /* Evaluate and ignore both arguments in case either one has
11994 side-effects. */
11995 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11996 s1, s2);
11997 return NULL_TREE;
11998 }
11999 }
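
/* Summarizing:

     strspn ("hello", "lhe")  ->  4
     strspn (s, "")           ->  0   (S still evaluated)
     strspn ("", s)           ->  0   (S still evaluated)  */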
12000
12001 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
12002 to the call.
12003
12004 Return NULL_TREE if no simplification was possible, otherwise return the
12005 simplified form of the call as a tree.
12006
12007 The simplified form may be a constant or other expression which
12008 computes the same value, but in a more efficient manner (including
12009 calls to other builtin functions).
12010
12011 The call may contain arguments which need to be evaluated, but
12012 which are not useful to determine the result of the call. In
12013 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12014 COMPOUND_EXPR will be an argument which must be evaluated.
12015 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12016 COMPOUND_EXPR in the chain will contain the tree for the simplified
12017 form of the builtin function call. */
12018
12019 static tree
12020 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
12021 {
12022 if (!validate_arg (s1, POINTER_TYPE)
12023 || !validate_arg (s2, POINTER_TYPE))
12024 return NULL_TREE;
12025 else
12026 {
12027 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
12028
12029 /* If both arguments are constants, evaluate at compile-time. */
12030 if (p1 && p2)
12031 {
12032 const size_t r = strcspn (p1, p2);
12033 return size_int (r);
12034 }
12035
12036 /* If the first argument is "", the result is zero. */
12037 if (p1 && *p1 == '\0')
12038 {
12039 /* Evaluate and ignore argument s2 in case it has
12040 side-effects. */
12041 return omit_one_operand_loc (loc, size_type_node,
12042 size_zero_node, s2);
12043 }
12044
12045 /* If the second argument is "", return __builtin_strlen(s1). */
12046 if (p2 && *p2 == '\0')
12047 {
12048 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
12049
12050 /* If the replacement _DECL isn't initialized, don't do the
12051 transformation. */
12052 if (!fn)
12053 return NULL_TREE;
12054
12055 return build_call_expr_loc (loc, fn, 1, s1);
12056 }
12057 return NULL_TREE;
12058 }
12059 }
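
/* Summarizing:

     strcspn ("hello", "l")  ->  2
     strcspn ("", s)         ->  0   (S still evaluated)
     strcspn (s, "")         ->  strlen (s)  */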
12060
12061 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
12062 to the call. IGNORE is true if the value returned
12063 by the builtin will be ignored. UNLOCKED is true if this is
12064 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
12065 the known length of the string. Return NULL_TREE if no simplification
12066 was possible. */
12067
12068 tree
12069 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
12070 bool ignore, bool unlocked, tree len)
12071 {
12072 /* If we're using an unlocked function, assume the other unlocked
12073 functions exist explicitly. */
12074 tree const fn_fputc = (unlocked
12075 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
12076 : builtin_decl_implicit (BUILT_IN_FPUTC));
12077 tree const fn_fwrite = (unlocked
12078 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
12079 : builtin_decl_implicit (BUILT_IN_FWRITE));
12080
12081 /* If the return value is used, don't do the transformation. */
12082 if (!ignore)
12083 return NULL_TREE;
12084
12085 /* Verify the arguments in the original call. */
12086 if (!validate_arg (arg0, POINTER_TYPE)
12087 || !validate_arg (arg1, POINTER_TYPE))
12088 return NULL_TREE;
12089
12090 if (! len)
12091 len = c_strlen (arg0, 0);
12092
12093 /* Get the length of the string passed to fputs. If the length
12094 can't be determined, punt. */
12095 if (!len
12096 || TREE_CODE (len) != INTEGER_CST)
12097 return NULL_TREE;
12098
12099 switch (compare_tree_int (len, 1))
12100 {
12101 case -1: /* length is 0, delete the call entirely. */
12102 return omit_one_operand_loc (loc, integer_type_node,
12103 integer_zero_node, arg1);
12104
12105 case 0: /* length is 1, call fputc. */
12106 {
12107 const char *p = c_getstr (arg0);
12108
12109 if (p != NULL)
12110 {
12111 if (fn_fputc)
12112 return build_call_expr_loc (loc, fn_fputc, 2,
12113 build_int_cst
12114 (integer_type_node, p[0]), arg1);
12115 else
12116 return NULL_TREE;
12117 }
12118 }
12119 /* FALLTHROUGH */
12120 case 1: /* length is greater than 1, call fwrite. */
12121 {
12122 /* If optimizing for size, keep fputs. */
12123 if (optimize_function_for_size_p (cfun))
12124 return NULL_TREE;
12125 /* New argument list transforming fputs(string, stream) to
12126 fwrite(string, 1, len, stream). */
12127 if (fn_fwrite)
12128 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12129 size_one_node, len, arg1);
12130 else
12131 return NULL_TREE;
12132 }
12133 default:
12134 gcc_unreachable ();
12135 }
12136 return NULL_TREE;
12137 }
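
/* Summarizing (all only when the result is ignored):

     fputs ("", f)    ->  call removed, F still evaluated
     fputs ("a", f)   ->  fputc ('a', f)
     fputs ("ab", f)  ->  fwrite ("ab", 1, 2, f)   (not when optimizing
                                                    for size)  */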
12138
12139 /* Fold the next_arg or va_start call EXP. Returns true if an error
12140 was produced, false otherwise. This is done so that we don't output
12141 the error or warning two or three times. */
12142
12143 bool
12144 fold_builtin_next_arg (tree exp, bool va_start_p)
12145 {
12146 tree fntype = TREE_TYPE (current_function_decl);
12147 int nargs = call_expr_nargs (exp);
12148 tree arg;
12149 /* There is a good chance the current input_location points inside the
12150 definition of the va_start macro (perhaps on the token for
12151 builtin) in a system header, so warnings will not be emitted.
12152 Use the location in real source code. */
12153 source_location current_location =
12154 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12155 NULL);
12156
12157 if (!stdarg_p (fntype))
12158 {
12159 error ("%<va_start%> used in function with fixed args");
12160 return true;
12161 }
12162
12163 if (va_start_p)
12164 {
12165 if (nargs != 2)
12166 {
12167 error ("wrong number of arguments to function %<va_start%>");
12168 return true;
12169 }
12170 arg = CALL_EXPR_ARG (exp, 1);
12171 }
12172 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12173 once we have checked the arguments and, if needed, issued a warning. */
12174 else
12175 {
12176 if (nargs == 0)
12177 {
12178 /* Evidently an out-of-date version of <stdarg.h>; can't validate
12179 va_start's second argument, but can still work as intended. */
12180 warning_at (current_location,
12181 OPT_Wvarargs,
12182 "%<__builtin_next_arg%> called without an argument");
12183 return true;
12184 }
12185 else if (nargs > 1)
12186 {
12187 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12188 return true;
12189 }
12190 arg = CALL_EXPR_ARG (exp, 0);
12191 }
12192
12193 if (TREE_CODE (arg) == SSA_NAME)
12194 arg = SSA_NAME_VAR (arg);
12195
12196 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12197 or __builtin_next_arg (0) the first time we see it, after checking
12198 the arguments and if needed issuing a warning. */
12199 if (!integer_zerop (arg))
12200 {
12201 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12202
12203 /* Strip off all nops for the sake of the comparison. This
12204 is not quite the same as STRIP_NOPS. It does more.
12205 We must also strip off INDIRECT_EXPR for C++ reference
12206 parameters. */
12207 while (CONVERT_EXPR_P (arg)
12208 || TREE_CODE (arg) == INDIRECT_REF)
12209 arg = TREE_OPERAND (arg, 0);
12210 if (arg != last_parm)
12211 {
12212 /* FIXME: Sometimes with the tree optimizers we can end up with
12213 something that is not the last argument even though the user
12214 used the last argument. We just warn and set the arg to be the
12215 last argument so that we will not get wrong code because of
12216 it. */
12217 warning_at (current_location,
12218 OPT_Wvarargs,
12219 "second parameter of %<va_start%> not last named argument");
12220 }
12221
12222 /* Undefined by C99 7.15.1.4p4 (va_start):
12223 "If the parameter parmN is declared with the register storage
12224 class, with a function or array type, or with a type that is
12225 not compatible with the type that results after application of
12226 the default argument promotions, the behavior is undefined."
12227 */
12228 else if (DECL_REGISTER (arg))
12229 {
12230 warning_at (current_location,
12231 OPT_Wvarargs,
12232 "undefined behaviour when second parameter of "
12233 "%<va_start%> is declared with %<register%> storage");
12234 }
12235
12236 /* We want to verify the second parameter just once before the tree
12237 optimizers are run and then avoid keeping it in the tree,
12238 as otherwise we could warn even for correct code like:
12239 void foo (int i, ...)
12240 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12241 if (va_start_p)
12242 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12243 else
12244 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12245 }
12246 return false;
12247 }
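
/* For example (a sketch; F, A and B are hypothetical):

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   <-- warns: A is not the last named argument
     }

   After the check the argument slot is overwritten with 0, so later
   passes never re-examine (and never re-diagnose) the parameter.  */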
12248
12249
12250 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12251 ORIG may be null if this is a 2-argument call. We don't attempt to
12252 simplify calls with more than 3 arguments.
12253
12254 Return NULL_TREE if no simplification was possible, otherwise return the
12255 simplified form of the call as a tree. If IGNORED is true, it means that
12256 the caller does not use the returned value of the function. */
12257
12258 static tree
12259 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12260 tree orig, int ignored)
12261 {
12262 tree call, retval;
12263 const char *fmt_str = NULL;
12264
12265 /* Verify the required arguments in the original call. We deal with two
12266 types of sprintf() calls: 'sprintf (str, fmt)' and
12267 'sprintf (dest, "%s", orig)'. */
12268 if (!validate_arg (dest, POINTER_TYPE)
12269 || !validate_arg (fmt, POINTER_TYPE))
12270 return NULL_TREE;
12271 if (orig && !validate_arg (orig, POINTER_TYPE))
12272 return NULL_TREE;
12273
12274 /* Check whether the format is a literal string constant. */
12275 fmt_str = c_getstr (fmt);
12276 if (fmt_str == NULL)
12277 return NULL_TREE;
12278
12279 call = NULL_TREE;
12280 retval = NULL_TREE;
12281
12282 if (!init_target_chars ())
12283 return NULL_TREE;
12284
12285 /* If the format doesn't contain % args or %%, use strcpy. */
12286 if (strchr (fmt_str, target_percent) == NULL)
12287 {
12288 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12289
12290 if (!fn)
12291 return NULL_TREE;
12292
12293 /* Don't optimize sprintf (buf, "abc", ptr++). */
12294 if (orig)
12295 return NULL_TREE;
12296
12297 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12298 'format' is known to contain no % formats. */
12299 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12300 if (!ignored)
12301 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12302 }
12303
12304 /* If the format is "%s", use strcpy if the result isn't used. */
12305 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12306 {
12307 tree fn;
12308 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12309
12310 if (!fn)
12311 return NULL_TREE;
12312
12313 /* Don't crash on sprintf (str1, "%s"). */
12314 if (!orig)
12315 return NULL_TREE;
12316
12317 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12318 if (!ignored)
12319 {
12320 retval = c_strlen (orig, 1);
12321 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12322 return NULL_TREE;
12323 }
12324 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12325 }
12326
12327 if (call && retval)
12328 {
12329 retval = fold_convert_loc
12330 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12331 retval);
12332 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12333 }
12334 else
12335 return call;
12336 }
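
/* Illustrative foldings (DST and SRC are hypothetical char pointers):

     sprintf (dst, "abc");      ->  strcpy (dst, "abc");   value 3 if used
     sprintf (dst, "%s", src);  ->  strcpy (dst, src);

   The "%s" form keeps its value only when strlen (SRC) is a
   compile-time constant; otherwise it is folded only if the result
   is unused.  */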
12337
12338 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12339 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12340 attempt to simplify calls with more than 4 arguments.
12341
12342 Return NULL_TREE if no simplification was possible, otherwise return the
12343 simplified form of the call as a tree. If IGNORED is true, it means that
12344 the caller does not use the returned value of the function. */
12345
12346 static tree
12347 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12348 tree orig, int ignored)
12349 {
12350 tree call, retval;
12351 const char *fmt_str = NULL;
12352 unsigned HOST_WIDE_INT destlen;
12353
12354 /* Verify the required arguments in the original call. We deal with two
12355 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12356 'snprintf (dest, cst, "%s", orig)'. */
12357 if (!validate_arg (dest, POINTER_TYPE)
12358 || !validate_arg (destsize, INTEGER_TYPE)
12359 || !validate_arg (fmt, POINTER_TYPE))
12360 return NULL_TREE;
12361 if (orig && !validate_arg (orig, POINTER_TYPE))
12362 return NULL_TREE;
12363
12364 if (!host_integerp (destsize, 1))
12365 return NULL_TREE;
12366
12367 /* Check whether the format is a literal string constant. */
12368 fmt_str = c_getstr (fmt);
12369 if (fmt_str == NULL)
12370 return NULL_TREE;
12371
12372 call = NULL_TREE;
12373 retval = NULL_TREE;
12374
12375 if (!init_target_chars ())
12376 return NULL_TREE;
12377
12378 destlen = tree_low_cst (destsize, 1);
12379
12380 /* If the format doesn't contain % args or %%, use strcpy. */
12381 if (strchr (fmt_str, target_percent) == NULL)
12382 {
12383 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12384 size_t len = strlen (fmt_str);
12385
12386 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12387 if (orig)
12388 return NULL_TREE;
12389
12390 /* We could expand this as
12391 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12392 or to
12393 memcpy (str, fmt_with_nul_at_cstm1, cst);
12394 but in the former case that might increase code size
12395 and in the latter case grow .rodata section too much.
12396 So punt for now. */
12397 if (len >= destlen)
12398 return NULL_TREE;
12399
12400 if (!fn)
12401 return NULL_TREE;
12402
12403 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12404 'format' is known to contain no % formats and
12405 strlen (fmt) < cst. */
12406 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12407
12408 if (!ignored)
12409 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12410 }
12411
12412 /* If the format is "%s", use strcpy if the result isn't used. */
12413 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12414 {
12415 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12416 unsigned HOST_WIDE_INT origlen;
12417
12418 /* Don't crash on snprintf (str1, cst, "%s"). */
12419 if (!orig)
12420 return NULL_TREE;
12421
12422 retval = c_strlen (orig, 1);
12423 if (!retval || !host_integerp (retval, 1))
12424 return NULL_TREE;
12425
12426 origlen = tree_low_cst (retval, 1);
12427 /* We could expand this as
12428 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12429 or to
12430 memcpy (str1, str2_with_nul_at_cstm1, cst);
12431 but in the former case that might increase code size
12432 and in the latter case grow .rodata section too much.
12433 So punt for now. */
12434 if (origlen >= destlen)
12435 return NULL_TREE;
12436
12437 /* Convert snprintf (str1, cst, "%s", str2) into
12438 strcpy (str1, str2) if strlen (str2) < cst. */
12439 if (!fn)
12440 return NULL_TREE;
12441
12442 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12443
12444 if (ignored)
12445 retval = NULL_TREE;
12446 }
12447
12448 if (call && retval)
12449 {
12450 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12451 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12452 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12453 }
12454 else
12455 return call;
12456 }
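
/* The same idea with the destination size taken into account
   (names hypothetical):

     snprintf (dst, 32, "abc");      ->  strcpy (dst, "abc");   3 < 32
     snprintf (dst, 32, "%s", src);  ->  strcpy (dst, src);     only if
                                         strlen (SRC) is known and < 32

   Once the length can reach the destination size, the call is kept so
   that snprintf's runtime truncation semantics are preserved.  */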
12457
12458 /* Expand a call EXP to __builtin_object_size. */
12459
12460 rtx
12461 expand_builtin_object_size (tree exp)
12462 {
12463 tree ost;
12464 int object_size_type;
12465 tree fndecl = get_callee_fndecl (exp);
12466
12467 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12468 {
12469 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12470 exp, fndecl);
12471 expand_builtin_trap ();
12472 return const0_rtx;
12473 }
12474
12475 ost = CALL_EXPR_ARG (exp, 1);
12476 STRIP_NOPS (ost);
12477
12478 if (TREE_CODE (ost) != INTEGER_CST
12479 || tree_int_cst_sgn (ost) < 0
12480 || compare_tree_int (ost, 3) > 0)
12481 {
12482 error ("%Klast argument of %D is not integer constant between 0 and 3",
12483 exp, fndecl);
12484 expand_builtin_trap ();
12485 return const0_rtx;
12486 }
12487
12488 object_size_type = tree_low_cst (ost, 0);
12489
12490 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12491 }
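
/* E.g. a __builtin_object_size (p, 0) that survives to RTL expansion,
   meaning the objsz pass could not compute anything, expands to
   (size_t) -1, while __builtin_object_size (p, 2) expands to
   (size_t) 0: the documented "unknown" answers for each type.  */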
12492
12493 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12494 FCODE is the BUILT_IN_* to use.
12495 Return NULL_RTX if we failed; the caller should emit a normal call,
12496 otherwise try to get the result in TARGET, if convenient (and in
12497 mode MODE if that's convenient). */
12498
12499 static rtx
12500 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12501 enum built_in_function fcode)
12502 {
12503 tree dest, src, len, size;
12504
12505 if (!validate_arglist (exp,
12506 POINTER_TYPE,
12507 fcode == BUILT_IN_MEMSET_CHK
12508 ? INTEGER_TYPE : POINTER_TYPE,
12509 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12510 return NULL_RTX;
12511
12512 dest = CALL_EXPR_ARG (exp, 0);
12513 src = CALL_EXPR_ARG (exp, 1);
12514 len = CALL_EXPR_ARG (exp, 2);
12515 size = CALL_EXPR_ARG (exp, 3);
12516
12517 if (! host_integerp (size, 1))
12518 return NULL_RTX;
12519
12520 if (host_integerp (len, 1) || integer_all_onesp (size))
12521 {
12522 tree fn;
12523
12524 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12525 {
12526 warning_at (tree_nonartificial_location (exp),
12527 0, "%Kcall to %D will always overflow destination buffer",
12528 exp, get_callee_fndecl (exp));
12529 return NULL_RTX;
12530 }
12531
12532 fn = NULL_TREE;
12533 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12534 mem{cpy,pcpy,move,set} is available. */
12535 switch (fcode)
12536 {
12537 case BUILT_IN_MEMCPY_CHK:
12538 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12539 break;
12540 case BUILT_IN_MEMPCPY_CHK:
12541 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12542 break;
12543 case BUILT_IN_MEMMOVE_CHK:
12544 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12545 break;
12546 case BUILT_IN_MEMSET_CHK:
12547 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12548 break;
12549 default:
12550 break;
12551 }
12552
12553 if (! fn)
12554 return NULL_RTX;
12555
12556 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12557 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12558 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12559 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12560 }
12561 else if (fcode == BUILT_IN_MEMSET_CHK)
12562 return NULL_RTX;
12563 else
12564 {
12565 unsigned int dest_align = get_pointer_alignment (dest);
12566
12567 /* If DEST is not a pointer type, call the normal function. */
12568 if (dest_align == 0)
12569 return NULL_RTX;
12570
12571 /* If SRC and DEST are the same (and not volatile), do nothing. */
12572 if (operand_equal_p (src, dest, 0))
12573 {
12574 tree expr;
12575
12576 if (fcode != BUILT_IN_MEMPCPY_CHK)
12577 {
12578 /* Evaluate and ignore LEN in case it has side-effects. */
12579 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12580 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12581 }
12582
12583 expr = fold_build_pointer_plus (dest, len);
12584 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12585 }
12586
12587 /* __memmove_chk special case. */
12588 if (fcode == BUILT_IN_MEMMOVE_CHK)
12589 {
12590 unsigned int src_align = get_pointer_alignment (src);
12591
12592 if (src_align == 0)
12593 return NULL_RTX;
12594
12595 /* If src is categorized for a readonly section we can use
12596 normal __memcpy_chk. */
12597 if (readonly_data_expr (src))
12598 {
12599 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12600 if (!fn)
12601 return NULL_RTX;
12602 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12603 dest, src, len, size);
12604 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12605 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12606 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12607 }
12608 }
12609 return NULL_RTX;
12610 }
12611 }
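
/* Expansion sketch (BUF, SRC, P, N and OS are hypothetical), given
   char buf[8]:

     __memcpy_chk (buf, src, 5, 8)    ->  memcpy (buf, src, 5)
     __memcpy_chk (buf, src, 16, 8)   ->  warning, normal call emitted
     __memcpy_chk (p, p, n, os)       ->  p   (P + N for __mempcpy_chk)

   A non-constant N with OS == -1 (unknown object size) also takes the
   plain memcpy path, since the check could never fail.  */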
12612
12613 /* Emit warning if a buffer overflow is detected at compile time. */
12614
12615 static void
12616 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12617 {
12618 int is_strlen = 0;
12619 tree len, size;
12620 location_t loc = tree_nonartificial_location (exp);
12621
12622 switch (fcode)
12623 {
12624 case BUILT_IN_STRCPY_CHK:
12625 case BUILT_IN_STPCPY_CHK:
12626 /* For __strcat_chk the warning will be emitted only if overflowing
12627 by at least strlen (dest) + 1 bytes. */
12628 case BUILT_IN_STRCAT_CHK:
12629 len = CALL_EXPR_ARG (exp, 1);
12630 size = CALL_EXPR_ARG (exp, 2);
12631 is_strlen = 1;
12632 break;
12633 case BUILT_IN_STRNCAT_CHK:
12634 case BUILT_IN_STRNCPY_CHK:
12635 case BUILT_IN_STPNCPY_CHK:
12636 len = CALL_EXPR_ARG (exp, 2);
12637 size = CALL_EXPR_ARG (exp, 3);
12638 break;
12639 case BUILT_IN_SNPRINTF_CHK:
12640 case BUILT_IN_VSNPRINTF_CHK:
12641 len = CALL_EXPR_ARG (exp, 1);
12642 size = CALL_EXPR_ARG (exp, 3);
12643 break;
12644 default:
12645 gcc_unreachable ();
12646 }
12647
12648 if (!len || !size)
12649 return;
12650
12651 if (! host_integerp (size, 1) || integer_all_onesp (size))
12652 return;
12653
12654 if (is_strlen)
12655 {
12656 len = c_strlen (len, 1);
12657 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12658 return;
12659 }
12660 else if (fcode == BUILT_IN_STRNCAT_CHK)
12661 {
12662 tree src = CALL_EXPR_ARG (exp, 1);
12663 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12664 return;
12665 src = c_strlen (src, 1);
12666 if (! src || ! host_integerp (src, 1))
12667 {
12668 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12669 exp, get_callee_fndecl (exp));
12670 return;
12671 }
12672 else if (tree_int_cst_lt (src, size))
12673 return;
12674 }
12675 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12676 return;
12677
12678 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12679 exp, get_callee_fndecl (exp));
12680 }
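
/* For instance (a sketch), given char buf[4] whose object size is 4:

     __strcpy_chk (buf, "hello", 4);

   draws "will always overflow destination buffer", because the known
   strlen ("hello") == 5 is not smaller than the object size 4.  */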
12681
12682 /* Emit warning if a buffer overflow is detected at compile time
12683 in __sprintf_chk/__vsprintf_chk calls. */
12684
12685 static void
12686 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12687 {
12688 tree size, len, fmt;
12689 const char *fmt_str;
12690 int nargs = call_expr_nargs (exp);
12691
12692 /* Verify the required arguments in the original call. */
12693
12694 if (nargs < 4)
12695 return;
12696 size = CALL_EXPR_ARG (exp, 2);
12697 fmt = CALL_EXPR_ARG (exp, 3);
12698
12699 if (! host_integerp (size, 1) || integer_all_onesp (size))
12700 return;
12701
12702 /* Check whether the format is a literal string constant. */
12703 fmt_str = c_getstr (fmt);
12704 if (fmt_str == NULL)
12705 return;
12706
12707 if (!init_target_chars ())
12708 return;
12709
12710 /* If the format doesn't contain % args or %%, we know its size. */
12711 if (strchr (fmt_str, target_percent) == 0)
12712 len = build_int_cstu (size_type_node, strlen (fmt_str));
12713 /* If the format is "%s" and first ... argument is a string literal,
12714 we know it too. */
12715 else if (fcode == BUILT_IN_SPRINTF_CHK
12716 && strcmp (fmt_str, target_percent_s) == 0)
12717 {
12718 tree arg;
12719
12720 if (nargs < 5)
12721 return;
12722 arg = CALL_EXPR_ARG (exp, 4);
12723 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12724 return;
12725
12726 len = c_strlen (arg, 1);
12727 if (!len || ! host_integerp (len, 1))
12728 return;
12729 }
12730 else
12731 return;
12732
12733 if (! tree_int_cst_lt (len, size))
12734 warning_at (tree_nonartificial_location (exp),
12735 0, "%Kcall to %D will always overflow destination buffer",
12736 exp, get_callee_fndecl (exp));
12737 }
12738
12739 /* Emit warning if a free is called with address of a variable. */
12740
12741 static void
12742 maybe_emit_free_warning (tree exp)
12743 {
12744 tree arg = CALL_EXPR_ARG (exp, 0);
12745
12746 STRIP_NOPS (arg);
12747 if (TREE_CODE (arg) != ADDR_EXPR)
12748 return;
12749
12750 arg = get_base_address (TREE_OPERAND (arg, 0));
12751 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12752 return;
12753
12754 if (SSA_VAR_P (arg))
12755 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12756 "%Kattempt to free a non-heap object %qD", exp, arg);
12757 else
12758 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12759 "%Kattempt to free a non-heap object", exp);
12760 }
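
/* E.g. (sketch):

     int x;
     free (&x);   <-- "attempt to free a non-heap object 'x'"

   Only ADDR_EXPRs of recognizable declarations are flagged; freeing an
   arbitrary pointer expression is left alone.  */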
12761
12762 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12763 if possible. */
12764
12765 tree
12766 fold_builtin_object_size (tree ptr, tree ost)
12767 {
12768 unsigned HOST_WIDE_INT bytes;
12769 int object_size_type;
12770
12771 if (!validate_arg (ptr, POINTER_TYPE)
12772 || !validate_arg (ost, INTEGER_TYPE))
12773 return NULL_TREE;
12774
12775 STRIP_NOPS (ost);
12776
12777 if (TREE_CODE (ost) != INTEGER_CST
12778 || tree_int_cst_sgn (ost) < 0
12779 || compare_tree_int (ost, 3) > 0)
12780 return NULL_TREE;
12781
12782 object_size_type = tree_low_cst (ost, 0);
12783
12784 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12785 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12786 and (size_t) 0 for types 2 and 3. */
12787 if (TREE_SIDE_EFFECTS (ptr))
12788 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12789
12790 if (TREE_CODE (ptr) == ADDR_EXPR)
12791 {
12792 bytes = compute_builtin_object_size (ptr, object_size_type);
12793 if (double_int_fits_to_tree_p (size_type_node,
12794 uhwi_to_double_int (bytes)))
12795 return build_int_cstu (size_type_node, bytes);
12796 }
12797 else if (TREE_CODE (ptr) == SSA_NAME)
12798 {
12799 /* If object size is not known yet, delay folding until
12800 later. Maybe subsequent passes will help determining
12801 it. */
12802 bytes = compute_builtin_object_size (ptr, object_size_type);
12803 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12804 && double_int_fits_to_tree_p (size_type_node,
12805 uhwi_to_double_int (bytes)))
12806 return build_int_cstu (size_type_node, bytes);
12807 }
12808
12809 return NULL_TREE;
12810 }
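
/* Folding sketch, given char a[10] (names hypothetical):

     __builtin_object_size (&a[3], 0)  ->  7
     __builtin_object_size (ptr, 0)    ->  folded only once the objsz
                                           pass has computed a value for
                                           PTR; otherwise left for later
                                           passes.  */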
12811
12812 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12813 DEST, SRC, LEN, and SIZE are the arguments to the call.
12814 IGNORE is true if the return value can be ignored. FCODE is the
12815 BUILT_IN_* code of the builtin. If MAXLEN is not NULL, it is the
12816 maximum length passed as the third argument. */
12817
12818 tree
12819 fold_builtin_memory_chk (location_t loc, tree fndecl,
12820 tree dest, tree src, tree len, tree size,
12821 tree maxlen, bool ignore,
12822 enum built_in_function fcode)
12823 {
12824 tree fn;
12825
12826 if (!validate_arg (dest, POINTER_TYPE)
12827 || !validate_arg (src,
12828 (fcode == BUILT_IN_MEMSET_CHK
12829 ? INTEGER_TYPE : POINTER_TYPE))
12830 || !validate_arg (len, INTEGER_TYPE)
12831 || !validate_arg (size, INTEGER_TYPE))
12832 return NULL_TREE;
12833
12834 /* If SRC and DEST are the same (and not volatile), return DEST
12835 (resp. DEST+LEN for __mempcpy_chk). */
12836 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12837 {
12838 if (fcode != BUILT_IN_MEMPCPY_CHK)
12839 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12840 dest, len);
12841 else
12842 {
12843 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12844 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12845 }
12846 }
12847
12848 if (! host_integerp (size, 1))
12849 return NULL_TREE;
12850
12851 if (! integer_all_onesp (size))
12852 {
12853 if (! host_integerp (len, 1))
12854 {
12855 /* If LEN is not constant, try MAXLEN too.
12856 For MAXLEN only allow optimizing into non-_ocs function
12857 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12858 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12859 {
12860 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12861 {
12862 /* (void) __mempcpy_chk () can be optimized into
12863 (void) __memcpy_chk (). */
12864 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12865 if (!fn)
12866 return NULL_TREE;
12867
12868 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12869 }
12870 return NULL_TREE;
12871 }
12872 }
12873 else
12874 maxlen = len;
12875
12876 if (tree_int_cst_lt (size, maxlen))
12877 return NULL_TREE;
12878 }
12879
12880 fn = NULL_TREE;
12881 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12882 mem{cpy,pcpy,move,set} is available. */
12883 switch (fcode)
12884 {
12885 case BUILT_IN_MEMCPY_CHK:
12886 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12887 break;
12888 case BUILT_IN_MEMPCPY_CHK:
12889 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12890 break;
12891 case BUILT_IN_MEMMOVE_CHK:
12892 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12893 break;
12894 case BUILT_IN_MEMSET_CHK:
12895 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12896 break;
12897 default:
12898 break;
12899 }
12900
12901 if (!fn)
12902 return NULL_TREE;
12903
12904 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12905 }
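
/* E.g. (sketch): when the result is unused,

     (void) __mempcpy_chk (d, s, n, os)  ->  (void) __memcpy_chk (d, s, n, os)

   and once N is known to fit (constant N <= OS, or OS == -1):

     __memcpy_chk (d, s, n, os)  ->  memcpy (d, s, n)  */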
12906
12907 /* Fold a call to the __st[rp]cpy_chk builtin.
12908 DEST, SRC, and SIZE are the arguments to the call.
12909 IGNORE is true if the return value can be ignored. FCODE is the
12910 BUILT_IN_* code of the builtin. If MAXLEN is not NULL, it is the
12911 maximum length of the string passed as the second argument. */
12912
12913 tree
12914 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12915 tree src, tree size,
12916 tree maxlen, bool ignore,
12917 enum built_in_function fcode)
12918 {
12919 tree len, fn;
12920
12921 if (!validate_arg (dest, POINTER_TYPE)
12922 || !validate_arg (src, POINTER_TYPE)
12923 || !validate_arg (size, INTEGER_TYPE))
12924 return NULL_TREE;
12925
12926 /* If SRC and DEST are the same (and not volatile), return DEST. */
12927 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12928 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12929
12930 if (! host_integerp (size, 1))
12931 return NULL_TREE;
12932
12933 if (! integer_all_onesp (size))
12934 {
12935 len = c_strlen (src, 1);
12936 if (! len || ! host_integerp (len, 1))
12937 {
12938 /* If LEN is not constant, try MAXLEN too.
12939 For MAXLEN only allow optimizing into non-_ocs function
12940 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12941 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12942 {
12943 if (fcode == BUILT_IN_STPCPY_CHK)
12944 {
12945 if (! ignore)
12946 return NULL_TREE;
12947
12948 /* If return value of __stpcpy_chk is ignored,
12949 optimize into __strcpy_chk. */
12950 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12951 if (!fn)
12952 return NULL_TREE;
12953
12954 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12955 }
12956
12957 if (! len || TREE_SIDE_EFFECTS (len))
12958 return NULL_TREE;
12959
12960 /* If c_strlen returned something, but not a constant,
12961 transform __strcpy_chk into __memcpy_chk. */
12962 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12963 if (!fn)
12964 return NULL_TREE;
12965
12966 len = fold_convert_loc (loc, size_type_node, len);
12967 len = size_binop_loc (loc, PLUS_EXPR, len,
12968 build_int_cst (size_type_node, 1));
12969 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12970 build_call_expr_loc (loc, fn, 4,
12971 dest, src, len, size));
12972 }
12973 }
12974 else
12975 maxlen = len;
12976
12977 if (! tree_int_cst_lt (maxlen, size))
12978 return NULL_TREE;
12979 }
12980
12981 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12982 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12983 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12984 if (!fn)
12985 return NULL_TREE;
12986
12987 return build_call_expr_loc (loc, fn, 2, dest, src);
12988 }
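
/* Sketch of the cases above (D, S and OS are hypothetical):

     __strcpy_chk (d, "abc", os)     ->  strcpy (d, "abc")   if 3 < OS
     __strcpy_chk (d, s, os)         ->  __memcpy_chk (d, s, len + 1, os)
                                         when c_strlen returns the
                                         non-constant expression LEN
     (void) __stpcpy_chk (d, s, os)  ->  (void) __strcpy_chk (d, s, os)  */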
12989
12990 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12991 are the arguments to the call. If MAXLEN is not NULL, it is the maximum
12992 length passed as the third argument. IGNORE is true if the return value
12993 can be ignored. FCODE is the BUILT_IN_* code of the builtin. */
12994
12995 tree
12996 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12997 tree len, tree size, tree maxlen, bool ignore,
12998 enum built_in_function fcode)
12999 {
13000 tree fn;
13001
13002 if (!validate_arg (dest, POINTER_TYPE)
13003 || !validate_arg (src, POINTER_TYPE)
13004 || !validate_arg (len, INTEGER_TYPE)
13005 || !validate_arg (size, INTEGER_TYPE))
13006 return NULL_TREE;
13007
13008 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
13009 {
13010 /* If return value of __stpncpy_chk is ignored,
13011 optimize into __strncpy_chk. */
13012 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
13013 if (fn)
13014 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
13015 }
13016
13017 if (! host_integerp (size, 1))
13018 return NULL_TREE;
13019
13020 if (! integer_all_onesp (size))
13021 {
13022 if (! host_integerp (len, 1))
13023 {
13024 /* If LEN is not constant, try MAXLEN too.
13025 For MAXLEN only allow optimizing into non-_ocs function
13026 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13027 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13028 return NULL_TREE;
13029 }
13030 else
13031 maxlen = len;
13032
13033 if (tree_int_cst_lt (size, maxlen))
13034 return NULL_TREE;
13035 }
13036
13037 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
13038 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
13039 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
13040 if (!fn)
13041 return NULL_TREE;
13042
13043 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13044 }
13045
13046 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
13047 are the arguments to the call. */
13048
13049 static tree
13050 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
13051 tree src, tree size)
13052 {
13053 tree fn;
13054 const char *p;
13055
13056 if (!validate_arg (dest, POINTER_TYPE)
13057 || !validate_arg (src, POINTER_TYPE)
13058 || !validate_arg (size, INTEGER_TYPE))
13059 return NULL_TREE;
13060
13061 p = c_getstr (src);
13062 /* If the SRC parameter is "", return DEST. */
13063 if (p && *p == '\0')
13064 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13065
13066 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
13067 return NULL_TREE;
13068
13069 /* If __builtin_strcat_chk is used, assume strcat is available. */
13070 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
13071 if (!fn)
13072 return NULL_TREE;
13073
13074 return build_call_expr_loc (loc, fn, 2, dest, src);
13075 }
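
/* E.g. (sketch): __strcat_chk (d, "", os) folds to D outright, and with
   OS == -1 (object size unknown, so the check can never fire)
   __strcat_chk (d, s, os) folds to plain strcat (d, s).  */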
13076
13077 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
13078 LEN, and SIZE. */
13079
13080 static tree
13081 fold_builtin_strncat_chk (location_t loc, tree fndecl,
13082 tree dest, tree src, tree len, tree size)
13083 {
13084 tree fn;
13085 const char *p;
13086
13087 if (!validate_arg (dest, POINTER_TYPE)
13088 || !validate_arg (src, POINTER_TYPE)
13089 || !validate_arg (len, INTEGER_TYPE)
13090 || !validate_arg (size, INTEGER_TYPE))
13091 return NULL_TREE;
13092
13093 p = c_getstr (src);
13094 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13095 if (p && *p == '\0')
13096 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13097 else if (integer_zerop (len))
13098 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13099
13100 if (! host_integerp (size, 1))
13101 return NULL_TREE;
13102
13103 if (! integer_all_onesp (size))
13104 {
13105 tree src_len = c_strlen (src, 1);
13106 if (src_len
13107 && host_integerp (src_len, 1)
13108 && host_integerp (len, 1)
13109 && ! tree_int_cst_lt (len, src_len))
13110 {
13111 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13112 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13113 if (!fn)
13114 return NULL_TREE;
13115
13116 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13117 }
13118 return NULL_TREE;
13119 }
13120
13121 /* If __builtin_strncat_chk is used, assume strncat is available. */
13122 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13123 if (!fn)
13124 return NULL_TREE;
13125
13126 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13127 }
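
/* Sketch, assuming strlen (S) is known to be 3 and OS is a constant:

     __strncat_chk (d, s, 5, os)  ->  __strcat_chk (d, s, os)   5 >= 3
     __strncat_chk (d, s, 0, os)  ->  d  */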
13128
13129 /* Fold a call to __{,v}sprintf_chk with NARGS arguments ARGS.
13130 Return NULL_TREE if a normal call should be emitted rather than
13131 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13132 or BUILT_IN_VSPRINTF_CHK. */
13133
13134 static tree
13135 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13136 enum built_in_function fcode)
13137 {
13138 tree dest, size, len, fn, fmt, flag;
13139 const char *fmt_str;
13140
13141 /* Verify the required arguments in the original call. */
13142 if (nargs < 4)
13143 return NULL_TREE;
13144 dest = args[0];
13145 if (!validate_arg (dest, POINTER_TYPE))
13146 return NULL_TREE;
13147 flag = args[1];
13148 if (!validate_arg (flag, INTEGER_TYPE))
13149 return NULL_TREE;
13150 size = args[2];
13151 if (!validate_arg (size, INTEGER_TYPE))
13152 return NULL_TREE;
13153 fmt = args[3];
13154 if (!validate_arg (fmt, POINTER_TYPE))
13155 return NULL_TREE;
13156
13157 if (! host_integerp (size, 1))
13158 return NULL_TREE;
13159
13160 len = NULL_TREE;
13161
13162 if (!init_target_chars ())
13163 return NULL_TREE;
13164
13165 /* Check whether the format is a literal string constant. */
13166 fmt_str = c_getstr (fmt);
13167 if (fmt_str != NULL)
13168 {
13169 /* If the format doesn't contain % args or %%, we know the size. */
13170 if (strchr (fmt_str, target_percent) == 0)
13171 {
13172 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13173 len = build_int_cstu (size_type_node, strlen (fmt_str));
13174 }
13175 /* If the format is "%s" and first ... argument is a string literal,
13176 we know the size too. */
13177 else if (fcode == BUILT_IN_SPRINTF_CHK
13178 && strcmp (fmt_str, target_percent_s) == 0)
13179 {
13180 tree arg;
13181
13182 if (nargs == 5)
13183 {
13184 arg = args[4];
13185 if (validate_arg (arg, POINTER_TYPE))
13186 {
13187 len = c_strlen (arg, 1);
13188 if (! len || ! host_integerp (len, 1))
13189 len = NULL_TREE;
13190 }
13191 }
13192 }
13193 }
13194
13195 if (! integer_all_onesp (size))
13196 {
13197 if (! len || ! tree_int_cst_lt (len, size))
13198 return NULL_TREE;
13199 }
13200
13201 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13202 or if format doesn't contain % chars or is "%s". */
13203 if (! integer_zerop (flag))
13204 {
13205 if (fmt_str == NULL)
13206 return NULL_TREE;
13207 if (strchr (fmt_str, target_percent) != NULL
13208 && strcmp (fmt_str, target_percent_s))
13209 return NULL_TREE;
13210 }
13211
13212 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13213 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13214 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13215 if (!fn)
13216 return NULL_TREE;
13217
13218 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13219 }
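
/* Folding sketch (D, S and OS are hypothetical; the flag argument
   is 0):

     __sprintf_chk (d, 0, os, "hello")  ->  sprintf (d, "hello")
                                            if OS == -1 or 5 < OS
     __sprintf_chk (d, 0, os, "%s", s)  ->  sprintf (d, "%s", s)
                                            if OS == -1 or strlen (S)
                                            is known to be < OS  */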
13220
13221 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13222 a normal call should be emitted rather than expanding the function
13223 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13224
13225 static tree
13226 fold_builtin_sprintf_chk (location_t loc, tree exp,
13227 enum built_in_function fcode)
13228 {
13229 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13230 CALL_EXPR_ARGP (exp), fcode);
13231 }
13232
13233 /* Fold a call to __{,v}snprintf_chk with NARGS arguments ARGS. Return
13234 NULL_TREE if a normal call should be emitted rather than expanding
13235 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13236 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum
13237 length passed as the second argument. */
13238
13239 static tree
13240 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13241 tree maxlen, enum built_in_function fcode)
13242 {
13243 tree dest, size, len, fn, fmt, flag;
13244 const char *fmt_str;
13245
13246 /* Verify the required arguments in the original call. */
13247 if (nargs < 5)
13248 return NULL_TREE;
13249 dest = args[0];
13250 if (!validate_arg (dest, POINTER_TYPE))
13251 return NULL_TREE;
13252 len = args[1];
13253 if (!validate_arg (len, INTEGER_TYPE))
13254 return NULL_TREE;
13255 flag = args[2];
13256 if (!validate_arg (flag, INTEGER_TYPE))
13257 return NULL_TREE;
13258 size = args[3];
13259 if (!validate_arg (size, INTEGER_TYPE))
13260 return NULL_TREE;
13261 fmt = args[4];
13262 if (!validate_arg (fmt, POINTER_TYPE))
13263 return NULL_TREE;
13264
13265 if (! host_integerp (size, 1))
13266 return NULL_TREE;
13267
13268 if (! integer_all_onesp (size))
13269 {
13270 if (! host_integerp (len, 1))
13271 {
13272 /* If LEN is not constant, try MAXLEN too.
13273 For MAXLEN only allow optimizing into non-_ocs function
13274 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13275 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13276 return NULL_TREE;
13277 }
13278 else
13279 maxlen = len;
13280
13281 if (tree_int_cst_lt (size, maxlen))
13282 return NULL_TREE;
13283 }
13284
13285 if (!init_target_chars ())
13286 return NULL_TREE;
13287
13288 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13289 or if format doesn't contain % chars or is "%s". */
13290 if (! integer_zerop (flag))
13291 {
13292 fmt_str = c_getstr (fmt);
13293 if (fmt_str == NULL)
13294 return NULL_TREE;
13295 if (strchr (fmt_str, target_percent) != NULL
13296 && strcmp (fmt_str, target_percent_s))
13297 return NULL_TREE;
13298 }
13299
13300 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13301 available. */
13302 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13303 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13304 if (!fn)
13305 return NULL_TREE;
13306
13307 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13308 }
13309
13310 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
13311 a normal call should be emitted rather than expanding the function
13312 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13313 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum
13314 length passed as the second argument. */
13315
13316 tree
13317 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13318 enum built_in_function fcode)
13319 {
13320 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13321 CALL_EXPR_ARGP (exp), maxlen, fcode);
13322 }
13323
13324 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13325 FMT and ARG are the arguments to the call; we don't fold cases with
13326 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13327
13328 Return NULL_TREE if no simplification was possible, otherwise return the
13329 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13330 code of the function to be simplified. */
13331
13332 static tree
13333 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13334 tree arg, bool ignore,
13335 enum built_in_function fcode)
13336 {
13337 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13338 const char *fmt_str = NULL;
13339
13340 /* If the return value is used, don't do the transformation. */
13341 if (! ignore)
13342 return NULL_TREE;
13343
13344 /* Verify the required arguments in the original call. */
13345 if (!validate_arg (fmt, POINTER_TYPE))
13346 return NULL_TREE;
13347
13348 /* Check whether the format is a literal string constant. */
13349 fmt_str = c_getstr (fmt);
13350 if (fmt_str == NULL)
13351 return NULL_TREE;
13352
13353 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13354 {
13355 /* If we're using an unlocked function, assume the other
13356 unlocked functions exist explicitly. */
13357 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13358 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13359 }
13360 else
13361 {
13362 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13363 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13364 }
13365
13366 if (!init_target_chars ())
13367 return NULL_TREE;
13368
13369 if (strcmp (fmt_str, target_percent_s) == 0
13370 || strchr (fmt_str, target_percent) == NULL)
13371 {
13372 const char *str;
13373
13374 if (strcmp (fmt_str, target_percent_s) == 0)
13375 {
13376 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13377 return NULL_TREE;
13378
13379 if (!arg || !validate_arg (arg, POINTER_TYPE))
13380 return NULL_TREE;
13381
13382 str = c_getstr (arg);
13383 if (str == NULL)
13384 return NULL_TREE;
13385 }
13386 else
13387 {
13388 /* The format specifier doesn't contain any '%' characters. */
13389 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13390 && arg)
13391 return NULL_TREE;
13392 str = fmt_str;
13393 }
13394
13395 /* If the string was "", printf does nothing. */
13396 if (str[0] == '\0')
13397 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13398
13399 /* If the string has length of 1, call putchar. */
13400 if (str[1] == '\0')
13401 {
13402 /* Given printf ("c") (where c is any one character),
13403 convert "c"[0] to an int and pass that to the replacement
13404 function. */
13405 newarg = build_int_cst (integer_type_node, str[0]);
13406 if (fn_putchar)
13407 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13408 }
13409 else
13410 {
13411 /* If the string was "string\n", call puts("string"). */
13412 size_t len = strlen (str);
13413 if ((unsigned char)str[len - 1] == target_newline
13414 && (size_t) (int) len == len
13415 && (int) len > 0)
13416 {
13417 char *newstr;
13418 tree offset_node, string_cst;
13419
13420 /* Create a NUL-terminated string that's one char shorter
13421 than the original, stripping off the trailing '\n'. */
13422 newarg = build_string_literal (len, str);
13423 string_cst = string_constant (newarg, &offset_node);
13424 gcc_checking_assert (string_cst
13425 && (TREE_STRING_LENGTH (string_cst)
13426 == (int) len)
13427 && integer_zerop (offset_node)
13428 && (unsigned char)
13429 TREE_STRING_POINTER (string_cst)[len - 1]
13430 == target_newline);
13431 /* build_string_literal creates a new STRING_CST,
13432 modify it in place to avoid double copying. */
13433 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13434 newstr[len - 1] = '\0';
13435 if (fn_puts)
13436 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13437 }
13438 else
13439 /* We'd like to arrange to call fputs(string,stdout) here,
13440 but we need stdout and don't have a way to get it yet. */
13441 return NULL_TREE;
13442 }
13443 }
13444
13445 /* The other optimizations can be done only on the non-va_list variants. */
13446 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13447 return NULL_TREE;
13448
13449 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13450 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13451 {
13452 if (!arg || !validate_arg (arg, POINTER_TYPE))
13453 return NULL_TREE;
13454 if (fn_puts)
13455 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13456 }
13457
13458 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13459 else if (strcmp (fmt_str, target_percent_c) == 0)
13460 {
13461 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13462 return NULL_TREE;
13463 if (fn_putchar)
13464 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13465 }
13466
13467 if (!call)
13468 return NULL_TREE;
13469
13470 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13471 }
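
/* Illustrative foldings, all with the printf result unused (S and C
   are hypothetical):

     printf ("");         ->  0, no call
     printf ("x");        ->  putchar ('x');
     printf ("hello\n");  ->  puts ("hello");
     printf ("%s\n", s);  ->  puts (s);
     printf ("%c", c);    ->  putchar (c);  */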
13472
13473 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13474 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13475 more than 3 arguments, and ARG may be null in the 2-argument case.
13476
13477 Return NULL_TREE if no simplification was possible, otherwise return the
13478 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13479 code of the function to be simplified. */
13480
13481 static tree
13482 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13483 tree fmt, tree arg, bool ignore,
13484 enum built_in_function fcode)
13485 {
13486 tree fn_fputc, fn_fputs, call = NULL_TREE;
13487 const char *fmt_str = NULL;
13488
13489 /* If the return value is used, don't do the transformation. */
13490 if (! ignore)
13491 return NULL_TREE;
13492
13493 /* Verify the required arguments in the original call. */
13494 if (!validate_arg (fp, POINTER_TYPE))
13495 return NULL_TREE;
13496 if (!validate_arg (fmt, POINTER_TYPE))
13497 return NULL_TREE;
13498
13499 /* Check whether the format is a literal string constant. */
13500 fmt_str = c_getstr (fmt);
13501 if (fmt_str == NULL)
13502 return NULL_TREE;
13503
13504 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13505 {
13506 /* If we're using an unlocked function, assume the other
13507 unlocked functions exist explicitly. */
13508 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13509 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13510 }
13511 else
13512 {
13513 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13514 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13515 }
13516
13517 if (!init_target_chars ())
13518 return NULL_TREE;
13519
13520 /* If the format doesn't contain % args or %%, use strcpy. */
13521 if (strchr (fmt_str, target_percent) == NULL)
13522 {
13523 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13524 && arg)
13525 return NULL_TREE;
13526
13527 /* If the format specifier was "", fprintf does nothing. */
13528 if (fmt_str[0] == '\0')
13529 {
13530 /* If FP has side-effects, just wait until gimplification is
13531 done. */
13532 if (TREE_SIDE_EFFECTS (fp))
13533 return NULL_TREE;
13534
13535 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13536 }
13537
13538 /* When "string" doesn't contain %, replace all cases of
13539 fprintf (fp, string) with fputs (string, fp). The fputs
13540 builtin will take care of special cases like length == 1. */
13541 if (fn_fputs)
13542 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13543 }
13544
13545 /* The other optimizations can be done only on the non-va_list variants. */
13546 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13547 return NULL_TREE;
13548
13549 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13550 else if (strcmp (fmt_str, target_percent_s) == 0)
13551 {
13552 if (!arg || !validate_arg (arg, POINTER_TYPE))
13553 return NULL_TREE;
13554 if (fn_fputs)
13555 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13556 }
13557
13558 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13559 else if (strcmp (fmt_str, target_percent_c) == 0)
13560 {
13561 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13562 return NULL_TREE;
13563 if (fn_fputc)
13564 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13565 }
13566
13567 if (!call)
13568 return NULL_TREE;
13569 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13570 }
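
/* Illustrative foldings, with the fprintf result unused (FP, S and C
   are hypothetical):

     fprintf (fp, "abc");    ->  fputs ("abc", fp);
     fprintf (fp, "%s", s);  ->  fputs (s, fp);
     fprintf (fp, "%c", c);  ->  fputc (c, fp);  */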
13571
13572 /* Initialize format string characters in the target charset. */
13573
13574 static bool
13575 init_target_chars (void)
13576 {
13577 static bool init;
13578 if (!init)
13579 {
13580 target_newline = lang_hooks.to_target_charset ('\n');
13581 target_percent = lang_hooks.to_target_charset ('%');
13582 target_c = lang_hooks.to_target_charset ('c');
13583 target_s = lang_hooks.to_target_charset ('s');
13584 if (target_newline == 0 || target_percent == 0 || target_c == 0
13585 || target_s == 0)
13586 return false;
13587
13588 target_percent_c[0] = target_percent;
13589 target_percent_c[1] = target_c;
13590 target_percent_c[2] = '\0';
13591
13592 target_percent_s[0] = target_percent;
13593 target_percent_s[1] = target_s;
13594 target_percent_s[2] = '\0';
13595
13596 target_percent_s_newline[0] = target_percent;
13597 target_percent_s_newline[1] = target_s;
13598 target_percent_s_newline[2] = target_newline;
13599 target_percent_s_newline[3] = '\0';
13600
13601 init = true;
13602 }
13603 return true;
13604 }
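
/* These cached characters matter for cross compilers whose host and
   target character sets differ (an EBCDIC target, say): comparing a
   byte of a target-charset format string against the host's literal
   '%' would be wrong, so the printf/fprintf foldings above match
   through target_percent, target_percent_s and friends instead.  */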
13605
13606 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13607 and no overflow/underflow occurred. INEXACT is true if M was not
13608 exactly calculated. TYPE is the tree type for the result. This
13609 function assumes that you cleared the MPFR flags and then
13610 calculated M to see if anything subsequently set a flag prior to
13611 entering this function. Return NULL_TREE if any checks fail. */
13612
13613 static tree
13614 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13615 {
13616 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13617 overflow/underflow occurred. If -frounding-math, proceed iff the
13618 result of calling FUNC was exact. */
13619 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13620 && (!flag_rounding_math || !inexact))
13621 {
13622 REAL_VALUE_TYPE rr;
13623
13624 real_from_mpfr (&rr, m, type, GMP_RNDN);
13625 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13626 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13627 but the mpfr_t is not, then we underflowed in the
13628 conversion. */
13629 if (real_isfinite (&rr)
13630 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13631 {
13632 REAL_VALUE_TYPE rmode;
13633
13634 real_convert (&rmode, TYPE_MODE (type), &rr);
13635 /* Proceed iff the specified mode can hold the value. */
13636 if (real_identical (&rmode, &rr))
13637 return build_real (type, rmode);
13638 }
13639 }
13640 return NULL_TREE;
13641 }
13642
13643 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13644 number and no overflow/underflow occurred. INEXACT is true if M
13645 was not exactly calculated. TYPE is the tree type for the result.
13646 This function assumes that you cleared the MPFR flags and then
13647 calculated M to see if anything subsequently set a flag prior to
13648 entering this function. Return NULL_TREE if any checks fail; if
13649 FORCE_CONVERT is true, bypass the checks. */
13650
13651 static tree
13652 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13653 {
13654 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13655 overflow/underflow occurred. If -frounding-math, proceed iff the
13656 result of calling FUNC was exact. */
13657 if (force_convert
13658 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13659 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13660 && (!flag_rounding_math || !inexact)))
13661 {
13662 REAL_VALUE_TYPE re, im;
13663
13664 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13665 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13666 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13667 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13668 but the mpfr_t is not, then we underflowed in the
13669 conversion. */
13670 if (force_convert
13671 || (real_isfinite (&re) && real_isfinite (&im)
13672 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13673 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13674 {
13675 REAL_VALUE_TYPE re_mode, im_mode;
13676
13677 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13678 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13679 /* Proceed iff the specified mode can hold the value. */
13680 if (force_convert
13681 || (real_identical (&re_mode, &re)
13682 && real_identical (&im_mode, &im)))
13683 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13684 build_real (TREE_TYPE (type), im_mode));
13685 }
13686 }
13687 return NULL_TREE;
13688 }
13689
13690 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13691 FUNC on it and return the resulting value as a tree with type TYPE.
13692 If MIN and/or MAX are not NULL, then the supplied ARG must be
13693 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13694 acceptable values, otherwise they are not. The mpfr precision is
13695 set to the precision of TYPE. We assume that function FUNC returns
13696 zero if the result could be calculated exactly within the requested
13697 precision. */
13698
13699 static tree
13700 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13701 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13702 bool inclusive)
13703 {
13704 tree result = NULL_TREE;
13705
13706 STRIP_NOPS (arg);
13707
13708 /* To proceed, MPFR must exactly represent the target floating point
13709 format, which only happens when the target base equals two. */
13710 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13711 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13712 {
13713 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13714
13715 if (real_isfinite (ra)
13716 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
13717 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
13718 {
13719 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13720 const int prec = fmt->p;
13721 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13722 int inexact;
13723 mpfr_t m;
13724
13725 mpfr_init2 (m, prec);
13726 mpfr_from_real (m, ra, GMP_RNDN);
13727 mpfr_clear_flags ();
13728 inexact = func (m, m, rnd);
13729 result = do_mpfr_ckconv (m, type, inexact);
13730 mpfr_clear (m);
13731 }
13732 }
13733
13734 return result;
13735 }
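
/* For example (a sketch), folding __builtin_erf (1.5) goes through here
   with FUNC == mpfr_erf: 1.5 is converted to an mpfr_t at TYPE's
   precision, mpfr_erf is applied, and the value becomes a REAL_CST
   provided do_mpfr_ckconv accepts it (a normal number that the target
   mode holds exactly).  */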
13736
13737 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13738 FUNC on it and return the resulting value as a tree with type TYPE.
13739 The mpfr precision is set to the precision of TYPE. We assume that
13740 function FUNC returns zero if the result could be calculated
13741 exactly within the requested precision. */
13742
13743 static tree
13744 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13745 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13746 {
13747 tree result = NULL_TREE;
13748
13749 STRIP_NOPS (arg1);
13750 STRIP_NOPS (arg2);
13751
13752 /* To proceed, MPFR must exactly represent the target floating point
13753 format, which only happens when the target base equals two. */
13754 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13755 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13756 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13757 {
13758 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13759 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13760
13761 if (real_isfinite (ra1) && real_isfinite (ra2))
13762 {
13763 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13764 const int prec = fmt->p;
13765 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13766 int inexact;
13767 mpfr_t m1, m2;
13768
13769 mpfr_inits2 (prec, m1, m2, NULL);
13770 mpfr_from_real (m1, ra1, GMP_RNDN);
13771 mpfr_from_real (m2, ra2, GMP_RNDN);
13772 mpfr_clear_flags ();
13773 inexact = func (m1, m1, m2, rnd);
13774 result = do_mpfr_ckconv (m1, type, inexact);
13775 mpfr_clears (m1, m2, NULL);
13776 }
13777 }
13778
13779 return result;
13780 }
13781
13782 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13783 FUNC on it and return the resulting value as a tree with type TYPE.
13784 The mpfr precision is set to the precision of TYPE. We assume that
13785 function FUNC returns zero if the result could be calculated
13786 exactly within the requested precision. */
13787
13788 static tree
13789 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13790 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13791 {
13792 tree result = NULL_TREE;
13793
13794 STRIP_NOPS (arg1);
13795 STRIP_NOPS (arg2);
13796 STRIP_NOPS (arg3);
13797
13798 /* To proceed, MPFR must exactly represent the target floating point
13799 format, which only happens when the target base equals two. */
13800 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13801 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13802 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13803 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13804 {
13805 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13806 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13807 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13808
13809 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13810 {
13811 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13812 const int prec = fmt->p;
13813 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13814 int inexact;
13815 mpfr_t m1, m2, m3;
13816
13817 mpfr_inits2 (prec, m1, m2, m3, NULL);
13818 mpfr_from_real (m1, ra1, GMP_RNDN);
13819 mpfr_from_real (m2, ra2, GMP_RNDN);
13820 mpfr_from_real (m3, ra3, GMP_RNDN);
13821 mpfr_clear_flags ();
13822 inexact = func (m1, m1, m2, m3, rnd);
13823 result = do_mpfr_ckconv (m1, type, inexact);
13824 mpfr_clears (m1, m2, m3, NULL);
13825 }
13826 }
13827
13828 return result;
13829 }

/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_s, result_c;
          int inexact;
          mpfr_t m, ms, mc;

          mpfr_inits2 (prec, m, ms, mc, NULL);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_sin_cos (ms, mc, m, rnd);
          result_s = do_mpfr_ckconv (ms, type, inexact);
          result_c = do_mpfr_ckconv (mc, type, inexact);
          mpfr_clears (m, ms, mc, NULL);
          if (result_s && result_c)
            {
              /* If we are to return the result as a complex value, do so.  */
              if (!arg_sinp && !arg_cosp)
                return build_complex (build_complex_type (type),
                                      result_c, result_s);

              /* Dereference the sin/cos pointer arguments.  */
              arg_sinp = build_fold_indirect_ref (arg_sinp);
              arg_cosp = build_fold_indirect_ref (arg_cosp);
              /* Proceed iff valid pointer types were passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
                  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
                {
                  /* Set the values.  */
                  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
                                          result_s);
                  TREE_SIDE_EFFECTS (result_s) = 1;
                  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
                                          result_c);
                  TREE_SIDE_EFFECTS (result_c) = 1;
                  /* Combine the assignments into a compound expr.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_s, result_c));
                }
            }
        }
    }
  return result;
}
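
/* Two callers share this helper: sincos, which passes the sin/cos
   pointer arguments, and cexpi, which passes NULL for both and gets
   the complex value cos + i*sin back.  For instance

     sincos (0.0, &s, &c);

   folds to the compound expression (s = 0.0, c = 1.0), both results
   being exact at any binary precision.  */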

/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long) n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
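
/* This helper serves jn and yn (and their float/long double variants);
   MIN and INCLUSIVE let the yn folder require a strictly positive
   argument.  For example

     double d = jn (0, 0.0);

   can fold to 1.0, since j0(0) = 1 exactly; a call whose result MPFR
   flags as inexact is left alone for the library to evaluate at run
   time.  */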

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   *(ARG_QUO) to the integral quotient and return the remainder.  The
   type is taken from the type of ARG0 and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long, so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, reduce the quo value modulo
                 2^(INT_TYPE_SIZE - 1), so that its magnitude fits in
                 a target int with one bit left for the sign.  */
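              /* For instance, with a 64-bit host long and a 32-bit
                 target int, a quo value of 0x123456789 would be
                 reduced modulo 2^31 to 0x23456789 before being stored
                 in the target's int.  */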
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
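
/* For example

     int q;
     double r = remquo (10.0, 3.0, &q);

   folds to the compound expression (q = 3, 1.0): the quotient 10/3
   rounded to the nearest integer is 3, and the remainder
   10 - 3*3 = 1.0 is always exact, which is why inexact=0 is passed
   to do_mpfr_ckconv above.  */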

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
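
/* For example

     int sg;
     double d = lgamma_r (1.0, &sg);

   folds to the compound expression (sg = 1, 0.0), since gamma(1) = 1
   is positive and log(1) = 0 is exact at any binary precision.  */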

/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}
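
/* The one-argument complex folders (ccos and its relatives) go
   through this helper.  For example, cexp applied to the complex
   constant 0.0 + 0.0i can fold to 1.0 + 0.0i, as mpc_exp reports
   both the real and the imaginary part of that result as exact.  */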

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
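
/* The cpow folder uses this helper with mpc_pow; being exported, it
   can also serve constant complex arithmetic elsewhere.  For example

     double _Complex z = cpow (2.0, 2.0);

   can fold to 4.0 + 0.0i when MPC reports both parts exact; with
   DO_NONFINITE set by the caller, operands or results containing Inf
   or NaN may be folded as well rather than rejected.  */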

/* Fold a call STMT to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
                                     (nargs > 0
                                      ? gimple_call_arg_ptr (stmt, 0)
                                      : &error_mark_node), fcode);
}

/* Fold a call STMT to __{,v}snprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is the maximum
   length passed as the second argument.  */

tree
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
                                  enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
                                      (nargs > 0
                                       ? gimple_call_arg_ptr (stmt, 0)
                                       : &error_mark_node), maxlen, fcode);
}
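
/* These wrappers let the gimple level reuse the tree-level
   fold_builtin_sprintf_chk_1 / fold_builtin_snprintf_chk_1 logic.
   As a rough illustration of the payoff,

     __builtin___sprintf_chk (buf, 0, bos, "hello");

   can be simplified into a direct copy of the string "hello" into
   buf once the format is known to contain no '%' directives and the
   length check against the object size can be resolved at compile
   time.  */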

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, STMT is the GIMPLE call statement, and IGNORE is true if the
   result of the function call is ignored.  */

static tree
gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
                             bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
        {
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (!ret)
            ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
          if (ret)
            {
              /* Propagate location information from the original call to
                 the expansion of the builtin.  Otherwise things like
                 maybe_emit_chk_warning, which operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}

/* Look up the built-in function declaration that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}
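
/* This hook matters for code that renames a builtin at the assembler
   level, e.g.

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("__my_memcpy");

   Afterwards even block moves that GCC expands by itself must call
   __my_memcpy, which is what the init_block_move_fn and
   memcpy_libfunc updates above arrange.  */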

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one that
   will most probably be expanded inline into reasonably simple code.
   This is a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}