/* Expand builtin functions.
   Copyright (C) 1988-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "realmpfr.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "ubsan.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees; make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx,
				       enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strcat (location_t, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
				  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
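
/* Illustrative sketch (not part of the original file): the three
   prefix checks above mean that, for example,

     is_builtin_name ("__builtin_memcpy")        => true
     is_builtin_name ("__sync_fetch_and_add_4")  => true
     is_builtin_name ("__atomic_load_8")         => true
     is_builtin_name ("memcpy")                  => false

   Plain library names such as "memcpy" are recognized as builtins
   through the DECL_BUILT_IN machinery instead, not by this test.  */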


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* By default we assume that C99 functions are present in the runtime,
   but sincos is not.  */
bool
default_libc_has_function (enum function_class fn_class)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}
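
/* Illustrative sketch (an assumption, not from this file): a target
   selects one of the hooks above through its libc_has_function target
   hook, e.g. something along the lines of

     #undef  TARGET_LIBC_HAS_FUNCTION
     #define TARGET_LIBC_HAS_FUNCTION gnu_libc_has_function

   in a GNU/Linux target, which then permits folding calls such as
   sincos, while a bare-metal target would pick
   no_c99_libc_has_function.  */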

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int inner, align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  inner = ~0U;
  while (offset)
    {
      tree next_offset;

      if (TREE_CODE (offset) == PLUS_EXPR)
	{
	  next_offset = TREE_OPERAND (offset, 0);
	  offset = TREE_OPERAND (offset, 1);
	}
      else
	next_offset = NULL;
      if (host_integerp (offset, 1))
	{
	  /* Any overflow in calculating offset_bits won't change
	     the alignment.  */
	  unsigned offset_bits
	    = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	  if (offset_bits)
	    inner = MIN (inner, (offset_bits & -offset_bits));
	}
      else if (TREE_CODE (offset) == MULT_EXPR
	       && host_integerp (TREE_OPERAND (offset, 1), 1))
	{
	  /* Any overflow in calculating offset_factor won't change
	     the alignment.  */
	  unsigned offset_factor
	    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
	       * BITS_PER_UNIT);

	  if (offset_factor)
	    inner = MIN (inner, (offset_factor & -offset_factor));
	}
      else
	{
	  inner = MIN (inner, BITS_PER_UNIT);
	  break;
	}
      offset = next_offset;
    }
  /* Alignment is innermost object alignment adjusted by the constant
     and non-constant offset parts.  */
  align = MIN (align, inner);

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
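
/* Illustrative sketch (an assumption, not from this file): given

     struct __attribute__ ((packed)) S { char c; int i; } s;

   the member s.i sits at byte offset 1, so even when S itself is
   word aligned the computed bitpos is 8 and get_object_alignment (s.i)
   degrades to (bitpos & -bitpos) == 8 bits, i.e. byte alignment;
   without the packed attribute the offset is a multiple of the int
   alignment and the full TYPE_ALIGN (int) is retained.  */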

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, BITS_PER_UNIT is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
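
/* Illustrative folding sketch (an assumption, not from this file):
   for a constant argument c_strlen computes the length at compile
   time, e.g. for the source expressions

     "hello"          ->  ssize_int (5)
     "hello" + 1      ->  ssize_int (4)
     "foo\0bar" + i   ->  NULL_TREE  (unknown offset, embedded zero)

   where the last case fails because the loop above refuses to guess
   past an internal zero byte when the start offset is not known.  */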

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}

/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
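
/* Illustrative sketch (an assumption, not from this file): reading
   four bytes of "abcd" in a 32-bit integer mode yields the constant a
   target load of those bytes would see, i.e. roughly

     c_readstr ("abcd", SImode) == 0x64636261  (little endian)
     c_readstr ("abcd", SImode) == 0x61626364  (big endian)

   with the word/byte endianness juggling above covering the mixed
   cases.  */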

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = TREE_INT_CST_LOW (cst);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
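
/* Illustrative usage sketch (an assumption, not from this file):

     char c;
     if (target_char_cast (arg, &c) == 0)
       ... use the host character C, e.g. to expand memset ...

   Expanders and folders for memset, strchr and friends use this to
   obtain a host char from a target character constant, bailing out
   when the value does not survive the round trip.  */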

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
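
/* Illustrative source-level sketch (an assumption, not from this
   file): the expander above backs both

     void *ra = __builtin_return_address (0);   -- return address
     void *fp = __builtin_frame_address (0);    -- frame address

   A nonzero COUNT walks the dynamic chain that many frames first,
   which is only reliable on targets where that chain can actually
   be followed.  */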

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
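
/* Illustrative layout sketch (an assumption, not from this file):
   the code above fills the user-supplied buffer roughly as

     buf[0]   frame value (targetm.builtin_setjmp_frame_value)
     buf[1]   address of RECEIVER_LABEL
     buf[2..] stack save area, in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   which is why __builtin_setjmp requires a buffer of five words.  */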

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be decrementing fp by
       STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  Similarly, we must block
     (frame-related) register values to be used across this code.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require the user to pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
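
/* Illustrative usage sketch (an assumption, not from this file): the
   pairing this expander supports is

     static intptr_t buf[5];

     if (__builtin_setjmp (buf) == 0)
       do_work ();    -- may call __builtin_longjmp (buf, 1)
     else
       recover ();    -- reached after the longjmp

   The second argument of __builtin_longjmp must be the literal 1,
   matching the gcc_assert (value == const1_rtx) above.  */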

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
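
/* Illustrative usage sketch (an assumption, not from this file):

     for (i = 0; i < n; i++)
       {
	 __builtin_prefetch (&a[i + 8], 0, 3);  -- read, high locality
	 sum += a[i];
       }

   On a target without a prefetch pattern the call disappears, apart
   from any side effects of evaluating the address argument.  */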

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
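
/* Illustrative layout sketch (an assumption, not from this file):
   the block sized above is laid out as

     [incoming arg pointer]
     [structure value address, unless passed invisibly]
     [one slot per argument register, each aligned to its mode]

   and apply_args_mode[] remembers, per hard register, the mode that
   was used (VOIDmode for registers that never carry arguments).  */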

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed it to us, not
     as we might have pretended it was passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
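
/* Illustrative usage sketch (an assumption, not from this file):
   together these builtins implement GCC's untyped call-forwarding
   idiom,

     void *forward (void)
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*)()) target_fn, args, 64);
       __builtin_return (res);
     }

   where target_fn is a hypothetical callee and 64 is a caller-chosen
   upper bound on the size of the stack arguments to copy.  */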
1544
1545 /* Perform an untyped call and save the state required to perform an
1546 untyped return of whatever value was returned by the given function. */
1547
1548 static rtx
1549 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1550 {
1551 int size, align, regno;
1552 enum machine_mode mode;
1553 rtx incoming_args, result, reg, dest, src, call_insn;
1554 rtx old_stack_level = 0;
1555 rtx call_fusage = 0;
1556 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1557
1558 arguments = convert_memory_address (Pmode, arguments);
1559
1560 /* Create a block where the return registers can be saved. */
1561 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1562
1563 /* Fetch the arg pointer from the ARGUMENTS block. */
1564 incoming_args = gen_reg_rtx (Pmode);
1565 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1566 #ifndef STACK_GROWS_DOWNWARD
1567 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1568 incoming_args, 0, OPTAB_LIB_WIDEN);
1569 #endif
1570
1571 /* Push a new argument block and copy the arguments. Do not allow
1572 the (potential) memcpy call below to interfere with our stack
1573 manipulations. */
1574 do_pending_stack_adjust ();
1575 NO_DEFER_POP;
1576
1577 /* Save the stack with nonlocal if available. */
1578 #ifdef HAVE_save_stack_nonlocal
1579 if (HAVE_save_stack_nonlocal)
1580 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1581 else
1582 #endif
1583 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1584
1585 /* Allocate a block of memory onto the stack and copy the memory
1586 arguments to the outgoing arguments address. We can pass TRUE
1587 as the 4th argument because we just saved the stack pointer
1588 and will restore it right after the call. */
1589 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1590
1591 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1592 may have already set current_function_calls_alloca to true.
1593 current_function_calls_alloca won't be set if argsize is zero,
1594 so we have to guarantee need_drap is true here. */
1595 if (SUPPORTS_STACK_ALIGNMENT)
1596 crtl->need_drap = true;
1597
1598 dest = virtual_outgoing_args_rtx;
1599 #ifndef STACK_GROWS_DOWNWARD
1600 if (CONST_INT_P (argsize))
1601 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1602 else
1603 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1604 #endif
1605 dest = gen_rtx_MEM (BLKmode, dest);
1606 set_mem_align (dest, PARM_BOUNDARY);
1607 src = gen_rtx_MEM (BLKmode, incoming_args);
1608 set_mem_align (src, PARM_BOUNDARY);
1609 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1610
1611 /* Refer to the argument block. */
1612 apply_args_size ();
1613 arguments = gen_rtx_MEM (BLKmode, arguments);
1614 set_mem_align (arguments, PARM_BOUNDARY);
1615
1616 /* Walk past the arg-pointer and structure value address. */
1617 size = GET_MODE_SIZE (Pmode);
1618 if (struct_value)
1619 size += GET_MODE_SIZE (Pmode);
1620
1621 /* Restore each of the registers previously saved. Make USE insns
1622 for each of these registers for use in making the call. */
1623 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1624 if ((mode = apply_args_mode[regno]) != VOIDmode)
1625 {
1626 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1627 if (size % align != 0)
1628 size = CEIL (size, align) * align;
1629 reg = gen_rtx_REG (mode, regno);
1630 emit_move_insn (reg, adjust_address (arguments, mode, size));
1631 use_reg (&call_fusage, reg);
1632 size += GET_MODE_SIZE (mode);
1633 }
1634
1635 /* Restore the structure value address unless this is passed as an
1636 "invisible" first argument. */
1637 size = GET_MODE_SIZE (Pmode);
1638 if (struct_value)
1639 {
1640 rtx value = gen_reg_rtx (Pmode);
1641 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1642 emit_move_insn (struct_value, value);
1643 if (REG_P (struct_value))
1644 use_reg (&call_fusage, struct_value);
1645 size += GET_MODE_SIZE (Pmode);
1646 }
1647
1648 /* All arguments and registers used for the call are set up by now! */
1649 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1650
1651 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1652 needs to be done, and we don't want to load it into a register as an
1653 optimization, because prepare_call_address already did that if appropriate. */
1654 if (GET_CODE (function) != SYMBOL_REF)
1655 function = memory_address (FUNCTION_MODE, function);
1656
1657 /* Generate the actual call instruction and save the return value. */
1658 #ifdef HAVE_untyped_call
1659 if (HAVE_untyped_call)
1660 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1661 result, result_vector (1, result)));
1662 else
1663 #endif
1664 #ifdef HAVE_call_value
1665 if (HAVE_call_value)
1666 {
1667 rtx valreg = 0;
1668
1669 /* Locate the unique return register. It is not possible to
1670 express a call that sets more than one return register using
1671 call_value; use untyped_call for that. In fact, untyped_call
1672 only needs to save the return registers in the given block. */
1673 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1674 if ((mode = apply_result_mode[regno]) != VOIDmode)
1675 {
1676 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1677
1678 valreg = gen_rtx_REG (mode, regno);
1679 }
1680
1681 emit_call_insn (GEN_CALL_VALUE (valreg,
1682 gen_rtx_MEM (FUNCTION_MODE, function),
1683 const0_rtx, NULL_RTX, const0_rtx));
1684
1685 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1686 }
1687 else
1688 #endif
1689 gcc_unreachable ();
1690
1691 /* Find the CALL insn we just emitted, and attach the register usage
1692 information. */
1693 call_insn = last_call_insn ();
1694 add_function_usage_to (call_insn, call_fusage);
1695
1696 /* Restore the stack. */
1697 #ifdef HAVE_save_stack_nonlocal
1698 if (HAVE_save_stack_nonlocal)
1699 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1700 else
1701 #endif
1702 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1703 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1704
1705 OK_DEFER_POP;
1706
1707 /* Return the address of the result block. */
1708 result = copy_addr_to_reg (XEXP (result, 0));
1709 return convert_memory_address (ptr_mode, result);
1710 }
1711
1712 /* Perform an untyped return. */
1713
1714 static void
1715 expand_builtin_return (rtx result)
1716 {
1717 int size, align, regno;
1718 enum machine_mode mode;
1719 rtx reg;
1720 rtx call_fusage = 0;
1721
1722 result = convert_memory_address (Pmode, result);
1723
1724 apply_result_size ();
1725 result = gen_rtx_MEM (BLKmode, result);
1726
1727 #ifdef HAVE_untyped_return
1728 if (HAVE_untyped_return)
1729 {
1730 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1731 emit_barrier ();
1732 return;
1733 }
1734 #endif
1735
1736 /* Restore the return value and note that each value is used. */
1737 size = 0;
1738 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1739 if ((mode = apply_result_mode[regno]) != VOIDmode)
1740 {
1741 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1742 if (size % align != 0)
1743 size = CEIL (size, align) * align;
1744 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1745 emit_move_insn (reg, adjust_address (result, mode, size));
1746
1747 push_to_sequence (call_fusage);
1748 emit_use (reg);
1749 call_fusage = get_insns ();
1750 end_sequence ();
1751 size += GET_MODE_SIZE (mode);
1752 }
1753
1754 /* Put the USE insns before the return. */
1755 emit_insn (call_fusage);
1756
1757 /* Return whatever values were restored by jumping directly to the end
1758 of the function. */
1759 expand_naked_return ();
1760 }
1761
1762 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1763
1764 static enum type_class
1765 type_to_class (tree type)
1766 {
1767 switch (TREE_CODE (type))
1768 {
1769 case VOID_TYPE: return void_type_class;
1770 case INTEGER_TYPE: return integer_type_class;
1771 case ENUMERAL_TYPE: return enumeral_type_class;
1772 case BOOLEAN_TYPE: return boolean_type_class;
1773 case POINTER_TYPE: return pointer_type_class;
1774 case REFERENCE_TYPE: return reference_type_class;
1775 case OFFSET_TYPE: return offset_type_class;
1776 case REAL_TYPE: return real_type_class;
1777 case COMPLEX_TYPE: return complex_type_class;
1778 case FUNCTION_TYPE: return function_type_class;
1779 case METHOD_TYPE: return method_type_class;
1780 case RECORD_TYPE: return record_type_class;
1781 case UNION_TYPE:
1782 case QUAL_UNION_TYPE: return union_type_class;
1783 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1784 ? string_type_class : array_type_class);
1785 case LANG_TYPE: return lang_type_class;
1786 default: return no_type_class;
1787 }
1788 }
1789
1790 /* Expand a call EXP to __builtin_classify_type. */
1791
1792 static rtx
1793 expand_builtin_classify_type (tree exp)
1794 {
1795 if (call_expr_nargs (exp))
1796 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1797 return GEN_INT (no_type_class);
1798 }
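
/* For example (illustrative only), a call such as

     int k = __builtin_classify_type (3.14);

   expands directly to the integer constant real_type_class; only the
   static type of the argument matters here, so no code is emitted for
   the argument itself.  */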
1799
1800 /* This helper macro, meant to be used in mathfn_built_in below,
1801 determines which among a set of three builtin math functions is
1802 appropriate for a given type mode. The `F' and `L' cases are
1803 automatically generated from the `double' case. */
1804 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1805 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1806 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F; \
1807 fcodel = BUILT_IN_MATHFN##L; break;
1808 /* Similar to above, but appends _R after any F/L suffix. */
1809 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1810 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1811 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R; \
1812 fcodel = BUILT_IN_MATHFN##L_R; break;
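
/* For illustration, CASE_MATHFN (BUILT_IN_SQRT) expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   covering the double, float and long double variants of sqrt with a
   single macro invocation.  */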
1813
1814 /* Return the mathematical function equivalent to FN but operating directly on
1815 TYPE, if available. If IMPLICIT_P is true use the implicit builtin declaration,
1816 otherwise use the explicit declaration. If we can't do the conversion,
1817 return zero. */
1818
1819 static tree
1820 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1821 {
1822 enum built_in_function fcode, fcodef, fcodel, fcode2;
1823
1824 switch (fn)
1825 {
1826 CASE_MATHFN (BUILT_IN_ACOS)
1827 CASE_MATHFN (BUILT_IN_ACOSH)
1828 CASE_MATHFN (BUILT_IN_ASIN)
1829 CASE_MATHFN (BUILT_IN_ASINH)
1830 CASE_MATHFN (BUILT_IN_ATAN)
1831 CASE_MATHFN (BUILT_IN_ATAN2)
1832 CASE_MATHFN (BUILT_IN_ATANH)
1833 CASE_MATHFN (BUILT_IN_CBRT)
1834 CASE_MATHFN (BUILT_IN_CEIL)
1835 CASE_MATHFN (BUILT_IN_CEXPI)
1836 CASE_MATHFN (BUILT_IN_COPYSIGN)
1837 CASE_MATHFN (BUILT_IN_COS)
1838 CASE_MATHFN (BUILT_IN_COSH)
1839 CASE_MATHFN (BUILT_IN_DREM)
1840 CASE_MATHFN (BUILT_IN_ERF)
1841 CASE_MATHFN (BUILT_IN_ERFC)
1842 CASE_MATHFN (BUILT_IN_EXP)
1843 CASE_MATHFN (BUILT_IN_EXP10)
1844 CASE_MATHFN (BUILT_IN_EXP2)
1845 CASE_MATHFN (BUILT_IN_EXPM1)
1846 CASE_MATHFN (BUILT_IN_FABS)
1847 CASE_MATHFN (BUILT_IN_FDIM)
1848 CASE_MATHFN (BUILT_IN_FLOOR)
1849 CASE_MATHFN (BUILT_IN_FMA)
1850 CASE_MATHFN (BUILT_IN_FMAX)
1851 CASE_MATHFN (BUILT_IN_FMIN)
1852 CASE_MATHFN (BUILT_IN_FMOD)
1853 CASE_MATHFN (BUILT_IN_FREXP)
1854 CASE_MATHFN (BUILT_IN_GAMMA)
1855 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1856 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1857 CASE_MATHFN (BUILT_IN_HYPOT)
1858 CASE_MATHFN (BUILT_IN_ILOGB)
1859 CASE_MATHFN (BUILT_IN_ICEIL)
1860 CASE_MATHFN (BUILT_IN_IFLOOR)
1861 CASE_MATHFN (BUILT_IN_INF)
1862 CASE_MATHFN (BUILT_IN_IRINT)
1863 CASE_MATHFN (BUILT_IN_IROUND)
1864 CASE_MATHFN (BUILT_IN_ISINF)
1865 CASE_MATHFN (BUILT_IN_J0)
1866 CASE_MATHFN (BUILT_IN_J1)
1867 CASE_MATHFN (BUILT_IN_JN)
1868 CASE_MATHFN (BUILT_IN_LCEIL)
1869 CASE_MATHFN (BUILT_IN_LDEXP)
1870 CASE_MATHFN (BUILT_IN_LFLOOR)
1871 CASE_MATHFN (BUILT_IN_LGAMMA)
1872 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1873 CASE_MATHFN (BUILT_IN_LLCEIL)
1874 CASE_MATHFN (BUILT_IN_LLFLOOR)
1875 CASE_MATHFN (BUILT_IN_LLRINT)
1876 CASE_MATHFN (BUILT_IN_LLROUND)
1877 CASE_MATHFN (BUILT_IN_LOG)
1878 CASE_MATHFN (BUILT_IN_LOG10)
1879 CASE_MATHFN (BUILT_IN_LOG1P)
1880 CASE_MATHFN (BUILT_IN_LOG2)
1881 CASE_MATHFN (BUILT_IN_LOGB)
1882 CASE_MATHFN (BUILT_IN_LRINT)
1883 CASE_MATHFN (BUILT_IN_LROUND)
1884 CASE_MATHFN (BUILT_IN_MODF)
1885 CASE_MATHFN (BUILT_IN_NAN)
1886 CASE_MATHFN (BUILT_IN_NANS)
1887 CASE_MATHFN (BUILT_IN_NEARBYINT)
1888 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1889 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1890 CASE_MATHFN (BUILT_IN_POW)
1891 CASE_MATHFN (BUILT_IN_POWI)
1892 CASE_MATHFN (BUILT_IN_POW10)
1893 CASE_MATHFN (BUILT_IN_REMAINDER)
1894 CASE_MATHFN (BUILT_IN_REMQUO)
1895 CASE_MATHFN (BUILT_IN_RINT)
1896 CASE_MATHFN (BUILT_IN_ROUND)
1897 CASE_MATHFN (BUILT_IN_SCALB)
1898 CASE_MATHFN (BUILT_IN_SCALBLN)
1899 CASE_MATHFN (BUILT_IN_SCALBN)
1900 CASE_MATHFN (BUILT_IN_SIGNBIT)
1901 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1902 CASE_MATHFN (BUILT_IN_SIN)
1903 CASE_MATHFN (BUILT_IN_SINCOS)
1904 CASE_MATHFN (BUILT_IN_SINH)
1905 CASE_MATHFN (BUILT_IN_SQRT)
1906 CASE_MATHFN (BUILT_IN_TAN)
1907 CASE_MATHFN (BUILT_IN_TANH)
1908 CASE_MATHFN (BUILT_IN_TGAMMA)
1909 CASE_MATHFN (BUILT_IN_TRUNC)
1910 CASE_MATHFN (BUILT_IN_Y0)
1911 CASE_MATHFN (BUILT_IN_Y1)
1912 CASE_MATHFN (BUILT_IN_YN)
1913
1914 default:
1915 return NULL_TREE;
1916 }
1917
1918 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1919 fcode2 = fcode;
1920 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1921 fcode2 = fcodef;
1922 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1923 fcode2 = fcodel;
1924 else
1925 return NULL_TREE;
1926
1927 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1928 return NULL_TREE;
1929
1930 return builtin_decl_explicit (fcode2);
1931 }
1932
1933 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1934
1935 tree
1936 mathfn_built_in (tree type, enum built_in_function fn)
1937 {
1938 return mathfn_built_in_1 (type, fn, /*implicit_p=*/ 1);
1939 }
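
/* For instance, mathfn_built_in (float_type_node, BUILT_IN_SQRT)
   yields the decl for sqrtf when the implicit declaration is
   available, and NULL_TREE otherwise.  */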
1940
1941 /* If errno must be maintained, expand the RTL to check if the result,
1942 TARGET, of a built-in function call, EXP, is NaN, and if so set
1943 errno to EDOM. */
1944
1945 static void
1946 expand_errno_check (tree exp, rtx target)
1947 {
1948 rtx lab = gen_label_rtx ();
1949
1950 /* Test the result; if it is NaN, set errno=EDOM because
1951 the argument was not in the domain. */
1952 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1953 NULL_RTX, NULL_RTX, lab,
1954 /* The jump is very likely. */
1955 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1956
1957 #ifdef TARGET_EDOM
1958 /* If this built-in doesn't throw an exception, set errno directly. */
1959 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1960 {
1961 #ifdef GEN_ERRNO_RTX
1962 rtx errno_rtx = GEN_ERRNO_RTX;
1963 #else
1964 rtx errno_rtx
1965 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1966 #endif
1967 emit_move_insn (errno_rtx,
1968 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1969 emit_label (lab);
1970 return;
1971 }
1972 #endif
1973
1974 /* Make sure the library call isn't expanded as a tail call. */
1975 CALL_EXPR_TAILCALL (exp) = 0;
1976
1977 /* We can't set errno=EDOM directly; let the library call do it.
1978 Pop the arguments right away in case the call gets deleted. */
1979 NO_DEFER_POP;
1980 expand_call (exp, target, 0);
1981 OK_DEFER_POP;
1982 emit_label (lab);
1983 }
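
/* In C terms, the sequence emitted above behaves roughly like

     if (result == result)	// true exactly when RESULT is not NaN
       goto lab;
     errno = EDOM;		// or re-issue the library call
   lab:;

   This is only a sketch; the exact code depends on TARGET_EDOM and
   GEN_ERRNO_RTX.  */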
1984
1985 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1986 Return NULL_RTX if a normal call should be emitted rather than expanding
1987 the function in-line. EXP is the expression that is a call to the builtin
1988 function; if convenient, the result should be placed in TARGET.
1989 SUBTARGET may be used as the target for computing one of EXP's operands. */
1990
1991 static rtx
1992 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1993 {
1994 optab builtin_optab;
1995 rtx op0, insns;
1996 tree fndecl = get_callee_fndecl (exp);
1997 enum machine_mode mode;
1998 bool errno_set = false;
1999 bool try_widening = false;
2000 tree arg;
2001
2002 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2003 return NULL_RTX;
2004
2005 arg = CALL_EXPR_ARG (exp, 0);
2006
2007 switch (DECL_FUNCTION_CODE (fndecl))
2008 {
2009 CASE_FLT_FN (BUILT_IN_SQRT):
2010 errno_set = ! tree_expr_nonnegative_p (arg);
2011 try_widening = true;
2012 builtin_optab = sqrt_optab;
2013 break;
2014 CASE_FLT_FN (BUILT_IN_EXP):
2015 errno_set = true; builtin_optab = exp_optab; break;
2016 CASE_FLT_FN (BUILT_IN_EXP10):
2017 CASE_FLT_FN (BUILT_IN_POW10):
2018 errno_set = true; builtin_optab = exp10_optab; break;
2019 CASE_FLT_FN (BUILT_IN_EXP2):
2020 errno_set = true; builtin_optab = exp2_optab; break;
2021 CASE_FLT_FN (BUILT_IN_EXPM1):
2022 errno_set = true; builtin_optab = expm1_optab; break;
2023 CASE_FLT_FN (BUILT_IN_LOGB):
2024 errno_set = true; builtin_optab = logb_optab; break;
2025 CASE_FLT_FN (BUILT_IN_LOG):
2026 errno_set = true; builtin_optab = log_optab; break;
2027 CASE_FLT_FN (BUILT_IN_LOG10):
2028 errno_set = true; builtin_optab = log10_optab; break;
2029 CASE_FLT_FN (BUILT_IN_LOG2):
2030 errno_set = true; builtin_optab = log2_optab; break;
2031 CASE_FLT_FN (BUILT_IN_LOG1P):
2032 errno_set = true; builtin_optab = log1p_optab; break;
2033 CASE_FLT_FN (BUILT_IN_ASIN):
2034 builtin_optab = asin_optab; break;
2035 CASE_FLT_FN (BUILT_IN_ACOS):
2036 builtin_optab = acos_optab; break;
2037 CASE_FLT_FN (BUILT_IN_TAN):
2038 builtin_optab = tan_optab; break;
2039 CASE_FLT_FN (BUILT_IN_ATAN):
2040 builtin_optab = atan_optab; break;
2041 CASE_FLT_FN (BUILT_IN_FLOOR):
2042 builtin_optab = floor_optab; break;
2043 CASE_FLT_FN (BUILT_IN_CEIL):
2044 builtin_optab = ceil_optab; break;
2045 CASE_FLT_FN (BUILT_IN_TRUNC):
2046 builtin_optab = btrunc_optab; break;
2047 CASE_FLT_FN (BUILT_IN_ROUND):
2048 builtin_optab = round_optab; break;
2049 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2050 builtin_optab = nearbyint_optab;
2051 if (flag_trapping_math)
2052 break;
2053 /* Else fall through and expand as rint. */
2054 CASE_FLT_FN (BUILT_IN_RINT):
2055 builtin_optab = rint_optab; break;
2056 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2057 builtin_optab = significand_optab; break;
2058 default:
2059 gcc_unreachable ();
2060 }
2061
2062 /* Make a suitable register to place result in. */
2063 mode = TYPE_MODE (TREE_TYPE (exp));
2064
2065 if (! flag_errno_math || ! HONOR_NANS (mode))
2066 errno_set = false;
2067
2068 /* Before working hard, check whether the instruction is available, but try
2069 to widen the mode for specific operations. */
2070 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2071 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2072 && (!errno_set || !optimize_insn_for_size_p ()))
2073 {
2074 rtx result = gen_reg_rtx (mode);
2075
2076 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2077 need to expand the argument again. This way, we will not perform
2078 side-effects more than once. */
2079 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2080
2081 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2082
2083 start_sequence ();
2084
2085 /* Compute into RESULT.
2086 Set RESULT to wherever the result comes back. */
2087 result = expand_unop (mode, builtin_optab, op0, result, 0);
2088
2089 if (result != 0)
2090 {
2091 if (errno_set)
2092 expand_errno_check (exp, result);
2093
2094 /* Output the entire sequence. */
2095 insns = get_insns ();
2096 end_sequence ();
2097 emit_insn (insns);
2098 return result;
2099 }
2100
2101 /* If we were unable to expand via the builtin, stop the sequence
2102 (without outputting the insns) and call the library function
2103 with the stabilized argument list. */
2104 end_sequence ();
2105 }
2106
2107 return expand_call (exp, target, target == const0_rtx);
2108 }
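
/* As an illustration: with -fno-math-errno, a call such as

     y = sqrt (x);

   is expanded here to a single instruction when the target provides a
   sqrtm2 named pattern; when errno must be maintained, the inline
   expansion is additionally followed by the NaN check emitted by
   expand_errno_check.  */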
2109
2110 /* Expand a call to one of the builtin binary math functions (pow, atan2, fmod, ldexp, etc.).
2111 Return NULL_RTX if a normal call should be emitted rather than expanding the
2112 function in-line. EXP is the expression that is a call to the builtin
2113 function; if convenient, the result should be placed in TARGET.
2114 SUBTARGET may be used as the target for computing one of EXP's
2115 operands. */
2116
2117 static rtx
2118 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2119 {
2120 optab builtin_optab;
2121 rtx op0, op1, insns, result;
2122 int op1_type = REAL_TYPE;
2123 tree fndecl = get_callee_fndecl (exp);
2124 tree arg0, arg1;
2125 enum machine_mode mode;
2126 bool errno_set = true;
2127
2128 switch (DECL_FUNCTION_CODE (fndecl))
2129 {
2130 CASE_FLT_FN (BUILT_IN_SCALBN):
2131 CASE_FLT_FN (BUILT_IN_SCALBLN):
2132 CASE_FLT_FN (BUILT_IN_LDEXP):
2133 op1_type = INTEGER_TYPE;
2134 default:
2135 break;
2136 }
2137
2138 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2139 return NULL_RTX;
2140
2141 arg0 = CALL_EXPR_ARG (exp, 0);
2142 arg1 = CALL_EXPR_ARG (exp, 1);
2143
2144 switch (DECL_FUNCTION_CODE (fndecl))
2145 {
2146 CASE_FLT_FN (BUILT_IN_POW):
2147 builtin_optab = pow_optab; break;
2148 CASE_FLT_FN (BUILT_IN_ATAN2):
2149 builtin_optab = atan2_optab; break;
2150 CASE_FLT_FN (BUILT_IN_SCALB):
2151 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2152 return 0;
2153 builtin_optab = scalb_optab; break;
2154 CASE_FLT_FN (BUILT_IN_SCALBN):
2155 CASE_FLT_FN (BUILT_IN_SCALBLN):
2156 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2157 return 0;
2158 /* Fall through... */
2159 CASE_FLT_FN (BUILT_IN_LDEXP):
2160 builtin_optab = ldexp_optab; break;
2161 CASE_FLT_FN (BUILT_IN_FMOD):
2162 builtin_optab = fmod_optab; break;
2163 CASE_FLT_FN (BUILT_IN_REMAINDER):
2164 CASE_FLT_FN (BUILT_IN_DREM):
2165 builtin_optab = remainder_optab; break;
2166 default:
2167 gcc_unreachable ();
2168 }
2169
2170 /* Make a suitable register to place result in. */
2171 mode = TYPE_MODE (TREE_TYPE (exp));
2172
2173 /* Before working hard, check whether the instruction is available. */
2174 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2175 return NULL_RTX;
2176
2177 result = gen_reg_rtx (mode);
2178
2179 if (! flag_errno_math || ! HONOR_NANS (mode))
2180 errno_set = false;
2181
2182 if (errno_set && optimize_insn_for_size_p ())
2183 return 0;
2184
2185 /* Always stabilize the argument list. */
2186 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2187 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2188
2189 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2190 op1 = expand_normal (arg1);
2191
2192 start_sequence ();
2193
2194 /* Compute into RESULT.
2195 Set RESULT to wherever the result comes back. */
2196 result = expand_binop (mode, builtin_optab, op0, op1,
2197 result, 0, OPTAB_DIRECT);
2198
2199 /* If we were unable to expand via the builtin, stop the sequence
2200 (without outputting the insns) and call the library function
2201 with the stabilized argument list. */
2202 if (result == 0)
2203 {
2204 end_sequence ();
2205 return expand_call (exp, target, target == const0_rtx);
2206 }
2207
2208 if (errno_set)
2209 expand_errno_check (exp, result);
2210
2211 /* Output the entire sequence. */
2212 insns = get_insns ();
2213 end_sequence ();
2214 emit_insn (insns);
2215
2216 return result;
2217 }
2218
2219 /* Expand a call to the builtin ternary math functions (fma).
2220 Return NULL_RTX if a normal call should be emitted rather than expanding the
2221 function in-line. EXP is the expression that is a call to the builtin
2222 function; if convenient, the result should be placed in TARGET.
2223 SUBTARGET may be used as the target for computing one of EXP's
2224 operands. */
2225
2226 static rtx
2227 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2228 {
2229 optab builtin_optab;
2230 rtx op0, op1, op2, insns, result;
2231 tree fndecl = get_callee_fndecl (exp);
2232 tree arg0, arg1, arg2;
2233 enum machine_mode mode;
2234
2235 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2236 return NULL_RTX;
2237
2238 arg0 = CALL_EXPR_ARG (exp, 0);
2239 arg1 = CALL_EXPR_ARG (exp, 1);
2240 arg2 = CALL_EXPR_ARG (exp, 2);
2241
2242 switch (DECL_FUNCTION_CODE (fndecl))
2243 {
2244 CASE_FLT_FN (BUILT_IN_FMA):
2245 builtin_optab = fma_optab; break;
2246 default:
2247 gcc_unreachable ();
2248 }
2249
2250 /* Make a suitable register to place result in. */
2251 mode = TYPE_MODE (TREE_TYPE (exp));
2252
2253 /* Before working hard, check whether the instruction is available. */
2254 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2255 return NULL_RTX;
2256
2257 result = gen_reg_rtx (mode);
2258
2259 /* Always stabilize the argument list. */
2260 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2261 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2262 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2263
2264 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2265 op1 = expand_normal (arg1);
2266 op2 = expand_normal (arg2);
2267
2268 start_sequence ();
2269
2270 /* Compute into RESULT.
2271 Set RESULT to wherever the result comes back. */
2272 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2273 result, 0);
2274
2275 /* If we were unable to expand via the builtin, stop the sequence
2276 (without outputting the insns) and call the library function
2277 with the stabilized argument list. */
2278 if (result == 0)
2279 {
2280 end_sequence ();
2281 return expand_call (exp, target, target == const0_rtx);
2282 }
2283
2284 /* Output the entire sequence. */
2285 insns = get_insns ();
2286 end_sequence ();
2287 emit_insn (insns);
2288
2289 return result;
2290 }
2291
2292 /* Expand a call to the builtin sin and cos math functions.
2293 Return NULL_RTX if a normal call should be emitted rather than expanding the
2294 function in-line. EXP is the expression that is a call to the builtin
2295 function; if convenient, the result should be placed in TARGET.
2296 SUBTARGET may be used as the target for computing one of EXP's
2297 operands. */
2298
2299 static rtx
2300 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2301 {
2302 optab builtin_optab;
2303 rtx op0, insns;
2304 tree fndecl = get_callee_fndecl (exp);
2305 enum machine_mode mode;
2306 tree arg;
2307
2308 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2309 return NULL_RTX;
2310
2311 arg = CALL_EXPR_ARG (exp, 0);
2312
2313 switch (DECL_FUNCTION_CODE (fndecl))
2314 {
2315 CASE_FLT_FN (BUILT_IN_SIN):
2316 CASE_FLT_FN (BUILT_IN_COS):
2317 builtin_optab = sincos_optab; break;
2318 default:
2319 gcc_unreachable ();
2320 }
2321
2322 /* Make a suitable register to place result in. */
2323 mode = TYPE_MODE (TREE_TYPE (exp));
2324
2325 /* Check if the sincos insn is available; otherwise fall back
2326 to the sin or cos insn. */
2327 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2328 switch (DECL_FUNCTION_CODE (fndecl))
2329 {
2330 CASE_FLT_FN (BUILT_IN_SIN):
2331 builtin_optab = sin_optab; break;
2332 CASE_FLT_FN (BUILT_IN_COS):
2333 builtin_optab = cos_optab; break;
2334 default:
2335 gcc_unreachable ();
2336 }
2337
2338 /* Before working hard, check whether the instruction is available. */
2339 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2340 {
2341 rtx result = gen_reg_rtx (mode);
2342
2343 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2344 need to expand the argument again. This way, we will not perform
2345 side-effects more than once. */
2346 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2347
2348 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2349
2350 start_sequence ();
2351
2352 /* Compute into RESULT.
2353 Set RESULT to wherever the result comes back. */
2354 if (builtin_optab == sincos_optab)
2355 {
2356 int ok;
2357
2358 switch (DECL_FUNCTION_CODE (fndecl))
2359 {
2360 CASE_FLT_FN (BUILT_IN_SIN):
2361 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2362 break;
2363 CASE_FLT_FN (BUILT_IN_COS):
2364 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2365 break;
2366 default:
2367 gcc_unreachable ();
2368 }
2369 gcc_assert (ok);
2370 }
2371 else
2372 result = expand_unop (mode, builtin_optab, op0, result, 0);
2373
2374 if (result != 0)
2375 {
2376 /* Output the entire sequence. */
2377 insns = get_insns ();
2378 end_sequence ();
2379 emit_insn (insns);
2380 return result;
2381 }
2382
2383 /* If we were unable to expand via the builtin, stop the sequence
2384 (without outputting the insns) and call the library function
2385 with the stabilized argument list. */
2386 end_sequence ();
2387 }
2388
2389 return expand_call (exp, target, target == const0_rtx);
2390 }
2391
2392 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2393 return an RTL instruction code that implements the functionality.
2394 If that isn't possible or available return CODE_FOR_nothing. */
2395
2396 static enum insn_code
2397 interclass_mathfn_icode (tree arg, tree fndecl)
2398 {
2399 bool errno_set = false;
2400 optab builtin_optab = unknown_optab;
2401 enum machine_mode mode;
2402
2403 switch (DECL_FUNCTION_CODE (fndecl))
2404 {
2405 CASE_FLT_FN (BUILT_IN_ILOGB):
2406 errno_set = true; builtin_optab = ilogb_optab; break;
2407 CASE_FLT_FN (BUILT_IN_ISINF):
2408 builtin_optab = isinf_optab; break;
2409 case BUILT_IN_ISNORMAL:
2410 case BUILT_IN_ISFINITE:
2411 CASE_FLT_FN (BUILT_IN_FINITE):
2412 case BUILT_IN_FINITED32:
2413 case BUILT_IN_FINITED64:
2414 case BUILT_IN_FINITED128:
2415 case BUILT_IN_ISINFD32:
2416 case BUILT_IN_ISINFD64:
2417 case BUILT_IN_ISINFD128:
2418 /* These builtins have no optabs (yet). */
2419 break;
2420 default:
2421 gcc_unreachable ();
2422 }
2423
2424 /* There's no easy way to detect the case we need to set EDOM. */
2425 if (flag_errno_math && errno_set)
2426 return CODE_FOR_nothing;
2427
2428 /* Optab mode depends on the mode of the input argument. */
2429 mode = TYPE_MODE (TREE_TYPE (arg));
2430
2431 if (builtin_optab)
2432 return optab_handler (builtin_optab, mode);
2433 return CODE_FOR_nothing;
2434 }
2435
2436 /* Expand a call to one of the builtin math functions that operate on
2437 a floating point argument and output an integer result (ilogb, isinf,
2438 isnan, etc).
2439 Return 0 if a normal call should be emitted rather than expanding the
2440 function in-line. EXP is the expression that is a call to the builtin
2441 function; if convenient, the result should be placed in TARGET. */
2442
2443 static rtx
2444 expand_builtin_interclass_mathfn (tree exp, rtx target)
2445 {
2446 enum insn_code icode = CODE_FOR_nothing;
2447 rtx op0;
2448 tree fndecl = get_callee_fndecl (exp);
2449 enum machine_mode mode;
2450 tree arg;
2451
2452 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2453 return NULL_RTX;
2454
2455 arg = CALL_EXPR_ARG (exp, 0);
2456 icode = interclass_mathfn_icode (arg, fndecl);
2457 mode = TYPE_MODE (TREE_TYPE (arg));
2458
2459 if (icode != CODE_FOR_nothing)
2460 {
2461 struct expand_operand ops[1];
2462 rtx last = get_last_insn ();
2463 tree orig_arg = arg;
2464
2465 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2466 need to expand the argument again. This way, we will not perform
2467 side-effects more than once. */
2468 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2469
2470 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2471
2472 if (mode != GET_MODE (op0))
2473 op0 = convert_to_mode (mode, op0, 0);
2474
2475 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2476 if (maybe_legitimize_operands (icode, 0, 1, ops)
2477 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2478 return ops[0].value;
2479
2480 delete_insns_since (last);
2481 CALL_EXPR_ARG (exp, 0) = orig_arg;
2482 }
2483
2484 return NULL_RTX;
2485 }
2486
2487 /* Expand a call to the builtin sincos math function.
2488 Return NULL_RTX if a normal call should be emitted rather than expanding the
2489 function in-line. EXP is the expression that is a call to the builtin
2490 function. */
2491
2492 static rtx
2493 expand_builtin_sincos (tree exp)
2494 {
2495 rtx op0, op1, op2, target1, target2;
2496 enum machine_mode mode;
2497 tree arg, sinp, cosp;
2498 int result;
2499 location_t loc = EXPR_LOCATION (exp);
2500 tree alias_type, alias_off;
2501
2502 if (!validate_arglist (exp, REAL_TYPE,
2503 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2504 return NULL_RTX;
2505
2506 arg = CALL_EXPR_ARG (exp, 0);
2507 sinp = CALL_EXPR_ARG (exp, 1);
2508 cosp = CALL_EXPR_ARG (exp, 2);
2509
2510 /* Make a suitable register to place result in. */
2511 mode = TYPE_MODE (TREE_TYPE (arg));
2512
2513 /* Check if sincos insn is available, otherwise emit the call. */
2514 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2515 return NULL_RTX;
2516
2517 target1 = gen_reg_rtx (mode);
2518 target2 = gen_reg_rtx (mode);
2519
2520 op0 = expand_normal (arg);
2521 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2522 alias_off = build_int_cst (alias_type, 0);
2523 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2524 sinp, alias_off));
2525 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2526 cosp, alias_off));
2527
2528 /* Compute into target1 and target2. */
2530 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2531 gcc_assert (result);
2532
2533 /* Move target1 and target2 to the memory locations indicated
2534 by op1 and op2. */
2535 emit_move_insn (op1, target1);
2536 emit_move_insn (op2, target2);
2537
2538 return const0_rtx;
2539 }
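
/* Illustratively, when the target provides a sincos optab pattern,

     sincos (x, &s, &c);

   expands to a single instruction computing both values, whose two
   register outputs are then stored through the given pointers.  */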
2540
2541 /* Expand a call to the internal cexpi builtin to the sincos math function.
2542 EXP is the expression that is a call to the builtin function; if convenient,
2543 the result should be placed in TARGET. */
2544
2545 static rtx
2546 expand_builtin_cexpi (tree exp, rtx target)
2547 {
2548 tree fndecl = get_callee_fndecl (exp);
2549 tree arg, type;
2550 enum machine_mode mode;
2551 rtx op0, op1, op2;
2552 location_t loc = EXPR_LOCATION (exp);
2553
2554 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2555 return NULL_RTX;
2556
2557 arg = CALL_EXPR_ARG (exp, 0);
2558 type = TREE_TYPE (arg);
2559 mode = TYPE_MODE (TREE_TYPE (arg));
2560
2561 /* Try expanding via a sincos optab, fall back to emitting a libcall
2562 to sincos or cexp. We are sure sincos or cexp exists because cexpi
2563 is only generated from them, or when either is known to be available. */
2564 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2565 {
2566 op1 = gen_reg_rtx (mode);
2567 op2 = gen_reg_rtx (mode);
2568
2569 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2570
2571 /* Compute into op1 and op2. */
2572 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2573 }
2574 else if (targetm.libc_has_function (function_sincos))
2575 {
2576 tree call, fn = NULL_TREE;
2577 tree top1, top2;
2578 rtx op1a, op2a;
2579
2580 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2581 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2582 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2583 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2584 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2585 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2586 else
2587 gcc_unreachable ();
2588
2589 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2590 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2591 op1a = copy_addr_to_reg (XEXP (op1, 0));
2592 op2a = copy_addr_to_reg (XEXP (op2, 0));
2593 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2594 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2595
2596 /* Make sure not to fold the sincos call again. */
2597 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2598 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2599 call, 3, arg, top1, top2));
2600 }
2601 else
2602 {
2603 tree call, fn = NULL_TREE, narg;
2604 tree ctype = build_complex_type (type);
2605
2606 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2607 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2608 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2609 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2610 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2611 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2612 else
2613 gcc_unreachable ();
2614
2615 /* If we don't have a decl for cexp, create one. This is the
2616 friendliest fallback if the user calls __builtin_cexpi on a
2617 target without full C99 function support. */
2618 if (fn == NULL_TREE)
2619 {
2620 tree fntype;
2621 const char *name = NULL;
2622
2623 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2624 name = "cexpf";
2625 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2626 name = "cexp";
2627 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2628 name = "cexpl";
2629
2630 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2631 fn = build_fn_decl (name, fntype);
2632 }
2633
2634 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2635 build_real (type, dconst0), arg);
2636
2637 /* Make sure not to fold the cexp call again. */
2638 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2639 return expand_expr (build_call_nary (ctype, call, 1, narg),
2640 target, VOIDmode, EXPAND_NORMAL);
2641 }
2642
2643 /* Now build the proper return type. */
2644 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2645 make_tree (TREE_TYPE (arg), op2),
2646 make_tree (TREE_TYPE (arg), op1)),
2647 target, VOIDmode, EXPAND_NORMAL);
2648 }
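
/* The identity relied on above: __builtin_cexpi (x) computes
   cos (x) + i*sin (x), i.e. e**(i*x), which is why the cos and sin
   results map to the real and imaginary parts of the returned
   complex value.  */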
2649
2650 /* Conveniently construct a function call expression. FNDECL names the
2651 function to be called, N is the number of arguments, and the "..."
2652 parameters are the argument expressions. Unlike build_call_expr
2653 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2654
2655 static tree
2656 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2657 {
2658 va_list ap;
2659 tree fntype = TREE_TYPE (fndecl);
2660 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2661
2662 va_start (ap, n);
2663 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2664 va_end (ap);
2665 SET_EXPR_LOCATION (fn, loc);
2666 return fn;
2667 }
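
/* For example, the mempcpy-to-memcpy transformation below uses this
   helper as

     build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3, dest, src, len)

   to obtain a CALL_EXPR that will not be folded away before it is
   expanded.  */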
2668
2669 /* Expand a call to one of the builtin rounding functions gcc defines
2670 as an extension (lfloor and lceil). As these are gcc extensions we
2671 do not need to worry about setting errno to EDOM.
2672 If expanding via optab fails, lower expression to (int)(floor(x)).
2673 EXP is the expression that is a call to the builtin function;
2674 if convenient, the result should be placed in TARGET. */
2675
2676 static rtx
2677 expand_builtin_int_roundingfn (tree exp, rtx target)
2678 {
2679 convert_optab builtin_optab;
2680 rtx op0, insns, tmp;
2681 tree fndecl = get_callee_fndecl (exp);
2682 enum built_in_function fallback_fn;
2683 tree fallback_fndecl;
2684 enum machine_mode mode;
2685 tree arg;
2686
2687 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2688 gcc_unreachable ();
2689
2690 arg = CALL_EXPR_ARG (exp, 0);
2691
2692 switch (DECL_FUNCTION_CODE (fndecl))
2693 {
2694 CASE_FLT_FN (BUILT_IN_ICEIL):
2695 CASE_FLT_FN (BUILT_IN_LCEIL):
2696 CASE_FLT_FN (BUILT_IN_LLCEIL):
2697 builtin_optab = lceil_optab;
2698 fallback_fn = BUILT_IN_CEIL;
2699 break;
2700
2701 CASE_FLT_FN (BUILT_IN_IFLOOR):
2702 CASE_FLT_FN (BUILT_IN_LFLOOR):
2703 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2704 builtin_optab = lfloor_optab;
2705 fallback_fn = BUILT_IN_FLOOR;
2706 break;
2707
2708 default:
2709 gcc_unreachable ();
2710 }
2711
2712 /* Make a suitable register to place result in. */
2713 mode = TYPE_MODE (TREE_TYPE (exp));
2714
2715 target = gen_reg_rtx (mode);
2716
2717 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2718 need to expand the argument again. This way, we will not perform
2719 side-effects more than once. */
2720 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2721
2722 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2723
2724 start_sequence ();
2725
2726 /* Compute into TARGET. */
2727 if (expand_sfix_optab (target, op0, builtin_optab))
2728 {
2729 /* Output the entire sequence. */
2730 insns = get_insns ();
2731 end_sequence ();
2732 emit_insn (insns);
2733 return target;
2734 }
2735
2736 /* If we were unable to expand via the builtin, stop the sequence
2737 (without outputting the insns). */
2738 end_sequence ();
2739
2740 /* Fall back to floating point rounding optab. */
2741 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2742
2743 /* For non-C99 targets we may end up without a fallback fndecl here
2744 if the user called __builtin_lfloor directly. In this case emit
2745 a call to the floor/ceil variants nevertheless. This should result
2746 in the best user experience for targets lacking full C99 support. */
2747 if (fallback_fndecl == NULL_TREE)
2748 {
2749 tree fntype;
2750 const char *name = NULL;
2751
2752 switch (DECL_FUNCTION_CODE (fndecl))
2753 {
2754 case BUILT_IN_ICEIL:
2755 case BUILT_IN_LCEIL:
2756 case BUILT_IN_LLCEIL:
2757 name = "ceil";
2758 break;
2759 case BUILT_IN_ICEILF:
2760 case BUILT_IN_LCEILF:
2761 case BUILT_IN_LLCEILF:
2762 name = "ceilf";
2763 break;
2764 case BUILT_IN_ICEILL:
2765 case BUILT_IN_LCEILL:
2766 case BUILT_IN_LLCEILL:
2767 name = "ceill";
2768 break;
2769 case BUILT_IN_IFLOOR:
2770 case BUILT_IN_LFLOOR:
2771 case BUILT_IN_LLFLOOR:
2772 name = "floor";
2773 break;
2774 case BUILT_IN_IFLOORF:
2775 case BUILT_IN_LFLOORF:
2776 case BUILT_IN_LLFLOORF:
2777 name = "floorf";
2778 break;
2779 case BUILT_IN_IFLOORL:
2780 case BUILT_IN_LFLOORL:
2781 case BUILT_IN_LLFLOORL:
2782 name = "floorl";
2783 break;
2784 default:
2785 gcc_unreachable ();
2786 }
2787
2788 fntype = build_function_type_list (TREE_TYPE (arg),
2789 TREE_TYPE (arg), NULL_TREE);
2790 fallback_fndecl = build_fn_decl (name, fntype);
2791 }
2792
2793 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2794
2795 tmp = expand_normal (exp);
2796 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2797
2798 /* Truncate the result of floating point optab to integer
2799 via expand_fix (). */
2800 target = gen_reg_rtx (mode);
2801 expand_fix (target, tmp, 0);
2802
2803 return target;
2804 }
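
/* Illustration: on a target without an lfloor optab pattern,

     long l = __builtin_lfloor (x);

   is lowered by the code above to the equivalent of

     l = (long) floor (x);

   via the fallback fndecl and expand_fix.  */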
2805
2806 /* Expand a call to one of the builtin math functions doing integer
2807 conversion (lrint).
2808 Return 0 if a normal call should be emitted rather than expanding the
2809 function in-line. EXP is the expression that is a call to the builtin
2810 function; if convenient, the result should be placed in TARGET. */
2811
2812 static rtx
2813 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2814 {
2815 convert_optab builtin_optab;
2816 rtx op0, insns;
2817 tree fndecl = get_callee_fndecl (exp);
2818 tree arg;
2819 enum machine_mode mode;
2820 enum built_in_function fallback_fn = BUILT_IN_NONE;
2821
2822 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2823 gcc_unreachable ();
2824
2825 arg = CALL_EXPR_ARG (exp, 0);
2826
2827 switch (DECL_FUNCTION_CODE (fndecl))
2828 {
2829 CASE_FLT_FN (BUILT_IN_IRINT):
2830 fallback_fn = BUILT_IN_LRINT;
2831 /* FALLTHRU */
2832 CASE_FLT_FN (BUILT_IN_LRINT):
2833 CASE_FLT_FN (BUILT_IN_LLRINT):
2834 builtin_optab = lrint_optab;
2835 break;
2836
2837 CASE_FLT_FN (BUILT_IN_IROUND):
2838 fallback_fn = BUILT_IN_LROUND;
2839 /* FALLTHRU */
2840 CASE_FLT_FN (BUILT_IN_LROUND):
2841 CASE_FLT_FN (BUILT_IN_LLROUND):
2842 builtin_optab = lround_optab;
2843 break;
2844
2845 default:
2846 gcc_unreachable ();
2847 }
2848
2849 /* There's no easy way to detect the case we need to set EDOM. */
2850 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2851 return NULL_RTX;
2852
2853 /* Make a suitable register to place result in. */
2854 mode = TYPE_MODE (TREE_TYPE (exp));
2855
2856 /* If errno does not need to be maintained, try expanding inline. */
2857 if (!flag_errno_math)
2858 {
2859 rtx result = gen_reg_rtx (mode);
2860
2861 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2862 need to expand the argument again. This way, we will not perform
2863 side-effects more than once. */
2864 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2865
2866 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2867
2868 start_sequence ();
2869
2870 if (expand_sfix_optab (result, op0, builtin_optab))
2871 {
2872 /* Output the entire sequence. */
2873 insns = get_insns ();
2874 end_sequence ();
2875 emit_insn (insns);
2876 return result;
2877 }
2878
2879 /* If we were unable to expand via the builtin, stop the sequence
2880 (without outputting the insns) and call the library function
2881 with the stabilized argument list. */
2882 end_sequence ();
2883 }
2884
2885 if (fallback_fn != BUILT_IN_NONE)
2886 {
2887 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2888 targets, (int) round (x) should never be transformed into
2889 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2890 a call to lround in the hope that the target provides at least some
2891 C99 functions. This should result in the best user experience for
2892 targets lacking full C99 support. */
2893 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2894 fallback_fn, 0);
2895
2896 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2897 fallback_fndecl, 1, arg);
2898
2899 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2900 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2901 return convert_to_mode (mode, target, 0);
2902 }
2903
2904 return expand_call (exp, target, target == const0_rtx);
2905 }
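
/* For example, when the lround optab expansion is unavailable or
   errno must be honored, __builtin_iround (x) is emitted by the code
   above as a library call to lround whose result is then converted
   to int.  */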
2906
2907 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2908 a normal call should be emitted rather than expanding the function
2909 in-line. EXP is the expression that is a call to the builtin
2910 function; if convenient, the result should be placed in TARGET. */
2911
2912 static rtx
2913 expand_builtin_powi (tree exp, rtx target)
2914 {
2915 tree arg0, arg1;
2916 rtx op0, op1;
2917 enum machine_mode mode;
2918 enum machine_mode mode2;
2919
2920 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2921 return NULL_RTX;
2922
2923 arg0 = CALL_EXPR_ARG (exp, 0);
2924 arg1 = CALL_EXPR_ARG (exp, 1);
2925 mode = TYPE_MODE (TREE_TYPE (exp));
2926
2927 /* Emit a libcall to libgcc. */
2928
2929 /* Mode of the 2nd argument must match that of an int. */
2930 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2931
2932 if (target == NULL_RTX)
2933 target = gen_reg_rtx (mode);
2934
2935 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2936 if (GET_MODE (op0) != mode)
2937 op0 = convert_to_mode (mode, op0, 0);
2938 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2939 if (GET_MODE (op1) != mode2)
2940 op1 = convert_to_mode (mode2, op1, 0);
2941
2942 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2943 target, LCT_CONST, mode, 2,
2944 op0, mode, op1, mode2);
2945
2946 return target;
2947 }
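
/* For example, __builtin_powi (x, n) with x a double becomes a call
   to libgcc's __powidf2 (the optab_libfunc for powi_optab in DFmode),
   with N first converted to the mode of an int.  */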
2948
2949 /* Expand expression EXP, which is a call to the strlen builtin. Return
2950 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2951 try to get the result in TARGET, if convenient. */
2952
2953 static rtx
2954 expand_builtin_strlen (tree exp, rtx target,
2955 enum machine_mode target_mode)
2956 {
2957 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2958 return NULL_RTX;
2959 else
2960 {
2961 struct expand_operand ops[4];
2962 rtx pat;
2963 tree len;
2964 tree src = CALL_EXPR_ARG (exp, 0);
2965 rtx src_reg, before_strlen;
2966 enum machine_mode insn_mode = target_mode;
2967 enum insn_code icode = CODE_FOR_nothing;
2968 unsigned int align;
2969
2970 /* If the length can be computed at compile-time, return it. */
2971 len = c_strlen (src, 0);
2972 if (len)
2973 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2974
2975 /* If the length can be computed at compile-time and is a constant
2976 integer, but there are side-effects in src, evaluate
2977 src for side-effects, then return len.
2978 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2979 can be optimized into: i++; x = 3; */
2980 len = c_strlen (src, 1);
2981 if (len && TREE_CODE (len) == INTEGER_CST)
2982 {
2983 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2984 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2985 }
2986
2987 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2988
2989 /* If SRC is not a pointer type, don't do this operation inline. */
2990 if (align == 0)
2991 return NULL_RTX;
2992
2993 /* Bail out if we can't compute strlen in the right mode. */
2994 while (insn_mode != VOIDmode)
2995 {
2996 icode = optab_handler (strlen_optab, insn_mode);
2997 if (icode != CODE_FOR_nothing)
2998 break;
2999
3000 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3001 }
3002 if (insn_mode == VOIDmode)
3003 return NULL_RTX;
3004
3005 /* Make a place to hold the source address. We will not expand
3006 the actual source until we are sure that the expansion will
3007 not fail -- there are trees that cannot be expanded twice. */
3008 src_reg = gen_reg_rtx (Pmode);
3009
3010 /* Mark the beginning of the strlen sequence so we can emit the
3011 source operand later. */
3012 before_strlen = get_last_insn ();
3013
3014 create_output_operand (&ops[0], target, insn_mode);
3015 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3016 create_integer_operand (&ops[2], 0);
3017 create_integer_operand (&ops[3], align);
3018 if (!maybe_expand_insn (icode, 4, ops))
3019 return NULL_RTX;
3020
3021 /* Now that we are assured of success, expand the source. */
3022 start_sequence ();
3023 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3024 if (pat != src_reg)
3025 {
3026 #ifdef POINTERS_EXTEND_UNSIGNED
3027 if (GET_MODE (pat) != Pmode)
3028 pat = convert_to_mode (Pmode, pat,
3029 POINTERS_EXTEND_UNSIGNED);
3030 #endif
3031 emit_move_insn (src_reg, pat);
3032 }
3033 pat = get_insns ();
3034 end_sequence ();
3035
3036 if (before_strlen)
3037 emit_insn_after (pat, before_strlen);
3038 else
3039 emit_insn_before (pat, get_insns ());
3040
3041 /* Return the value in the proper mode for this function. */
3042 if (GET_MODE (ops[0].value) == target_mode)
3043 target = ops[0].value;
3044 else if (target != 0)
3045 convert_move (target, ops[0].value, 0);
3046 else
3047 target = convert_to_mode (target_mode, ops[0].value, 0);
3048
3049 return target;
3050 }
3051 }
3052
3053 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3054 bytes from constant string DATA + OFFSET and return it as target
3055 constant. */
3056
3057 static rtx
3058 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3059 enum machine_mode mode)
3060 {
3061 const char *str = (const char *) data;
3062
3063 gcc_assert (offset >= 0
3064 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3065 <= strlen (str) + 1));
3066
3067 return c_readstr (str + offset, mode);
3068 }
3069
3070 /* Expand a call EXP to the memcpy builtin.
3071 Return NULL_RTX if we failed; the caller should emit a normal call,
3072 otherwise try to get the result in TARGET, if convenient (and in
3073 mode MODE if that's convenient). */
3074
3075 static rtx
3076 expand_builtin_memcpy (tree exp, rtx target)
3077 {
3078 if (!validate_arglist (exp,
3079 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3080 return NULL_RTX;
3081 else
3082 {
3083 tree dest = CALL_EXPR_ARG (exp, 0);
3084 tree src = CALL_EXPR_ARG (exp, 1);
3085 tree len = CALL_EXPR_ARG (exp, 2);
3086 const char *src_str;
3087 unsigned int src_align = get_pointer_alignment (src);
3088 unsigned int dest_align = get_pointer_alignment (dest);
3089 rtx dest_mem, src_mem, dest_addr, len_rtx;
3090 HOST_WIDE_INT expected_size = -1;
3091 unsigned int expected_align = 0;
3092
3093 /* If DEST is not a pointer type, call the normal function. */
3094 if (dest_align == 0)
3095 return NULL_RTX;
3096
3097 /* If SRC is not a pointer type, don't do this
3098 operation in-line. */
3099 if (src_align == 0)
3100 return NULL_RTX;
3101
3102 if (currently_expanding_gimple_stmt)
3103 stringop_block_profile (currently_expanding_gimple_stmt,
3104 &expected_align, &expected_size);
3105
3106 if (expected_align < dest_align)
3107 expected_align = dest_align;
3108 dest_mem = get_memory_rtx (dest, len);
3109 set_mem_align (dest_mem, dest_align);
3110 len_rtx = expand_normal (len);
3111 src_str = c_getstr (src);
3112
3113 /* If SRC is a string constant and block move would be done
3114 by pieces, we can avoid loading the string from memory
3115 and only store the computed constants. */
3116 if (src_str
3117 && CONST_INT_P (len_rtx)
3118 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3119 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3120 CONST_CAST (char *, src_str),
3121 dest_align, false))
3122 {
3123 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3124 builtin_memcpy_read_str,
3125 CONST_CAST (char *, src_str),
3126 dest_align, false, 0);
3127 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3128 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3129 return dest_mem;
3130 }
3131
3132 src_mem = get_memory_rtx (src, len);
3133 set_mem_align (src_mem, src_align);
3134
3135 /* Copy word part most expediently. */
3136 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3137 CALL_EXPR_TAILCALL (exp)
3138 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3139 expected_align, expected_size);
3140
3141 if (dest_addr == 0)
3142 {
3143 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3144 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3145 }
3146 return dest_addr;
3147 }
3148 }
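
/* Illustration of the string-constant path above: for

     memcpy (buf, "hi", 3);

   with constant length and a suitably aligned destination,
   store_by_pieces emits the three bytes as immediate stores, so the
   string literal is never loaded from memory.  */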
3149
3150 /* Expand a call EXP to the mempcpy builtin.
3151 Return NULL_RTX if we failed; the caller should emit a normal call,
3152 otherwise try to get the result in TARGET, if convenient (and in
3153 mode MODE if that's convenient). If ENDP is 0 return the
3154 destination pointer, if ENDP is 1 return the end pointer ala
3155 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3156 stpcpy. */
3157
3158 static rtx
3159 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3160 {
3161 if (!validate_arglist (exp,
3162 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3163 return NULL_RTX;
3164 else
3165 {
3166 tree dest = CALL_EXPR_ARG (exp, 0);
3167 tree src = CALL_EXPR_ARG (exp, 1);
3168 tree len = CALL_EXPR_ARG (exp, 2);
3169 return expand_builtin_mempcpy_args (dest, src, len,
3170 target, mode, /*endp=*/ 1);
3171 }
3172 }
3173
3174 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3175 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3176 so that this can also be called without constructing an actual CALL_EXPR.
3177 The other arguments and return value are the same as for
3178 expand_builtin_mempcpy. */
3179
3180 static rtx
3181 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3182 rtx target, enum machine_mode mode, int endp)
3183 {
3184 /* If return value is ignored, transform mempcpy into memcpy. */
3185 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3186 {
3187 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3188 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3189 dest, src, len);
3190 return expand_expr (result, target, mode, EXPAND_NORMAL);
3191 }
3192 else
3193 {
3194 const char *src_str;
3195 unsigned int src_align = get_pointer_alignment (src);
3196 unsigned int dest_align = get_pointer_alignment (dest);
3197 rtx dest_mem, src_mem, len_rtx;
3198
3199 /* If either SRC or DEST is not a pointer type, don't do this
3200 operation in-line. */
3201 if (dest_align == 0 || src_align == 0)
3202 return NULL_RTX;
3203
3204 /* If LEN is not constant, call the normal function. */
3205 if (! host_integerp (len, 1))
3206 return NULL_RTX;
3207
3208 len_rtx = expand_normal (len);
3209 src_str = c_getstr (src);
3210
3211 /* If SRC is a string constant and block move would be done
3212 by pieces, we can avoid loading the string from memory
3213 and only store the computed constants. */
3214 if (src_str
3215 && CONST_INT_P (len_rtx)
3216 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3217 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3218 CONST_CAST (char *, src_str),
3219 dest_align, false))
3220 {
3221 dest_mem = get_memory_rtx (dest, len);
3222 set_mem_align (dest_mem, dest_align);
3223 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3224 builtin_memcpy_read_str,
3225 CONST_CAST (char *, src_str),
3226 dest_align, false, endp);
3227 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3228 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3229 return dest_mem;
3230 }
3231
3232 if (CONST_INT_P (len_rtx)
3233 && can_move_by_pieces (INTVAL (len_rtx),
3234 MIN (dest_align, src_align)))
3235 {
3236 dest_mem = get_memory_rtx (dest, len);
3237 set_mem_align (dest_mem, dest_align);
3238 src_mem = get_memory_rtx (src, len);
3239 set_mem_align (src_mem, src_align);
3240 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3241 MIN (dest_align, src_align), endp);
3242 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3243 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3244 return dest_mem;
3245 }
3246
3247 return NULL_RTX;
3248 }
3249 }
3250
3251 #ifndef HAVE_movstr
3252 # define HAVE_movstr 0
3253 # define CODE_FOR_movstr CODE_FOR_nothing
3254 #endif
3255
3256 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3257 we failed; the caller should emit a normal call, otherwise try to
3258 get the result in TARGET, if convenient. If ENDP is 0 return the
3259 destination pointer, if ENDP is 1 return the end pointer ala
3260 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3261 stpcpy. */
3262
3263 static rtx
3264 expand_movstr (tree dest, tree src, rtx target, int endp)
3265 {
3266 struct expand_operand ops[3];
3267 rtx dest_mem;
3268 rtx src_mem;
3269
3270 if (!HAVE_movstr)
3271 return NULL_RTX;
3272
3273 dest_mem = get_memory_rtx (dest, NULL);
3274 src_mem = get_memory_rtx (src, NULL);
3275 if (!endp)
3276 {
3277 target = force_reg (Pmode, XEXP (dest_mem, 0));
3278 dest_mem = replace_equiv_address (dest_mem, target);
3279 }
3280
3281 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3282 create_fixed_operand (&ops[1], dest_mem);
3283 create_fixed_operand (&ops[2], src_mem);
3284 expand_insn (CODE_FOR_movstr, 3, ops);
3285
3286 if (endp && target != const0_rtx)
3287 {
3288 target = ops[0].value;
3289 /* movstr is supposed to set end to the address of the NUL
3290 terminator. If the caller requested a mempcpy-like return value,
3291 adjust it. */
3292 if (endp == 1)
3293 {
3294 rtx tem = plus_constant (GET_MODE (target),
3295 gen_lowpart (GET_MODE (target), target), 1);
3296 emit_move_insn (target, force_operand (tem, NULL_RTX));
3297 }
3298 }
3299 return target;
3300 }
3301
3302 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3303 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3304 try to get the result in TARGET, if convenient (and in mode MODE if that's
3305 convenient). */
3306
3307 static rtx
3308 expand_builtin_strcpy (tree exp, rtx target)
3309 {
3310 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3311 {
3312 tree dest = CALL_EXPR_ARG (exp, 0);
3313 tree src = CALL_EXPR_ARG (exp, 1);
3314 return expand_builtin_strcpy_args (dest, src, target);
3315 }
3316 return NULL_RTX;
3317 }
3318
3319 /* Helper function to do the actual work for expand_builtin_strcpy. The
3320 arguments to the builtin_strcpy call DEST and SRC are broken out
3321 so that this can also be called without constructing an actual CALL_EXPR.
3322 The other arguments and return value are the same as for
3323 expand_builtin_strcpy. */
3324
3325 static rtx
3326 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3327 {
3328 return expand_movstr (dest, src, target, /*endp=*/0);
3329 }
3330
3331 /* Expand a call EXP to the stpcpy builtin.
3332 Return NULL_RTX if we failed; the caller should then emit a normal
3333 call. Otherwise try to get the result in TARGET, if convenient (and
3334 in mode MODE if that's convenient). */
3335
3336 static rtx
3337 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3338 {
3339 tree dst, src;
3340 location_t loc = EXPR_LOCATION (exp);
3341
3342 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3343 return NULL_RTX;
3344
3345 dst = CALL_EXPR_ARG (exp, 0);
3346 src = CALL_EXPR_ARG (exp, 1);
3347
3348 /* If return value is ignored, transform stpcpy into strcpy. */
3349 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3350 {
3351 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3352 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3353 return expand_expr (result, target, mode, EXPAND_NORMAL);
3354 }
3355 else
3356 {
3357 tree len, lenp1;
3358 rtx ret;
3359
3360 /* Ensure we get an actual string whose length can be evaluated at
3361 compile-time, not an expression containing a string. This is
3362 because the latter will potentially produce pessimized code
3363 when used to produce the return value. */
3364 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3365 return expand_movstr (dst, src, target, /*endp=*/2);
3366
3367 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3368 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3369 target, mode, /*endp=*/2);
3370
3371 if (ret)
3372 return ret;
3373
3374 if (TREE_CODE (len) == INTEGER_CST)
3375 {
3376 rtx len_rtx = expand_normal (len);
3377
3378 if (CONST_INT_P (len_rtx))
3379 {
3380 ret = expand_builtin_strcpy_args (dst, src, target);
3381
3382 if (ret)
3383 {
3384 if (! target)
3385 {
3386 if (mode != VOIDmode)
3387 target = gen_reg_rtx (mode);
3388 else
3389 target = gen_reg_rtx (GET_MODE (ret));
3390 }
3391 if (GET_MODE (target) != GET_MODE (ret))
3392 ret = gen_lowpart (GET_MODE (target), ret);
3393
3394 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3395 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3396 gcc_assert (ret);
3397
3398 return target;
3399 }
3400 }
3401 }
3402
3403 return expand_movstr (dst, src, target, /*endp=*/2);
3404 }
3405 }
3406
3407 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3408 bytes from constant string DATA + OFFSET and return it as target
3409 constant. */
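/* Note that offsets past the end of DATA read as zeros: for DATA == "hi"
   (strlen 2), any OFFSET greater than 2 yields const0_rtx, which is what
   supplies the zero padding that strncpy requires below. */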
3410
3411 rtx
3412 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3413 enum machine_mode mode)
3414 {
3415 const char *str = (const char *) data;
3416
3417 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3418 return const0_rtx;
3419
3420 return c_readstr (str + offset, mode);
3421 }
3422
3423 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3424 NULL_RTX if we failed; the caller should then emit a normal call. */
3425
3426 static rtx
3427 expand_builtin_strncpy (tree exp, rtx target)
3428 {
3429 location_t loc = EXPR_LOCATION (exp);
3430
3431 if (validate_arglist (exp,
3432 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3433 {
3434 tree dest = CALL_EXPR_ARG (exp, 0);
3435 tree src = CALL_EXPR_ARG (exp, 1);
3436 tree len = CALL_EXPR_ARG (exp, 2);
3437 tree slen = c_strlen (src, 1);
3438
3439 /* Both LEN and the length of SRC must be known constants. */
3440 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3441 return NULL_RTX;
3442
3443 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3444
3445 /* We're required to pad with trailing zeros if the requested
3446 len is greater than strlen(s2)+1. In that case try to
3447 use store_by_pieces; if that fails, punt. */
3448 if (tree_int_cst_lt (slen, len))
3449 {
3450 unsigned int dest_align = get_pointer_alignment (dest);
3451 const char *p = c_getstr (src);
3452 rtx dest_mem;
3453
3454 if (!p || dest_align == 0 || !host_integerp (len, 1)
3455 || !can_store_by_pieces (tree_low_cst (len, 1),
3456 builtin_strncpy_read_str,
3457 CONST_CAST (char *, p),
3458 dest_align, false))
3459 return NULL_RTX;
3460
3461 dest_mem = get_memory_rtx (dest, len);
3462 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3463 builtin_strncpy_read_str,
3464 CONST_CAST (char *, p), dest_align, false, 0);
3465 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3466 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3467 return dest_mem;
3468 }
3469 }
3470 return NULL_RTX;
3471 }
3472
3473 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3474 bytes from constant string DATA + OFFSET and return it as target
3475 constant. */
3476
3477 rtx
3478 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3479 enum machine_mode mode)
3480 {
3481 const char *c = (const char *) data;
3482 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3483
3484 memset (p, *c, GET_MODE_SIZE (mode));
3485
3486 return c_readstr (p, mode);
3487 }
3488
3489 /* Callback routine for store_by_pieces. Return the RTL of a register
3490 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3491 char value given in the RTL register data. For example, if mode is
3492 4 bytes wide, return the RTL for 0x01010101*data. */
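/* Concretely, replicating the byte value 0xAB into a 4-byte mode gives
   0x01010101 * 0xAB == 0xABABABAB. */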
3493
3494 static rtx
3495 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3496 enum machine_mode mode)
3497 {
3498 rtx target, coeff;
3499 size_t size;
3500 char *p;
3501
3502 size = GET_MODE_SIZE (mode);
3503 if (size == 1)
3504 return (rtx) data;
3505
3506 p = XALLOCAVEC (char, size);
3507 memset (p, 1, size);
3508 coeff = c_readstr (p, mode);
3509
3510 target = convert_to_mode (mode, (rtx) data, 1);
3511 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3512 return force_reg (mode, target);
3513 }
3514
3515 /* Expand expression EXP, which is a call to the memset builtin. Return
3516 NULL_RTX if we failed; the caller should then emit a normal call.
3517 Otherwise try to get the result in TARGET, if convenient (and in
3518 mode MODE if that's convenient). */
3519
3520 static rtx
3521 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3522 {
3523 if (!validate_arglist (exp,
3524 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3525 return NULL_RTX;
3526 else
3527 {
3528 tree dest = CALL_EXPR_ARG (exp, 0);
3529 tree val = CALL_EXPR_ARG (exp, 1);
3530 tree len = CALL_EXPR_ARG (exp, 2);
3531 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3532 }
3533 }
3534
3535 /* Helper function to do the actual work for expand_builtin_memset. The
3536 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3537 so that this can also be called without constructing an actual CALL_EXPR.
3538 The other arguments and return value are the same as for
3539 expand_builtin_memset. */
3540
3541 static rtx
3542 expand_builtin_memset_args (tree dest, tree val, tree len,
3543 rtx target, enum machine_mode mode, tree orig_exp)
3544 {
3545 tree fndecl, fn;
3546 enum built_in_function fcode;
3547 enum machine_mode val_mode;
3548 char c;
3549 unsigned int dest_align;
3550 rtx dest_mem, dest_addr, len_rtx;
3551 HOST_WIDE_INT expected_size = -1;
3552 unsigned int expected_align = 0;
3553
3554 dest_align = get_pointer_alignment (dest);
3555
3556 /* If DEST is not a pointer type, don't do this operation in-line. */
3557 if (dest_align == 0)
3558 return NULL_RTX;
3559
3560 if (currently_expanding_gimple_stmt)
3561 stringop_block_profile (currently_expanding_gimple_stmt,
3562 &expected_align, &expected_size);
3563
3564 if (expected_align < dest_align)
3565 expected_align = dest_align;
3566
3567 /* If the LEN parameter is zero, return DEST. */
3568 if (integer_zerop (len))
3569 {
3570 /* Evaluate and ignore VAL in case it has side-effects. */
3571 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3572 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3573 }
3574
3575 /* Stabilize the arguments in case we fail. */
3576 dest = builtin_save_expr (dest);
3577 val = builtin_save_expr (val);
3578 len = builtin_save_expr (len);
3579
3580 len_rtx = expand_normal (len);
3581 dest_mem = get_memory_rtx (dest, len);
3582 val_mode = TYPE_MODE (unsigned_char_type_node);
3583
3584 if (TREE_CODE (val) != INTEGER_CST)
3585 {
3586 rtx val_rtx;
3587
3588 val_rtx = expand_normal (val);
3589 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3590
3591 /* Assume that we can memset by pieces if we can store
3592 the coefficients by pieces (in the required modes).
3593 We can't pass builtin_memset_gen_str as that emits RTL. */
3594 c = 1;
3595 if (host_integerp (len, 1)
3596 && can_store_by_pieces (tree_low_cst (len, 1),
3597 builtin_memset_read_str, &c, dest_align,
3598 true))
3599 {
3600 val_rtx = force_reg (val_mode, val_rtx);
3601 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3602 builtin_memset_gen_str, val_rtx, dest_align,
3603 true, 0);
3604 }
3605 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3606 dest_align, expected_align,
3607 expected_size))
3608 goto do_libcall;
3609
3610 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3611 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3612 return dest_mem;
3613 }
3614
3615 if (target_char_cast (val, &c))
3616 goto do_libcall;
3617
3618 if (c)
3619 {
3620 if (host_integerp (len, 1)
3621 && can_store_by_pieces (tree_low_cst (len, 1),
3622 builtin_memset_read_str, &c, dest_align,
3623 true))
3624 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3625 builtin_memset_read_str, &c, dest_align, true, 0);
3626 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3627 gen_int_mode (c, val_mode),
3628 dest_align, expected_align,
3629 expected_size))
3630 goto do_libcall;
3631
3632 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3633 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3634 return dest_mem;
3635 }
3636
3637 set_mem_align (dest_mem, dest_align);
3638 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3639 CALL_EXPR_TAILCALL (orig_exp)
3640 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3641 expected_align, expected_size);
3642
3643 if (dest_addr == 0)
3644 {
3645 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3646 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3647 }
3648
3649 return dest_addr;
3650
3651 do_libcall:
3652 fndecl = get_callee_fndecl (orig_exp);
3653 fcode = DECL_FUNCTION_CODE (fndecl);
3654 if (fcode == BUILT_IN_MEMSET)
3655 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3656 dest, val, len);
3657 else if (fcode == BUILT_IN_BZERO)
3658 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3659 dest, len);
3660 else
3661 gcc_unreachable ();
3662 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3663 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3664 return expand_call (fn, target, target == const0_rtx);
3665 }
3666
3667 /* Expand expression EXP, which is a call to the bzero builtin. Return
3668 NULL_RTX if we failed; the caller should then emit a normal call. */
3669
3670 static rtx
3671 expand_builtin_bzero (tree exp)
3672 {
3673 tree dest, size;
3674 location_t loc = EXPR_LOCATION (exp);
3675
3676 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3677 return NULL_RTX;
3678
3679 dest = CALL_EXPR_ARG (exp, 0);
3680 size = CALL_EXPR_ARG (exp, 1);
3681
3682 /* New argument list transforming bzero(ptr x, int y) to
3683 memset(ptr x, int 0, size_t y). This is done this way
3684 so that if it isn't expanded inline, we fall back to
3685 calling bzero instead of memset. */
3686
3687 return expand_builtin_memset_args (dest, integer_zero_node,
3688 fold_convert_loc (loc,
3689 size_type_node, size),
3690 const0_rtx, VOIDmode, exp);
3691 }
3692
3693 /* Expand expression EXP, which is a call to the memcmp built-in function.
3694 Return NULL_RTX if we failed and the caller should emit a normal call,
3695 otherwise try to get the result in TARGET, if convenient (and in mode
3696 MODE, if that's convenient). */
3697
3698 static rtx
3699 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3700 ATTRIBUTE_UNUSED enum machine_mode mode)
3701 {
3702 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3703
3704 if (!validate_arglist (exp,
3705 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3706 return NULL_RTX;
3707
3708 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3709 implementing memcmp because it will stop if it encounters two
3710 zero bytes (one in each operand). */
3711 #if defined HAVE_cmpmemsi
3712 {
3713 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3714 rtx result;
3715 rtx insn;
3716 tree arg1 = CALL_EXPR_ARG (exp, 0);
3717 tree arg2 = CALL_EXPR_ARG (exp, 1);
3718 tree len = CALL_EXPR_ARG (exp, 2);
3719
3720 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3721 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3722 enum machine_mode insn_mode;
3723
3724 if (HAVE_cmpmemsi)
3725 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3726 else
3727 return NULL_RTX;
3728
3729 /* If either argument is not a pointer type, call the function. */
3730 if (arg1_align == 0 || arg2_align == 0)
3731 return NULL_RTX;
3732
3733 /* Make a place to write the result of the instruction. */
3734 result = target;
3735 if (! (result != 0
3736 && REG_P (result) && GET_MODE (result) == insn_mode
3737 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3738 result = gen_reg_rtx (insn_mode);
3739
3740 arg1_rtx = get_memory_rtx (arg1, len);
3741 arg2_rtx = get_memory_rtx (arg2, len);
3742 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3743
3744 /* Set MEM_SIZE as appropriate. */
3745 if (CONST_INT_P (arg3_rtx))
3746 {
3747 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3748 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3749 }
3750
3751 if (HAVE_cmpmemsi)
3752 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3753 GEN_INT (MIN (arg1_align, arg2_align)));
3754 else
3755 gcc_unreachable ();
3756
3757 if (insn)
3758 emit_insn (insn);
3759 else
3760 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3761 TYPE_MODE (integer_type_node), 3,
3762 XEXP (arg1_rtx, 0), Pmode,
3763 XEXP (arg2_rtx, 0), Pmode,
3764 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3765 TYPE_UNSIGNED (sizetype)),
3766 TYPE_MODE (sizetype));
3767
3768 /* Return the value in the proper mode for this function. */
3769 mode = TYPE_MODE (TREE_TYPE (exp));
3770 if (GET_MODE (result) == mode)
3771 return result;
3772 else if (target != 0)
3773 {
3774 convert_move (target, result, 0);
3775 return target;
3776 }
3777 else
3778 return convert_to_mode (mode, result, 0);
3779 }
3780 #endif /* HAVE_cmpmemsi. */
3781
3782 return NULL_RTX;
3783 }
3784
3785 /* Expand expression EXP, which is a call to the strcmp builtin. Return
3786 NULL_RTX if we failed; the caller should then emit a normal call.
3787 Otherwise try to get the result in TARGET, if convenient. */
3788
3789 static rtx
3790 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3791 {
3792 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3793 return NULL_RTX;
3794
3795 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3796 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3797 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3798 {
3799 rtx arg1_rtx, arg2_rtx;
3800 rtx result, insn = NULL_RTX;
3801 tree fndecl, fn;
3802 tree arg1 = CALL_EXPR_ARG (exp, 0);
3803 tree arg2 = CALL_EXPR_ARG (exp, 1);
3804
3805 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3806 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3807
3808 /* If either argument is not a pointer type, call the function. */
3809 if (arg1_align == 0 || arg2_align == 0)
3810 return NULL_RTX;
3811
3812 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3813 arg1 = builtin_save_expr (arg1);
3814 arg2 = builtin_save_expr (arg2);
3815
3816 arg1_rtx = get_memory_rtx (arg1, NULL);
3817 arg2_rtx = get_memory_rtx (arg2, NULL);
3818
3819 #ifdef HAVE_cmpstrsi
3820 /* Try to call cmpstrsi. */
3821 if (HAVE_cmpstrsi)
3822 {
3823 enum machine_mode insn_mode
3824 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3825
3826 /* Make a place to write the result of the instruction. */
3827 result = target;
3828 if (! (result != 0
3829 && REG_P (result) && GET_MODE (result) == insn_mode
3830 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3831 result = gen_reg_rtx (insn_mode);
3832
3833 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3834 GEN_INT (MIN (arg1_align, arg2_align)));
3835 }
3836 #endif
3837 #ifdef HAVE_cmpstrnsi
3838 /* Try to determine at least one length and call cmpstrnsi. */
3839 if (!insn && HAVE_cmpstrnsi)
3840 {
3841 tree len;
3842 rtx arg3_rtx;
3843
3844 enum machine_mode insn_mode
3845 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3846 tree len1 = c_strlen (arg1, 1);
3847 tree len2 = c_strlen (arg2, 1);
3848
3849 if (len1)
3850 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3851 if (len2)
3852 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3853
3854 /* If we don't have a constant length for the first, use the length
3855 of the second, if we know it. We don't require a constant for
3856 this case; some cost analysis could be done if both are available
3857 but neither is constant. For now, assume they're equally cheap,
3858 unless one has side effects. If both strings have constant lengths,
3859 use the smaller. */
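/* For example, if strlen (arg1) is known to be 3 (so len1 == 4 including
   the NUL) and len2 is unknown, LEN becomes len1; if both are constant,
   say len1 == 4 and len2 == 7, LEN becomes the smaller value 4. */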
3860
3861 if (!len1)
3862 len = len2;
3863 else if (!len2)
3864 len = len1;
3865 else if (TREE_SIDE_EFFECTS (len1))
3866 len = len2;
3867 else if (TREE_SIDE_EFFECTS (len2))
3868 len = len1;
3869 else if (TREE_CODE (len1) != INTEGER_CST)
3870 len = len2;
3871 else if (TREE_CODE (len2) != INTEGER_CST)
3872 len = len1;
3873 else if (tree_int_cst_lt (len1, len2))
3874 len = len1;
3875 else
3876 len = len2;
3877
3878 /* If both arguments have side effects, we cannot optimize. */
3879 if (!len || TREE_SIDE_EFFECTS (len))
3880 goto do_libcall;
3881
3882 arg3_rtx = expand_normal (len);
3883
3884 /* Make a place to write the result of the instruction. */
3885 result = target;
3886 if (! (result != 0
3887 && REG_P (result) && GET_MODE (result) == insn_mode
3888 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3889 result = gen_reg_rtx (insn_mode);
3890
3891 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3892 GEN_INT (MIN (arg1_align, arg2_align)));
3893 }
3894 #endif
3895
3896 if (insn)
3897 {
3898 enum machine_mode mode;
3899 emit_insn (insn);
3900
3901 /* Return the value in the proper mode for this function. */
3902 mode = TYPE_MODE (TREE_TYPE (exp));
3903 if (GET_MODE (result) == mode)
3904 return result;
3905 if (target == 0)
3906 return convert_to_mode (mode, result, 0);
3907 convert_move (target, result, 0);
3908 return target;
3909 }
3910
3911 /* Expand the library call ourselves using a stabilized argument
3912 list to avoid evaluating the function's arguments twice. */
3913 #ifdef HAVE_cmpstrnsi
3914 do_libcall:
3915 #endif
3916 fndecl = get_callee_fndecl (exp);
3917 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3918 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3919 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3920 return expand_call (fn, target, target == const0_rtx);
3921 }
3922 #endif
3923 return NULL_RTX;
3924 }
3925
3926 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3927 NULL_RTX if we failed; the caller should then emit a normal call.
3928 Otherwise try to get the result in TARGET, if convenient. */
3929
3930 static rtx
3931 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3932 ATTRIBUTE_UNUSED enum machine_mode mode)
3933 {
3934 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3935
3936 if (!validate_arglist (exp,
3937 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3938 return NULL_RTX;
3939
3940 /* If c_strlen can determine an expression for one of the string
3941 lengths, and it doesn't have side effects, then emit cmpstrnsi
3942 using length MIN(strlen(string)+1, arg3). */
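/* For example, for strncmp (s, "abc", 10) the second length is known:
   strlen ("abc") + 1 == 4, so the comparison uses MIN (4, 10) == 4
   bytes. */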
3943 #ifdef HAVE_cmpstrnsi
3944 if (HAVE_cmpstrnsi)
3945 {
3946 tree len, len1, len2;
3947 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3948 rtx result, insn;
3949 tree fndecl, fn;
3950 tree arg1 = CALL_EXPR_ARG (exp, 0);
3951 tree arg2 = CALL_EXPR_ARG (exp, 1);
3952 tree arg3 = CALL_EXPR_ARG (exp, 2);
3953
3954 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3955 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3956 enum machine_mode insn_mode
3957 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3958
3959 len1 = c_strlen (arg1, 1);
3960 len2 = c_strlen (arg2, 1);
3961
3962 if (len1)
3963 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3964 if (len2)
3965 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3966
3967 /* If we don't have a constant length for the first, use the length
3968 of the second, if we know it. We don't require a constant for
3969 this case; some cost analysis could be done if both are available
3970 but neither is constant. For now, assume they're equally cheap,
3971 unless one has side effects. If both strings have constant lengths,
3972 use the smaller. */
3973
3974 if (!len1)
3975 len = len2;
3976 else if (!len2)
3977 len = len1;
3978 else if (TREE_SIDE_EFFECTS (len1))
3979 len = len2;
3980 else if (TREE_SIDE_EFFECTS (len2))
3981 len = len1;
3982 else if (TREE_CODE (len1) != INTEGER_CST)
3983 len = len2;
3984 else if (TREE_CODE (len2) != INTEGER_CST)
3985 len = len1;
3986 else if (tree_int_cst_lt (len1, len2))
3987 len = len1;
3988 else
3989 len = len2;
3990
3991 /* If both arguments have side effects, we cannot optimize. */
3992 if (!len || TREE_SIDE_EFFECTS (len))
3993 return NULL_RTX;
3994
3995 /* The actual new length parameter is MIN(len,arg3). */
3996 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3997 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3998
3999 /* If either argument is not a pointer type, call the function. */
4000 if (arg1_align == 0 || arg2_align == 0)
4001 return NULL_RTX;
4002
4003 /* Make a place to write the result of the instruction. */
4004 result = target;
4005 if (! (result != 0
4006 && REG_P (result) && GET_MODE (result) == insn_mode
4007 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4008 result = gen_reg_rtx (insn_mode);
4009
4010 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4011 arg1 = builtin_save_expr (arg1);
4012 arg2 = builtin_save_expr (arg2);
4013 len = builtin_save_expr (len);
4014
4015 arg1_rtx = get_memory_rtx (arg1, len);
4016 arg2_rtx = get_memory_rtx (arg2, len);
4017 arg3_rtx = expand_normal (len);
4018 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4019 GEN_INT (MIN (arg1_align, arg2_align)));
4020 if (insn)
4021 {
4022 emit_insn (insn);
4023
4024 /* Return the value in the proper mode for this function. */
4025 mode = TYPE_MODE (TREE_TYPE (exp));
4026 if (GET_MODE (result) == mode)
4027 return result;
4028 if (target == 0)
4029 return convert_to_mode (mode, result, 0);
4030 convert_move (target, result, 0);
4031 return target;
4032 }
4033
4034 /* Expand the library call ourselves using a stabilized argument
4035 list to avoid evaluating the function's arguments twice. */
4036 fndecl = get_callee_fndecl (exp);
4037 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4038 arg1, arg2, len);
4039 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4040 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4041 return expand_call (fn, target, target == const0_rtx);
4042 }
4043 #endif
4044 return NULL_RTX;
4045 }
4046
4047 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4048 if that's convenient. */
4049
4050 rtx
4051 expand_builtin_saveregs (void)
4052 {
4053 rtx val, seq;
4054
4055 /* Don't do __builtin_saveregs more than once in a function.
4056 Save the result of the first call and reuse it. */
4057 if (saveregs_value != 0)
4058 return saveregs_value;
4059
4060 /* When this function is called, it means that registers must be
4061 saved on entry to this function. So we migrate the call to the
4062 first insn of this function. */
4063
4064 start_sequence ();
4065
4066 /* Do whatever the machine needs done in this case. */
4067 val = targetm.calls.expand_builtin_saveregs ();
4068
4069 seq = get_insns ();
4070 end_sequence ();
4071
4072 saveregs_value = val;
4073
4074 /* Put the insns after the NOTE that starts the function. If this
4075 is inside a start_sequence, make the outer-level insn chain current, so
4076 the code is placed at the start of the function. */
4077 push_topmost_sequence ();
4078 emit_insn_after (seq, entry_of_function ());
4079 pop_topmost_sequence ();
4080
4081 return val;
4082 }
4083
4084 /* Expand a call to __builtin_next_arg. */
4085
4086 static rtx
4087 expand_builtin_next_arg (void)
4088 {
4089 /* Checking arguments is already done in fold_builtin_next_arg
4090 that must be called before this function. */
4091 return expand_binop (ptr_mode, add_optab,
4092 crtl->args.internal_arg_pointer,
4093 crtl->args.arg_offset_rtx,
4094 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4095 }
4096
4097 /* Make it easier for the backends by protecting the valist argument
4098 from multiple evaluations. */
4099
4100 static tree
4101 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4102 {
4103 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4104
4105 /* The current way of determining the type of valist is completely
4106 bogus. We should have the information on the va builtin instead. */
4107 if (!vatype)
4108 vatype = targetm.fn_abi_va_list (cfun->decl);
4109
4110 if (TREE_CODE (vatype) == ARRAY_TYPE)
4111 {
4112 if (TREE_SIDE_EFFECTS (valist))
4113 valist = save_expr (valist);
4114
4115 /* For this case, the backends will be expecting a pointer to
4116 vatype, but it's possible we've actually been given an array
4117 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4118 So fix it. */
4119 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4120 {
4121 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4122 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4123 }
4124 }
4125 else
4126 {
4127 tree pt = build_pointer_type (vatype);
4128
4129 if (! needs_lvalue)
4130 {
4131 if (! TREE_SIDE_EFFECTS (valist))
4132 return valist;
4133
4134 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4135 TREE_SIDE_EFFECTS (valist) = 1;
4136 }
4137
4138 if (TREE_SIDE_EFFECTS (valist))
4139 valist = save_expr (valist);
4140 valist = fold_build2_loc (loc, MEM_REF,
4141 vatype, valist, build_int_cst (pt, 0));
4142 }
4143
4144 return valist;
4145 }
4146
4147 /* The "standard" definition of va_list is void*. */
4148
4149 tree
4150 std_build_builtin_va_list (void)
4151 {
4152 return ptr_type_node;
4153 }
4154
4155 /* The "standard" abi va_list is va_list_type_node. */
4156
4157 tree
4158 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4159 {
4160 return va_list_type_node;
4161 }
4162
4163 /* The "standard" type of va_list is va_list_type_node. */
4164
4165 tree
4166 std_canonical_va_list_type (tree type)
4167 {
4168 tree wtype, htype;
4169
4170 if (INDIRECT_REF_P (type))
4171 type = TREE_TYPE (type);
4172 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4173 type = TREE_TYPE (type);
4174 wtype = va_list_type_node;
4175 htype = type;
4176 /* Handle structure va_list types. */
4177 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4178 htype = TREE_TYPE (htype);
4179 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4180 {
4181 /* If va_list is an array type, the argument may have decayed
4182 to a pointer type, e.g. by being passed to another function.
4183 In that case, unwrap both types so that we can compare the
4184 underlying records. */
4185 if (TREE_CODE (htype) == ARRAY_TYPE
4186 || POINTER_TYPE_P (htype))
4187 {
4188 wtype = TREE_TYPE (wtype);
4189 htype = TREE_TYPE (htype);
4190 }
4191 }
4192 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4193 return va_list_type_node;
4194
4195 return NULL_TREE;
4196 }
4197
4198 /* The "standard" implementation of va_start: just assign `nextarg' to
4199 the variable. */
4200
4201 void
4202 std_expand_builtin_va_start (tree valist, rtx nextarg)
4203 {
4204 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4205 convert_move (va_r, nextarg, 0);
4206 }
4207
4208 /* Expand EXP, a call to __builtin_va_start. */
4209
4210 static rtx
4211 expand_builtin_va_start (tree exp)
4212 {
4213 rtx nextarg;
4214 tree valist;
4215 location_t loc = EXPR_LOCATION (exp);
4216
4217 if (call_expr_nargs (exp) < 2)
4218 {
4219 error_at (loc, "too few arguments to function %<va_start%>");
4220 return const0_rtx;
4221 }
4222
4223 if (fold_builtin_next_arg (exp, true))
4224 return const0_rtx;
4225
4226 nextarg = expand_builtin_next_arg ();
4227 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4228
4229 if (targetm.expand_builtin_va_start)
4230 targetm.expand_builtin_va_start (valist, nextarg);
4231 else
4232 std_expand_builtin_va_start (valist, nextarg);
4233
4234 return const0_rtx;
4235 }
4236
4237 /* Expand EXP, a call to __builtin_va_end. */
4238
4239 static rtx
4240 expand_builtin_va_end (tree exp)
4241 {
4242 tree valist = CALL_EXPR_ARG (exp, 0);
4243
4244 /* Evaluate for side effects, if needed. I hate macros that don't
4245 do that. */
4246 if (TREE_SIDE_EFFECTS (valist))
4247 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4248
4249 return const0_rtx;
4250 }
4251
4252 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4253 builtin rather than just as an assignment in stdarg.h because of the
4254 nastiness of array-type va_list types. */
4255
4256 static rtx
4257 expand_builtin_va_copy (tree exp)
4258 {
4259 tree dst, src, t;
4260 location_t loc = EXPR_LOCATION (exp);
4261
4262 dst = CALL_EXPR_ARG (exp, 0);
4263 src = CALL_EXPR_ARG (exp, 1);
4264
4265 dst = stabilize_va_list_loc (loc, dst, 1);
4266 src = stabilize_va_list_loc (loc, src, 0);
4267
4268 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4269
4270 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4271 {
4272 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4273 TREE_SIDE_EFFECTS (t) = 1;
4274 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4275 }
4276 else
4277 {
4278 rtx dstb, srcb, size;
4279
4280 /* Evaluate to pointers. */
4281 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4282 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4283 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4284 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4285
4286 dstb = convert_memory_address (Pmode, dstb);
4287 srcb = convert_memory_address (Pmode, srcb);
4288
4289 /* "Dereference" to BLKmode memories. */
4290 dstb = gen_rtx_MEM (BLKmode, dstb);
4291 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4292 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4293 srcb = gen_rtx_MEM (BLKmode, srcb);
4294 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4295 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4296
4297 /* Copy. */
4298 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4299 }
4300
4301 return const0_rtx;
4302 }
4303
4304 /* Expand a call to one of the builtin functions __builtin_frame_address or
4305 __builtin_return_address. */
4306
4307 static rtx
4308 expand_builtin_frame_address (tree fndecl, tree exp)
4309 {
4310 /* The argument must be a nonnegative integer constant.
4311 It counts the number of frames to scan up the stack.
4312 The value is the return address or frame address found in that frame. */
4313 if (call_expr_nargs (exp) == 0)
4314 /* Warning about missing arg was already issued. */
4315 return const0_rtx;
4316 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4317 {
4318 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4319 error ("invalid argument to %<__builtin_frame_address%>");
4320 else
4321 error ("invalid argument to %<__builtin_return_address%>");
4322 return const0_rtx;
4323 }
4324 else
4325 {
4326 rtx tem
4327 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4328 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4329
4330 /* Some ports cannot access arbitrary stack frames. */
4331 if (tem == NULL)
4332 {
4333 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4334 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4335 else
4336 warning (0, "unsupported argument to %<__builtin_return_address%>");
4337 return const0_rtx;
4338 }
4339
4340 /* For __builtin_frame_address, return what we've got. */
4341 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4342 return tem;
4343
4344 if (!REG_P (tem)
4345 && ! CONSTANT_P (tem))
4346 tem = copy_addr_to_reg (tem);
4347 return tem;
4348 }
4349 }
4350
4351 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4352 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4353 is the same as for allocate_dynamic_stack_space. */
4354
4355 static rtx
4356 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4357 {
4358 rtx op0;
4359 rtx result;
4360 bool valid_arglist;
4361 unsigned int align;
4362 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4363 == BUILT_IN_ALLOCA_WITH_ALIGN);
4364
4365 valid_arglist
4366 = (alloca_with_align
4367 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4368 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4369
4370 if (!valid_arglist)
4371 return NULL_RTX;
4372
4373 /* Compute the argument. */
4374 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4375
4376 /* Compute the alignment. */
4377 align = (alloca_with_align
4378 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4379 : BIGGEST_ALIGNMENT);
4380
4381 /* Allocate the desired space. */
4382 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4383 result = convert_memory_address (ptr_mode, result);
4384
4385 return result;
4386 }
4387
4388 /* Expand a call to the bswap builtin in EXP.
4389 Return NULL_RTX if a normal call should be emitted rather than expanding the
4390 function in-line. If convenient, the result should be placed in TARGET.
4391 SUBTARGET may be used as the target for computing one of EXP's operands. */
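/* As a concrete example, __builtin_bswap32 (0x12345678) evaluates to
   0x78563412; the expansion below simply funnels the operand through
   bswap_optab in TARGET_MODE. */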
4392
4393 static rtx
4394 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4395 rtx subtarget)
4396 {
4397 tree arg;
4398 rtx op0;
4399
4400 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4401 return NULL_RTX;
4402
4403 arg = CALL_EXPR_ARG (exp, 0);
4404 op0 = expand_expr (arg,
4405 subtarget && GET_MODE (subtarget) == target_mode
4406 ? subtarget : NULL_RTX,
4407 target_mode, EXPAND_NORMAL);
4408 if (GET_MODE (op0) != target_mode)
4409 op0 = convert_to_mode (target_mode, op0, 1);
4410
4411 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4412
4413 gcc_assert (target);
4414
4415 return convert_to_mode (target_mode, target, 1);
4416 }
4417
4418 /* Expand a call to a unary builtin in EXP.
4419 Return NULL_RTX if a normal call should be emitted rather than expanding the
4420 function in-line. If convenient, the result should be placed in TARGET.
4421 SUBTARGET may be used as the target for computing one of EXP's operands. */
4422
4423 static rtx
4424 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4425 rtx subtarget, optab op_optab)
4426 {
4427 rtx op0;
4428
4429 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4430 return NULL_RTX;
4431
4432 /* Compute the argument. */
4433 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4434 (subtarget
4435 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4436 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4437 VOIDmode, EXPAND_NORMAL);
4438 /* Compute op, into TARGET if possible.
4439 Set TARGET to wherever the result comes back. */
4440 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4441 op_optab, op0, target, op_optab != clrsb_optab);
4442 gcc_assert (target);
4443
4444 return convert_to_mode (target_mode, target, 0);
4445 }
4446
4447 /* Expand a call to __builtin_expect. We just return our argument,
4448 as the builtin_expect semantics should already have been applied
4449 by the tree branch prediction pass. */
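/* By this point, e.g. __builtin_expect (x != 0, 1) expands to just the
   value of x != 0; the hint only influenced the earlier branch
   probability estimation. */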
4450
4451 static rtx
4452 expand_builtin_expect (tree exp, rtx target)
4453 {
4454 tree arg;
4455
4456 if (call_expr_nargs (exp) < 2)
4457 return const0_rtx;
4458 arg = CALL_EXPR_ARG (exp, 0);
4459
4460 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4461 /* When guessing was done, the hints should be already stripped away. */
4462 gcc_assert (!flag_guess_branch_prob
4463 || optimize == 0 || seen_error ());
4464 return target;
4465 }
4466
4467 /* Expand a call to __builtin_assume_aligned. We just return our first
4468 argument, as the builtin_assume_aligned semantics should already
4469 have been applied by CCP. */
4470
4471 static rtx
4472 expand_builtin_assume_aligned (tree exp, rtx target)
4473 {
4474 if (call_expr_nargs (exp) < 2)
4475 return const0_rtx;
4476 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4477 EXPAND_NORMAL);
4478 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4479 && (call_expr_nargs (exp) < 3
4480 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4481 return target;
4482 }
4483
4484 void
4485 expand_builtin_trap (void)
4486 {
4487 #ifdef HAVE_trap
4488 if (HAVE_trap)
4489 {
4490 rtx insn = emit_insn (gen_trap ());
4491 /* For trap insns when not accumulating outgoing args force
4492 REG_ARGS_SIZE note to prevent crossjumping of calls with
4493 different args sizes. */
4494 if (!ACCUMULATE_OUTGOING_ARGS)
4495 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4496 }
4497 else
4498 #endif
4499 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4500 emit_barrier ();
4501 }
4502
4503 /* Expand a call to __builtin_unreachable. We do nothing except emit
4504 a barrier saying that control flow will not pass here.
4505
4506 It is the responsibility of the program being compiled to ensure
4507 that control flow never reaches __builtin_unreachable. */
4508 static void
4509 expand_builtin_unreachable (void)
4510 {
4511 emit_barrier ();
4512 }
4513
4514 /* Expand EXP, a call to fabs, fabsf or fabsl.
4515 Return NULL_RTX if a normal call should be emitted rather than expanding
4516 the function inline. If convenient, the result should be placed
4517 in TARGET. SUBTARGET may be used as the target for computing
4518 the operand. */
4519
4520 static rtx
4521 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4522 {
4523 enum machine_mode mode;
4524 tree arg;
4525 rtx op0;
4526
4527 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4528 return NULL_RTX;
4529
4530 arg = CALL_EXPR_ARG (exp, 0);
4531 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4532 mode = TYPE_MODE (TREE_TYPE (arg));
4533 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4534 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4535 }
4536
4537 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4538 Return NULL_RTX if a normal call should be emitted rather than expanding
4539 the function inline. If convenient, the result should be placed in TARGET.
4540 SUBTARGET may be used as the target for computing the operand. */
4541
4542 static rtx
4543 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4544 {
4545 rtx op0, op1;
4546 tree arg;
4547
4548 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4549 return NULL_RTX;
4550
4551 arg = CALL_EXPR_ARG (exp, 0);
4552 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4553
4554 arg = CALL_EXPR_ARG (exp, 1);
4555 op1 = expand_normal (arg);
4556
4557 return expand_copysign (op0, op1, target);
4558 }
4559
4560 /* Create a new constant string literal and return a char* pointer to it.
4561 The STRING_CST value is the LEN characters at STR. */
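/* For example, build_string_literal (6, "hello") builds a tree
   equivalent to &"hello"[0] of type char *, where LEN == 6 counts the
   terminating NUL. */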
4562 tree
4563 build_string_literal (int len, const char *str)
4564 {
4565 tree t, elem, index, type;
4566
4567 t = build_string (len, str);
4568 elem = build_type_variant (char_type_node, 1, 0);
4569 index = build_index_type (size_int (len - 1));
4570 type = build_array_type (elem, index);
4571 TREE_TYPE (t) = type;
4572 TREE_CONSTANT (t) = 1;
4573 TREE_READONLY (t) = 1;
4574 TREE_STATIC (t) = 1;
4575
4576 type = build_pointer_type (elem);
4577 t = build1 (ADDR_EXPR, type,
4578 build4 (ARRAY_REF, elem,
4579 t, integer_zero_node, NULL_TREE, NULL_TREE));
4580 return t;
4581 }
4582
4583 /* Expand a call to __builtin___clear_cache. */
4584
4585 static rtx
4586 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4587 {
4588 #ifndef HAVE_clear_cache
4589 #ifdef CLEAR_INSN_CACHE
4590 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4591 does something. Just do the default expansion to a call to
4592 __clear_cache(). */
4593 return NULL_RTX;
4594 #else
4595 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4596 does nothing. There is no need to call it. Do nothing. */
4597 return const0_rtx;
4598 #endif /* CLEAR_INSN_CACHE */
4599 #else
4600 /* We have a "clear_cache" insn, and it will handle everything. */
4601 tree begin, end;
4602 rtx begin_rtx, end_rtx;
4603
4604 /* We must not expand to a library call. If we did, any
4605 fallback library function in libgcc that might contain a call to
4606 __builtin___clear_cache() would recurse infinitely. */
4607 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4608 {
4609 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4610 return const0_rtx;
4611 }
4612
4613 if (HAVE_clear_cache)
4614 {
4615 struct expand_operand ops[2];
4616
4617 begin = CALL_EXPR_ARG (exp, 0);
4618 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4619
4620 end = CALL_EXPR_ARG (exp, 1);
4621 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4622
4623 create_address_operand (&ops[0], begin_rtx);
4624 create_address_operand (&ops[1], end_rtx);
4625 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4626 return const0_rtx;
4627 }
4628 return const0_rtx;
4629 #endif /* HAVE_clear_cache */
4630 }
4631
4632 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4633
4634 static rtx
4635 round_trampoline_addr (rtx tramp)
4636 {
4637 rtx temp, addend, mask;
4638
4639 /* If we don't need too much alignment, we'll have been guaranteed
4640 proper alignment by get_trampoline_type. */
4641 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4642 return tramp;
4643
4644 /* Round address up to desired boundary. */
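/* E.g. for TRAMPOLINE_ALIGNMENT == 64 (bits), ADDEND is 7 and MASK is
   -8, so the two binops below compute (TRAMP + 7) & -8 in Pmode. */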
4645 temp = gen_reg_rtx (Pmode);
4646 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4647 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4648
4649 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4650 temp, 0, OPTAB_LIB_WIDEN);
4651 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4652 temp, 0, OPTAB_LIB_WIDEN);
4653
4654 return tramp;
4655 }
4656
4657 static rtx
4658 expand_builtin_init_trampoline (tree exp, bool onstack)
4659 {
4660 tree t_tramp, t_func, t_chain;
4661 rtx m_tramp, r_tramp, r_chain, tmp;
4662
4663 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4664 POINTER_TYPE, VOID_TYPE))
4665 return NULL_RTX;
4666
4667 t_tramp = CALL_EXPR_ARG (exp, 0);
4668 t_func = CALL_EXPR_ARG (exp, 1);
4669 t_chain = CALL_EXPR_ARG (exp, 2);
4670
4671 r_tramp = expand_normal (t_tramp);
4672 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4673 MEM_NOTRAP_P (m_tramp) = 1;
4674
4675 /* If ONSTACK, the TRAMP argument should be the address of a field
4676 within the local function's FRAME decl. Either way, let's see if
4677 we can fill in the MEM_ATTRs for this memory. */
4678 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4679 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4680
4681 /* The creator of a heap trampoline is responsible for making sure the
4682 address is aligned to at least STACK_BOUNDARY. Normally malloc
4683 will ensure this anyhow. */
4684 tmp = round_trampoline_addr (r_tramp);
4685 if (tmp != r_tramp)
4686 {
4687 m_tramp = change_address (m_tramp, BLKmode, tmp);
4688 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4689 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4690 }
4691
4692 /* The FUNC argument should be the address of the nested function.
4693 Extract the actual function decl to pass to the hook. */
4694 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4695 t_func = TREE_OPERAND (t_func, 0);
4696 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4697
4698 r_chain = expand_normal (t_chain);
4699
4700 /* Generate insns to initialize the trampoline. */
4701 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4702
4703 if (onstack)
4704 {
4705 trampolines_created = 1;
4706
4707 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4708 "trampoline generated for nested function %qD", t_func);
4709 }
4710
4711 return const0_rtx;
4712 }
4713
4714 static rtx
4715 expand_builtin_adjust_trampoline (tree exp)
4716 {
4717 rtx tramp;
4718
4719 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4720 return NULL_RTX;
4721
4722 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4723 tramp = round_trampoline_addr (tramp);
4724 if (targetm.calls.trampoline_adjust_address)
4725 tramp = targetm.calls.trampoline_adjust_address (tramp);
4726
4727 return tramp;
4728 }
4729
4730 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4731 function. The function first checks whether the back end provides
4732 an insn to implement signbit for the respective mode. If not, it
4733 checks whether the floating point format of the value is such that
4734 the sign bit can be extracted. If that is not the case, the
4735 function returns NULL_RTX to indicate that a normal call should be
4736 emitted rather than expanding the function in-line. EXP is the
4737 expression that is a call to the builtin function; if convenient,
4738 the result should be placed in TARGET. */
4739 static rtx
4740 expand_builtin_signbit (tree exp, rtx target)
4741 {
4742 const struct real_format *fmt;
4743 enum machine_mode fmode, imode, rmode;
4744 tree arg;
4745 int word, bitpos;
4746 enum insn_code icode;
4747 rtx temp;
4748 location_t loc = EXPR_LOCATION (exp);
4749
4750 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4751 return NULL_RTX;
4752
4753 arg = CALL_EXPR_ARG (exp, 0);
4754 fmode = TYPE_MODE (TREE_TYPE (arg));
4755 rmode = TYPE_MODE (TREE_TYPE (exp));
4756 fmt = REAL_MODE_FORMAT (fmode);
4757
4758 arg = builtin_save_expr (arg);
4759
4760 /* Expand the argument yielding a RTX expression. */
4761 temp = expand_normal (arg);
4762
4763 /* Check if the back end provides an insn that handles signbit for the
4764 argument's mode. */
4765 icode = optab_handler (signbit_optab, fmode);
4766 if (icode != CODE_FOR_nothing)
4767 {
4768 rtx last = get_last_insn ();
4769 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4770 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4771 return target;
4772 delete_insns_since (last);
4773 }
4774
4775 /* For floating point formats without a sign bit, implement signbit
4776 as "ARG < 0.0". */
4777 bitpos = fmt->signbit_ro;
4778 if (bitpos < 0)
4779 {
4780 /* But we can't do this if the format supports signed zero. */
4781 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4782 return NULL_RTX;
4783
4784 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4785 build_real (TREE_TYPE (arg), dconst0));
4786 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4787 }
4788
4789 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4790 {
4791 imode = int_mode_for_mode (fmode);
4792 if (imode == BLKmode)
4793 return NULL_RTX;
4794 temp = gen_lowpart (imode, temp);
4795 }
4796 else
4797 {
4798 imode = word_mode;
4799 /* Handle targets with different FP word orders. */
4800 if (FLOAT_WORDS_BIG_ENDIAN)
4801 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4802 else
4803 word = bitpos / BITS_PER_WORD;
4804 temp = operand_subword_force (temp, word, fmode);
4805 bitpos = bitpos % BITS_PER_WORD;
4806 }
4807
4808 /* Force the intermediate word_mode (or narrower) result into a
4809 register. This avoids attempting to create paradoxical SUBREGs
4810 of floating point modes below. */
4811 temp = force_reg (imode, temp);
4812
4813 /* If the bitpos is within the "result mode" lowpart, the operation
4814 can be implemented with a single bitwise AND. Otherwise, we need
4815 a right shift and an AND. */
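/* For instance, IEEE single precision has BITPOS == 31; with a 32-bit
   result mode that is within the lowpart, so the mask below is 1 << 31.
   A sign bit above the result mode's width takes the shift-then-mask
   path instead. */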
4816
4817 if (bitpos < GET_MODE_BITSIZE (rmode))
4818 {
4819 double_int mask = double_int_zero.set_bit (bitpos);
4820
4821 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4822 temp = gen_lowpart (rmode, temp);
4823 temp = expand_binop (rmode, and_optab, temp,
4824 immed_double_int_const (mask, rmode),
4825 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4826 }
4827 else
4828 {
4829 /* Perform a logical right shift to place the signbit in the least
4830 significant bit, then truncate the result to the desired mode
4831 and mask just this bit. */
4832 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4833 temp = gen_lowpart (rmode, temp);
4834 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4835 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4836 }
4837
4838 return temp;
4839 }
4840
4841 /* Expand fork or exec calls. TARGET is the desired target of the
4842 call. EXP is the call. FN is the declaration of
4843 the builtin function being expanded. IGNORE is nonzero if the
4844 value is to be ignored. */
4845
4846 static rtx
4847 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4848 {
4849 tree id, decl;
4850 tree call;
4851
4852 /* If we are not profiling, just call the function. */
4853 if (!profile_arc_flag)
4854 return NULL_RTX;
4855
4856 /* Otherwise call the wrapper. This should be equivalent for the rest of
4857 the compiler, so the code does not diverge, and the wrapper may run the
4858 code necessary for keeping the profiling sane. */
4859
4860 switch (DECL_FUNCTION_CODE (fn))
4861 {
4862 case BUILT_IN_FORK:
4863 id = get_identifier ("__gcov_fork");
4864 break;
4865
4866 case BUILT_IN_EXECL:
4867 id = get_identifier ("__gcov_execl");
4868 break;
4869
4870 case BUILT_IN_EXECV:
4871 id = get_identifier ("__gcov_execv");
4872 break;
4873
4874 case BUILT_IN_EXECLP:
4875 id = get_identifier ("__gcov_execlp");
4876 break;
4877
4878 case BUILT_IN_EXECLE:
4879 id = get_identifier ("__gcov_execle");
4880 break;
4881
4882 case BUILT_IN_EXECVP:
4883 id = get_identifier ("__gcov_execvp");
4884 break;
4885
4886 case BUILT_IN_EXECVE:
4887 id = get_identifier ("__gcov_execve");
4888 break;
4889
4890 default:
4891 gcc_unreachable ();
4892 }
4893
4894 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4895 FUNCTION_DECL, id, TREE_TYPE (fn));
4896 DECL_EXTERNAL (decl) = 1;
4897 TREE_PUBLIC (decl) = 1;
4898 DECL_ARTIFICIAL (decl) = 1;
4899 TREE_NOTHROW (decl) = 1;
4900 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4901 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4902 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4903 return expand_call (call, target, ignore);
4904 }
4905
4906
4907 \f
4908 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4909 the pointer in these functions is void*, the tree optimizers may remove
4910 casts. The mode computed in expand_builtin isn't reliable either, due
4911 to __sync_bool_compare_and_swap.
4912
4913 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4914 group of builtins. This gives us log2 of the mode size. */
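/* For example, BUILT_IN_SYNC_FETCH_AND_ADD_4 minus
   BUILT_IN_SYNC_FETCH_AND_ADD_1 gives FCODE_DIFF == 2, and
   BITS_PER_UNIT << 2 == 32 selects SImode on typical targets. */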
4915
4916 static inline enum machine_mode
4917 get_builtin_sync_mode (int fcode_diff)
4918 {
4919 /* The size is not negotiable, so ask not to get BLKmode in return
4920 if the target indicates that a smaller size would be better. */
4921 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
4922 }
4923
4924 /* Expand the memory expression LOC and return the appropriate memory operand
4925 for the builtin_sync operations. */
4926
4927 static rtx
4928 get_builtin_sync_mem (tree loc, enum machine_mode mode)
4929 {
4930 rtx addr, mem;
4931
4932 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4933 addr = convert_memory_address (Pmode, addr);
4934
4935 /* Note that we explicitly do not want any alias information for this
4936 memory, so that we kill all other live memories. Otherwise we don't
4937 satisfy the full barrier semantics of the intrinsic. */
4938 mem = validize_mem (gen_rtx_MEM (mode, addr));
4939
4940 /* The alignment needs to be at least that of the mode. */
4941 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4942 get_pointer_alignment (loc)));
4943 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4944 MEM_VOLATILE_P (mem) = 1;
4945
4946 return mem;
4947 }
4948
4949 /* Make sure an argument is in the right mode.
4950 EXP is the tree argument.
4951 MODE is the mode it should be in. */
4952
4953 static rtx
4954 expand_expr_force_mode (tree exp, enum machine_mode mode)
4955 {
4956 rtx val;
4957 enum machine_mode old_mode;
4958
4959 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4960 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4961 of CONST_INTs, where we know the old_mode only from the call argument. */
4962
4963 old_mode = GET_MODE (val);
4964 if (old_mode == VOIDmode)
4965 old_mode = TYPE_MODE (TREE_TYPE (exp));
4966 val = convert_modes (mode, old_mode, val, 1);
4967 return val;
4968 }
4969
4970
4971 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4972 EXP is the CALL_EXPR. CODE is the rtx code
4973 that corresponds to the arithmetic or logical operation from the name;
4974 an exception here is that NOT actually means NAND. TARGET is an optional
4975 place for us to store the results; AFTER is true if this is the
4976 xxx_and_fetch form, which returns the value after the operation. */
4977
4978 static rtx
4979 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
4980 enum rtx_code code, bool after,
4981 rtx target)
4982 {
4983 rtx val, mem;
4984 location_t loc = EXPR_LOCATION (exp);
4985
4986 if (code == NOT && warn_sync_nand)
4987 {
4988 tree fndecl = get_callee_fndecl (exp);
4989 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4990
4991 static bool warned_f_a_n, warned_n_a_f;
4992
4993 switch (fcode)
4994 {
4995 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4996 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4997 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4998 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4999 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5000 if (warned_f_a_n)
5001 break;
5002
5003 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5004 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5005 warned_f_a_n = true;
5006 break;
5007
5008 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5009 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5010 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5011 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5012 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5013 if (warned_n_a_f)
5014 break;
5015
5016 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5017 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5018 warned_n_a_f = true;
5019 break;
5020
5021 default:
5022 gcc_unreachable ();
5023 }
5024 }
5025
5026 /* Expand the operands. */
5027 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5028 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5029
5030 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5031 after);
5032 }
5033
5034 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5035 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5036 true if this is the boolean form. TARGET is a place for us to store the
5037 results; this is NOT optional if IS_BOOL is true. */
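/* For example, __sync_val_compare_and_swap (p, old, new) returns the
   previous *p (IS_BOOL false), whereas __sync_bool_compare_and_swap
   returns whether the store took place (IS_BOOL true). */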
5038
5039 static rtx
5040 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5041 bool is_bool, rtx target)
5042 {
5043 rtx old_val, new_val, mem;
5044 rtx *pbool, *poval;
5045
5046 /* Expand the operands. */
5047 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5048 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5049 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5050
5051 pbool = poval = NULL;
5052 if (target != const0_rtx)
5053 {
5054 if (is_bool)
5055 pbool = &target;
5056 else
5057 poval = &target;
5058 }
5059 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5060 false, MEMMODEL_SEQ_CST,
5061 MEMMODEL_SEQ_CST))
5062 return NULL_RTX;
5063
5064 return target;
5065 }
5066
5067 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5068 general form is actually an atomic exchange, and some targets only
5069 support a reduced form with the second argument being a constant 1.
5070 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5071 the results. */
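/* For illustration (an editor's sketch): the reduced form is the classic
   spin-lock acquire, paired with __sync_lock_release below:

	while (__sync_lock_test_and_set (&lock, 1))
	  ;
	...critical section...
	__sync_lock_release (&lock);

   Targets supporting only the reduced form require the second argument
   to be the constant 1.  */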
5072
5073 static rtx
5074 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5075 rtx target)
5076 {
5077 rtx val, mem;
5078
5079 /* Expand the operands. */
5080 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5081 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5082
5083 return expand_sync_lock_test_and_set (target, mem, val);
5084 }
5085
5086 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5087
5088 static void
5089 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5090 {
5091 rtx mem;
5092
5093 /* Expand the operands. */
5094 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5095
5096 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5097 }
5098
5099 /* Given an integer representing an ``enum memmodel'', verify its
5100 correctness and return the memory model enum. */
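/* For illustration (an editor's sketch): the argument normally comes from
   one of the user-level __ATOMIC_* macros, e.g. the final operand of

	__atomic_store_n (&flag, 1, __ATOMIC_RELEASE);

   Only a literal model can be validated here; as the comment below notes,
   a run-time value is conservatively treated as MEMMODEL_SEQ_CST.  */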
5101
5102 static enum memmodel
5103 get_memmodel (tree exp)
5104 {
5105 rtx op;
5106 unsigned HOST_WIDE_INT val;
5107
5108 /* If the parameter is not a constant, it's a run-time value, so just
5109 convert it to MEMMODEL_SEQ_CST to avoid annoying run-time checking. */
5110 if (TREE_CODE (exp) != INTEGER_CST)
5111 return MEMMODEL_SEQ_CST;
5112
5113 op = expand_normal (exp);
5114
5115 val = INTVAL (op);
5116 if (targetm.memmodel_check)
5117 val = targetm.memmodel_check (val);
5118 else if (val & ~MEMMODEL_MASK)
5119 {
5120 warning (OPT_Winvalid_memory_model,
5121 "Unknown architecture specifier in memory model to builtin.");
5122 return MEMMODEL_SEQ_CST;
5123 }
5124
5125 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5126 {
5127 warning (OPT_Winvalid_memory_model,
5128 "invalid memory model argument to builtin");
5129 return MEMMODEL_SEQ_CST;
5130 }
5131
5132 return (enum memmodel) val;
5133 }
5134
5135 /* Expand the __atomic_exchange intrinsic:
5136 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5137 EXP is the CALL_EXPR.
5138 TARGET is an optional place for us to store the results. */
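/* For illustration (an editor's sketch): the user-level form is

	int old = __atomic_exchange_n (&x, newval, __ATOMIC_SEQ_CST);

   Any model except __ATOMIC_CONSUME is accepted, per the check below.  */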
5139
5140 static rtx
5141 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5142 {
5143 rtx val, mem;
5144 enum memmodel model;
5145
5146 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5147 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5148 {
5149 error ("invalid memory model for %<__atomic_exchange%>");
5150 return NULL_RTX;
5151 }
5152
5153 if (!flag_inline_atomics)
5154 return NULL_RTX;
5155
5156 /* Expand the operands. */
5157 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5158 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5159
5160 return expand_atomic_exchange (target, mem, val, model);
5161 }
5162
5163 /* Expand the __atomic_compare_exchange intrinsic:
5164 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5165 TYPE desired, BOOL weak,
5166 enum memmodel success,
5167 enum memmodel failure)
5168 EXP is the CALL_EXPR.
5169 TARGET is an optional place for us to store the results. */
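/* For illustration (an editor's sketch): a typical user-level CAS loop is

	int expected = __atomic_load_n (&x, __ATOMIC_RELAXED);
	while (!__atomic_compare_exchange_n (&x, &expected, expected + 1,
					     1, __ATOMIC_ACQ_REL,
					     __ATOMIC_RELAXED))
	  ;

   (the 1 selects the weak variant).  On failure the current value of *x
   is written back into EXPECTED, which is why the expansion below copies
   OLDVAL into the EXPECT memory when they differ.  */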
5170
5171 static rtx
5172 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5173 rtx target)
5174 {
5175 rtx expect, desired, mem, oldval;
5176 enum memmodel success, failure;
5177 tree weak;
5178 bool is_weak;
5179
5180 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5181 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5182
5183 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5184 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5185 {
5186 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5187 return NULL_RTX;
5188 }
5189
5190 if (failure > success)
5191 {
5192 error ("failure memory model cannot be stronger than success "
5193 "memory model for %<__atomic_compare_exchange%>");
5194 return NULL_RTX;
5195 }
5196
5197 if (!flag_inline_atomics)
5198 return NULL_RTX;
5199
5200 /* Expand the operands. */
5201 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5202
5203 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5204 expect = convert_memory_address (Pmode, expect);
5205 expect = gen_rtx_MEM (mode, expect);
5206 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5207
5208 weak = CALL_EXPR_ARG (exp, 3);
5209 is_weak = false;
5210 if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
5211 is_weak = true;
5212
5213 oldval = expect;
5214 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5215 &oldval, mem, oldval, desired,
5216 is_weak, success, failure))
5217 return NULL_RTX;
5218
5219 if (oldval != expect)
5220 emit_move_insn (expect, oldval);
5221
5222 return target;
5223 }
5224
5225 /* Expand the __atomic_load intrinsic:
5226 TYPE __atomic_load (TYPE *object, enum memmodel)
5227 EXP is the CALL_EXPR.
5228 TARGET is an optional place for us to store the results. */
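/* For illustration (an editor's sketch): the user-level form is

	int v = __atomic_load_n (&x, __ATOMIC_ACQUIRE);

   RELEASE and ACQ_REL are rejected below, since they do not apply to a
   pure load.  */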
5229
5230 static rtx
5231 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5232 {
5233 rtx mem;
5234 enum memmodel model;
5235
5236 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5237 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5238 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5239 {
5240 error ("invalid memory model for %<__atomic_load%>");
5241 return NULL_RTX;
5242 }
5243
5244 if (!flag_inline_atomics)
5245 return NULL_RTX;
5246
5247 /* Expand the operand. */
5248 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5249
5250 return expand_atomic_load (target, mem, model);
5251 }
5252
5253
5254 /* Expand the __atomic_store intrinsic:
5255 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5256 EXP is the CALL_EXPR.
5257 TARGET is an optional place for us to store the results. */
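/* For illustration (an editor's sketch): the user-level form is

	__atomic_store_n (&x, v, __ATOMIC_RELEASE);

   Only RELAXED, RELEASE and SEQ_CST make sense for a pure store, which
   is what the check below enforces.  */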
5258
5259 static rtx
5260 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5261 {
5262 rtx mem, val;
5263 enum memmodel model;
5264
5265 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5266 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5267 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5268 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5269 {
5270 error ("invalid memory model for %<__atomic_store%>");
5271 return NULL_RTX;
5272 }
5273
5274 if (!flag_inline_atomics)
5275 return NULL_RTX;
5276
5277 /* Expand the operands. */
5278 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5279 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5280
5281 return expand_atomic_store (mem, val, model, false);
5282 }
5283
5284 /* Expand the __atomic_fetch_XXX intrinsic:
5285 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5286 EXP is the CALL_EXPR.
5287 TARGET is an optional place for us to store the results.
5288 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR (as above, NOT
5289 actually means NAND). FETCH_AFTER is true if we return the result of
5290 the operation, and false if we return the value before the operation.
5291 IGNORE is true if the result is not used.
5292 EXT_CALL is the correct builtin for an external call if this cannot be
5293 resolved to an instruction sequence. */
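/* For illustration (an editor's sketch): the two flavors differ only in
   which value is returned, e.g.

	int a = __atomic_add_fetch (&c, 1, __ATOMIC_SEQ_CST);
	int b = __atomic_fetch_add (&c, 1, __ATOMIC_SEQ_CST);

   where a is the value after the addition (FETCH_AFTER) and b the value
   before it.  When a fetch_after form falls back to the external
   fetch-before library routine, the trailing correction at the end of
   this function re-applies the operation (for NAND, an AND then a NOT)
   to recover the right result.  */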
5294
5295 static rtx
5296 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5297 enum rtx_code code, bool fetch_after,
5298 bool ignore, enum built_in_function ext_call)
5299 {
5300 rtx val, mem, ret;
5301 enum memmodel model;
5302 tree fndecl;
5303 tree addr;
5304
5305 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5306
5307 /* Expand the operands. */
5308 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5309 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5310
5311 /* Only try generating instructions if inlining is turned on. */
5312 if (flag_inline_atomics)
5313 {
5314 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5315 if (ret)
5316 return ret;
5317 }
5318
5319 /* Return if a different routine isn't needed for the library call. */
5320 if (ext_call == BUILT_IN_NONE)
5321 return NULL_RTX;
5322
5323 /* Change the call to the specified function. */
5324 fndecl = get_callee_fndecl (exp);
5325 addr = CALL_EXPR_FN (exp);
5326 STRIP_NOPS (addr);
5327
5328 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5329 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5330
5331 /* Expand the call here so we can emit trailing code. */
5332 ret = expand_call (exp, target, ignore);
5333
5334 /* Replace the original function just in case it matters. */
5335 TREE_OPERAND (addr, 0) = fndecl;
5336
5337 /* Then issue the arithmetic correction to return the right result. */
5338 if (!ignore)
5339 {
5340 if (code == NOT)
5341 {
5342 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5343 OPTAB_LIB_WIDEN);
5344 ret = expand_simple_unop (mode, NOT, ret, target, true);
5345 }
5346 else
5347 ret = expand_simple_binop (mode, code, ret, val, target, true,
5348 OPTAB_LIB_WIDEN);
5349 }
5350 return ret;
5351 }
5352
5353
5354 #ifndef HAVE_atomic_clear
5355 # define HAVE_atomic_clear 0
5356 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5357 #endif
5358
5359 /* Expand an atomic clear operation.
5360 void __atomic_clear (BOOL *obj, enum memmodel)
5361 EXP is the call expression. */
5362
5363 static rtx
5364 expand_builtin_atomic_clear (tree exp)
5365 {
5366 enum machine_mode mode;
5367 rtx mem, ret;
5368 enum memmodel model;
5369
5370 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5371 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5372 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5373
5374 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5375 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5376 {
5377 error ("invalid memory model for %<__atomic_store%>");
5378 return const0_rtx;
5379 }
5380
5381 if (HAVE_atomic_clear)
5382 {
5383 emit_insn (gen_atomic_clear (mem, model));
5384 return const0_rtx;
5385 }
5386
5387 /* Try issuing an atomic store, allowing a fallback to the
5388 __sync_lock_release form. The only way this can fail is if the bool
5389 type is larger than a word size; unlikely, but handle it anyway for
5390 completeness by emitting a plain store. Assume a single-threaded model,
5391 since there is no atomic support in this case and no barriers are required. */
5392 ret = expand_atomic_store (mem, const0_rtx, model, true);
5393 if (!ret)
5394 emit_move_insn (mem, const0_rtx);
5395 return const0_rtx;
5396 }
5397
5398 /* Expand an atomic test_and_set operation.
5399 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5400 EXP is the call expression. */
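/* For illustration (an editor's sketch): together with __atomic_clear
   above, this is enough for a minimal spin lock:

	while (__atomic_test_and_set (&busy, __ATOMIC_ACQUIRE))
	  ;
	...critical section...
	__atomic_clear (&busy, __ATOMIC_RELEASE);

   where busy is a bool (or char) flag.  */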
5401
5402 static rtx
5403 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5404 {
5405 rtx mem;
5406 enum memmodel model;
5407 enum machine_mode mode;
5408
5409 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5410 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5411 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5412
5413 return expand_atomic_test_and_set (target, mem, model);
5414 }
5415
5416
5417 /* Return true if the object of size ARG0 pointed to by (optional) argument
5418 ARG1 is always lock free on this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
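/* For illustration (an editor's sketch): this folding is what lets user
   code such as

	_Static_assert (__atomic_always_lock_free (sizeof (int), 0),
			"int must be lock free");

   resolve at compile time; the 0 pointer means "assume typical alignment
   for the size".  */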
5419
5420 static tree
5421 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5422 {
5423 int size;
5424 enum machine_mode mode;
5425 unsigned int mode_align, type_align;
5426
5427 if (TREE_CODE (arg0) != INTEGER_CST)
5428 return NULL_TREE;
5429
5430 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5431 mode = mode_for_size (size, MODE_INT, 0);
5432 mode_align = GET_MODE_ALIGNMENT (mode);
5433
5434 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5435 type_align = mode_align;
5436 else
5437 {
5438 tree ttype = TREE_TYPE (arg1);
5439
5440 /* This function is usually invoked and folded immediately by the front
5441 end before anything else has a chance to look at it. The pointer
5442 parameter at this point is usually cast to a void *, so check for that
5443 and look past the cast. */
5444 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5445 && VOID_TYPE_P (TREE_TYPE (ttype)))
5446 arg1 = TREE_OPERAND (arg1, 0);
5447
5448 ttype = TREE_TYPE (arg1);
5449 gcc_assert (POINTER_TYPE_P (ttype));
5450
5451 /* Get the underlying type of the object. */
5452 ttype = TREE_TYPE (ttype);
5453 type_align = TYPE_ALIGN (ttype);
5454 }
5455
5456 /* If the object has smaller alignment, the lock free routines cannot
5457 be used. */
5458 if (type_align < mode_align)
5459 return boolean_false_node;
5460
5461 /* Check if a compare_and_swap pattern exists for the mode which represents
5462 the required size. The pattern is not allowed to fail, so the existence
5463 of the pattern indicates support is present. */
5464 if (can_compare_and_swap_p (mode, true))
5465 return boolean_true_node;
5466 else
5467 return boolean_false_node;
5468 }
5469
5470 /* Return true if the parameters to call EXP represent an object which will
5471 always generate lock free instructions. The first argument represents the
5472 size of the object, and the second parameter is a pointer to the object
5473 itself. If NULL is passed for the object, then the result is based on
5474 typical alignment for an object of the specified size. Otherwise return
5475 false. */
5476
5477 static rtx
5478 expand_builtin_atomic_always_lock_free (tree exp)
5479 {
5480 tree size;
5481 tree arg0 = CALL_EXPR_ARG (exp, 0);
5482 tree arg1 = CALL_EXPR_ARG (exp, 1);
5483
5484 if (TREE_CODE (arg0) != INTEGER_CST)
5485 {
5486 error ("non-constant argument 1 to __atomic_always_lock_free");
5487 return const0_rtx;
5488 }
5489
5490 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5491 if (size == boolean_true_node)
5492 return const1_rtx;
5493 return const0_rtx;
5494 }
5495
5496 /* Return true if it can be determined that the object ARG1 of size ARG0
5497 is lock free on this architecture. */
5498
5499 static tree
5500 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5501 {
5502 if (!flag_inline_atomics)
5503 return NULL_TREE;
5504
5505 /* Only answer if the object is known to be always lock free; otherwise we cannot tell at compile time, so generate no result. */
5506 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5507 return boolean_true_node;
5508
5509 return NULL_TREE;
5510 }
5511
5512 /* Return true if the parameters to call EXP represent an object which is
5513 lock free on this architecture. The first argument represents the
5514 size of the object, and the second parameter is a pointer to the object
5515 itself. If NULL is passed for the object, then the result is based on
5516 typical alignment for an object of the specified size. Otherwise return
5517 NULL. */
5518
5519 static rtx
5520 expand_builtin_atomic_is_lock_free (tree exp)
5521 {
5522 tree size;
5523 tree arg0 = CALL_EXPR_ARG (exp, 0);
5524 tree arg1 = CALL_EXPR_ARG (exp, 1);
5525
5526 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5527 {
5528 error ("non-integer argument 1 to __atomic_is_lock_free");
5529 return NULL_RTX;
5530 }
5531
5532 if (!flag_inline_atomics)
5533 return NULL_RTX;
5534
5535 /* If the value is known at compile time, return the RTX for it. */
5536 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5537 if (size == boolean_true_node)
5538 return const1_rtx;
5539
5540 return NULL_RTX;
5541 }
5542
5543 /* Expand the __atomic_thread_fence intrinsic:
5544 void __atomic_thread_fence (enum memmodel)
5545 EXP is the CALL_EXPR. */
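/* For illustration (an editor's sketch): a release fence before a relaxed
   store publishes all earlier writes, e.g.

	data = 42;
	__atomic_thread_fence (__ATOMIC_RELEASE);
	__atomic_store_n (&ready, 1, __ATOMIC_RELAXED);

   The signal fence variant below constrains only compiler reordering,
   for code synchronizing with a signal handler on the same thread.  */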
5546
5547 static void
5548 expand_builtin_atomic_thread_fence (tree exp)
5549 {
5550 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5551 expand_mem_thread_fence (model);
5552 }
5553
5554 /* Expand the __atomic_signal_fence intrinsic:
5555 void __atomic_signal_fence (enum memmodel)
5556 EXP is the CALL_EXPR. */
5557
5558 static void
5559 expand_builtin_atomic_signal_fence (tree exp)
5560 {
5561 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5562 expand_mem_signal_fence (model);
5563 }
5564
5565 /* Expand the __sync_synchronize intrinsic. */
5566
5567 static void
5568 expand_builtin_sync_synchronize (void)
5569 {
5570 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5571 }
5572
5573 static rtx
5574 expand_builtin_thread_pointer (tree exp, rtx target)
5575 {
5576 enum insn_code icode;
5577 if (!validate_arglist (exp, VOID_TYPE))
5578 return const0_rtx;
5579 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5580 if (icode != CODE_FOR_nothing)
5581 {
5582 struct expand_operand op;
5583 if (!REG_P (target) || GET_MODE (target) != Pmode)
5584 target = gen_reg_rtx (Pmode);
5585 create_output_operand (&op, target, Pmode);
5586 expand_insn (icode, 1, &op);
5587 return target;
5588 }
5589 error ("__builtin_thread_pointer is not supported on this target");
5590 return const0_rtx;
5591 }
5592
5593 static void
5594 expand_builtin_set_thread_pointer (tree exp)
5595 {
5596 enum insn_code icode;
5597 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5598 return;
5599 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5600 if (icode != CODE_FOR_nothing)
5601 {
5602 struct expand_operand op;
5603 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5604 Pmode, EXPAND_NORMAL);
5605 create_input_operand (&op, val, Pmode);
5606 expand_insn (icode, 1, &op);
5607 return;
5608 }
5609 error ("__builtin_set_thread_pointer is not supported on this target");
5610 }
5611
5612 \f
5613 /* Expand an expression EXP that calls a built-in function,
5614 with result going to TARGET if that's convenient
5615 (and in mode MODE if that's convenient).
5616 SUBTARGET may be used as the target for computing one of EXP's operands.
5617 IGNORE is nonzero if the value is to be ignored. */
5618
5619 rtx
5620 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5621 int ignore)
5622 {
5623 tree fndecl = get_callee_fndecl (exp);
5624 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5625 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5626 int flags;
5627
5628 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5629 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5630
5631 /* When not optimizing, generate calls to library functions for a certain
5632 set of builtins. */
5633 if (!optimize
5634 && !called_as_built_in (fndecl)
5635 && fcode != BUILT_IN_FORK
5636 && fcode != BUILT_IN_EXECL
5637 && fcode != BUILT_IN_EXECV
5638 && fcode != BUILT_IN_EXECLP
5639 && fcode != BUILT_IN_EXECLE
5640 && fcode != BUILT_IN_EXECVP
5641 && fcode != BUILT_IN_EXECVE
5642 && fcode != BUILT_IN_ALLOCA
5643 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5644 && fcode != BUILT_IN_FREE)
5645 return expand_call (exp, target, ignore);
5646
5647 /* The built-in function expanders test for target == const0_rtx
5648 to determine whether the function's result will be ignored. */
5649 if (ignore)
5650 target = const0_rtx;
5651
5652 /* If the result of a pure or const built-in function is ignored, and
5653 none of its arguments are volatile, we can avoid expanding the
5654 built-in call and just evaluate the arguments for side-effects. */
5655 if (target == const0_rtx
5656 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5657 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5658 {
5659 bool volatilep = false;
5660 tree arg;
5661 call_expr_arg_iterator iter;
5662
5663 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5664 if (TREE_THIS_VOLATILE (arg))
5665 {
5666 volatilep = true;
5667 break;
5668 }
5669
5670 if (! volatilep)
5671 {
5672 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5673 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5674 return const0_rtx;
5675 }
5676 }
5677
5678 switch (fcode)
5679 {
5680 CASE_FLT_FN (BUILT_IN_FABS):
5681 case BUILT_IN_FABSD32:
5682 case BUILT_IN_FABSD64:
5683 case BUILT_IN_FABSD128:
5684 target = expand_builtin_fabs (exp, target, subtarget);
5685 if (target)
5686 return target;
5687 break;
5688
5689 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5690 target = expand_builtin_copysign (exp, target, subtarget);
5691 if (target)
5692 return target;
5693 break;
5694
5695 /* Just do a normal library call if we were unable to fold
5696 the values. */
5697 CASE_FLT_FN (BUILT_IN_CABS):
5698 break;
5699
5700 CASE_FLT_FN (BUILT_IN_EXP):
5701 CASE_FLT_FN (BUILT_IN_EXP10):
5702 CASE_FLT_FN (BUILT_IN_POW10):
5703 CASE_FLT_FN (BUILT_IN_EXP2):
5704 CASE_FLT_FN (BUILT_IN_EXPM1):
5705 CASE_FLT_FN (BUILT_IN_LOGB):
5706 CASE_FLT_FN (BUILT_IN_LOG):
5707 CASE_FLT_FN (BUILT_IN_LOG10):
5708 CASE_FLT_FN (BUILT_IN_LOG2):
5709 CASE_FLT_FN (BUILT_IN_LOG1P):
5710 CASE_FLT_FN (BUILT_IN_TAN):
5711 CASE_FLT_FN (BUILT_IN_ASIN):
5712 CASE_FLT_FN (BUILT_IN_ACOS):
5713 CASE_FLT_FN (BUILT_IN_ATAN):
5714 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5715 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5716 because of possible accuracy problems. */
5717 if (! flag_unsafe_math_optimizations)
5718 break;
5719 CASE_FLT_FN (BUILT_IN_SQRT):
5720 CASE_FLT_FN (BUILT_IN_FLOOR):
5721 CASE_FLT_FN (BUILT_IN_CEIL):
5722 CASE_FLT_FN (BUILT_IN_TRUNC):
5723 CASE_FLT_FN (BUILT_IN_ROUND):
5724 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5725 CASE_FLT_FN (BUILT_IN_RINT):
5726 target = expand_builtin_mathfn (exp, target, subtarget);
5727 if (target)
5728 return target;
5729 break;
5730
5731 CASE_FLT_FN (BUILT_IN_FMA):
5732 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5733 if (target)
5734 return target;
5735 break;
5736
5737 CASE_FLT_FN (BUILT_IN_ILOGB):
5738 if (! flag_unsafe_math_optimizations)
5739 break;
5740 CASE_FLT_FN (BUILT_IN_ISINF):
5741 CASE_FLT_FN (BUILT_IN_FINITE):
5742 case BUILT_IN_ISFINITE:
5743 case BUILT_IN_ISNORMAL:
5744 target = expand_builtin_interclass_mathfn (exp, target);
5745 if (target)
5746 return target;
5747 break;
5748
5749 CASE_FLT_FN (BUILT_IN_ICEIL):
5750 CASE_FLT_FN (BUILT_IN_LCEIL):
5751 CASE_FLT_FN (BUILT_IN_LLCEIL):
5752 CASE_FLT_FN (BUILT_IN_LFLOOR):
5753 CASE_FLT_FN (BUILT_IN_IFLOOR):
5754 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5755 target = expand_builtin_int_roundingfn (exp, target);
5756 if (target)
5757 return target;
5758 break;
5759
5760 CASE_FLT_FN (BUILT_IN_IRINT):
5761 CASE_FLT_FN (BUILT_IN_LRINT):
5762 CASE_FLT_FN (BUILT_IN_LLRINT):
5763 CASE_FLT_FN (BUILT_IN_IROUND):
5764 CASE_FLT_FN (BUILT_IN_LROUND):
5765 CASE_FLT_FN (BUILT_IN_LLROUND):
5766 target = expand_builtin_int_roundingfn_2 (exp, target);
5767 if (target)
5768 return target;
5769 break;
5770
5771 CASE_FLT_FN (BUILT_IN_POWI):
5772 target = expand_builtin_powi (exp, target);
5773 if (target)
5774 return target;
5775 break;
5776
5777 CASE_FLT_FN (BUILT_IN_ATAN2):
5778 CASE_FLT_FN (BUILT_IN_LDEXP):
5779 CASE_FLT_FN (BUILT_IN_SCALB):
5780 CASE_FLT_FN (BUILT_IN_SCALBN):
5781 CASE_FLT_FN (BUILT_IN_SCALBLN):
5782 if (! flag_unsafe_math_optimizations)
5783 break;
5784
5785 CASE_FLT_FN (BUILT_IN_FMOD):
5786 CASE_FLT_FN (BUILT_IN_REMAINDER):
5787 CASE_FLT_FN (BUILT_IN_DREM):
5788 CASE_FLT_FN (BUILT_IN_POW):
5789 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5790 if (target)
5791 return target;
5792 break;
5793
5794 CASE_FLT_FN (BUILT_IN_CEXPI):
5795 target = expand_builtin_cexpi (exp, target);
5796 gcc_assert (target);
5797 return target;
5798
5799 CASE_FLT_FN (BUILT_IN_SIN):
5800 CASE_FLT_FN (BUILT_IN_COS):
5801 if (! flag_unsafe_math_optimizations)
5802 break;
5803 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5804 if (target)
5805 return target;
5806 break;
5807
5808 CASE_FLT_FN (BUILT_IN_SINCOS):
5809 if (! flag_unsafe_math_optimizations)
5810 break;
5811 target = expand_builtin_sincos (exp);
5812 if (target)
5813 return target;
5814 break;
5815
5816 case BUILT_IN_APPLY_ARGS:
5817 return expand_builtin_apply_args ();
5818
5819 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5820 FUNCTION with a copy of the parameters described by
5821 ARGUMENTS, and ARGSIZE. It returns a block of memory
5822 allocated on the stack into which is stored all the registers
5823 that might possibly be used for returning the result of a
5824 function. ARGUMENTS is the value returned by
5825 __builtin_apply_args. ARGSIZE is the number of bytes of
5826 arguments that must be copied. ??? How should this value be
5827 computed? We'll also need a safe worst case value for varargs
5828 functions. */
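	/* For illustration (an editor's sketch): the classic use forwards an
	   unknown call unchanged; the 64 below is a guessed ARGSIZE, since
	   (per the ??? above) there is no portable way to compute it:

		void *args = __builtin_apply_args ();
		void *res = __builtin_apply ((void (*) ()) fn, args, 64);
		__builtin_return (res);

	   where fn is a hypothetical function pointer.  */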
5829 case BUILT_IN_APPLY:
5830 if (!validate_arglist (exp, POINTER_TYPE,
5831 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5832 && !validate_arglist (exp, REFERENCE_TYPE,
5833 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5834 return const0_rtx;
5835 else
5836 {
5837 rtx ops[3];
5838
5839 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5840 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5841 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5842
5843 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5844 }
5845
5846 /* __builtin_return (RESULT) causes the function to return the
5847 value described by RESULT. RESULT is address of the block of
5848 memory returned by __builtin_apply. */
5849 case BUILT_IN_RETURN:
5850 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5851 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5852 return const0_rtx;
5853
5854 case BUILT_IN_SAVEREGS:
5855 return expand_builtin_saveregs ();
5856
5857 case BUILT_IN_VA_ARG_PACK:
5858 /* All valid uses of __builtin_va_arg_pack () are removed during
5859 inlining. */
5860 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5861 return const0_rtx;
5862
5863 case BUILT_IN_VA_ARG_PACK_LEN:
5864 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5865 inlining. */
5866 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5867 return const0_rtx;
5868
5869 /* Return the address of the first anonymous stack arg. */
5870 case BUILT_IN_NEXT_ARG:
5871 if (fold_builtin_next_arg (exp, false))
5872 return const0_rtx;
5873 return expand_builtin_next_arg ();
5874
5875 case BUILT_IN_CLEAR_CACHE:
5876 target = expand_builtin___clear_cache (exp);
5877 if (target)
5878 return target;
5879 break;
5880
5881 case BUILT_IN_CLASSIFY_TYPE:
5882 return expand_builtin_classify_type (exp);
5883
5884 case BUILT_IN_CONSTANT_P:
5885 return const0_rtx;
5886
5887 case BUILT_IN_FRAME_ADDRESS:
5888 case BUILT_IN_RETURN_ADDRESS:
5889 return expand_builtin_frame_address (fndecl, exp);
5890
5891 /* Return the address of the area where the structure is returned,
5892 or 0 otherwise. */
5893 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5894 if (call_expr_nargs (exp) != 0
5895 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5896 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5897 return const0_rtx;
5898 else
5899 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5900
5901 case BUILT_IN_ALLOCA:
5902 case BUILT_IN_ALLOCA_WITH_ALIGN:
5903 /* If the allocation stems from the declaration of a variable-sized
5904 object, it cannot accumulate. */
5905 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
5906 if (target)
5907 return target;
5908 break;
5909
5910 case BUILT_IN_STACK_SAVE:
5911 return expand_stack_save ();
5912
5913 case BUILT_IN_STACK_RESTORE:
5914 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5915 return const0_rtx;
5916
5917 case BUILT_IN_BSWAP16:
5918 case BUILT_IN_BSWAP32:
5919 case BUILT_IN_BSWAP64:
5920 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
5921 if (target)
5922 return target;
5923 break;
5924
5925 CASE_INT_FN (BUILT_IN_FFS):
5926 target = expand_builtin_unop (target_mode, exp, target,
5927 subtarget, ffs_optab);
5928 if (target)
5929 return target;
5930 break;
5931
5932 CASE_INT_FN (BUILT_IN_CLZ):
5933 target = expand_builtin_unop (target_mode, exp, target,
5934 subtarget, clz_optab);
5935 if (target)
5936 return target;
5937 break;
5938
5939 CASE_INT_FN (BUILT_IN_CTZ):
5940 target = expand_builtin_unop (target_mode, exp, target,
5941 subtarget, ctz_optab);
5942 if (target)
5943 return target;
5944 break;
5945
5946 CASE_INT_FN (BUILT_IN_CLRSB):
5947 target = expand_builtin_unop (target_mode, exp, target,
5948 subtarget, clrsb_optab);
5949 if (target)
5950 return target;
5951 break;
5952
5953 CASE_INT_FN (BUILT_IN_POPCOUNT):
5954 target = expand_builtin_unop (target_mode, exp, target,
5955 subtarget, popcount_optab);
5956 if (target)
5957 return target;
5958 break;
5959
5960 CASE_INT_FN (BUILT_IN_PARITY):
5961 target = expand_builtin_unop (target_mode, exp, target,
5962 subtarget, parity_optab);
5963 if (target)
5964 return target;
5965 break;
5966
5967 case BUILT_IN_STRLEN:
5968 target = expand_builtin_strlen (exp, target, target_mode);
5969 if (target)
5970 return target;
5971 break;
5972
5973 case BUILT_IN_STRCPY:
5974 target = expand_builtin_strcpy (exp, target);
5975 if (target)
5976 return target;
5977 break;
5978
5979 case BUILT_IN_STRNCPY:
5980 target = expand_builtin_strncpy (exp, target);
5981 if (target)
5982 return target;
5983 break;
5984
5985 case BUILT_IN_STPCPY:
5986 target = expand_builtin_stpcpy (exp, target, mode);
5987 if (target)
5988 return target;
5989 break;
5990
5991 case BUILT_IN_MEMCPY:
5992 target = expand_builtin_memcpy (exp, target);
5993 if (target)
5994 return target;
5995 break;
5996
5997 case BUILT_IN_MEMPCPY:
5998 target = expand_builtin_mempcpy (exp, target, mode);
5999 if (target)
6000 return target;
6001 break;
6002
6003 case BUILT_IN_MEMSET:
6004 target = expand_builtin_memset (exp, target, mode);
6005 if (target)
6006 return target;
6007 break;
6008
6009 case BUILT_IN_BZERO:
6010 target = expand_builtin_bzero (exp);
6011 if (target)
6012 return target;
6013 break;
6014
6015 case BUILT_IN_STRCMP:
6016 target = expand_builtin_strcmp (exp, target);
6017 if (target)
6018 return target;
6019 break;
6020
6021 case BUILT_IN_STRNCMP:
6022 target = expand_builtin_strncmp (exp, target, mode);
6023 if (target)
6024 return target;
6025 break;
6026
6027 case BUILT_IN_BCMP:
6028 case BUILT_IN_MEMCMP:
6029 target = expand_builtin_memcmp (exp, target, mode);
6030 if (target)
6031 return target;
6032 break;
6033
6034 case BUILT_IN_SETJMP:
6035 /* This should have been lowered to the builtins below. */
6036 gcc_unreachable ();
6037
6038 case BUILT_IN_SETJMP_SETUP:
6039 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6040 and the receiver label. */
6041 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6042 {
6043 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6044 VOIDmode, EXPAND_NORMAL);
6045 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6046 rtx label_r = label_rtx (label);
6047
6048 /* This is copied from the handling of non-local gotos. */
6049 expand_builtin_setjmp_setup (buf_addr, label_r);
6050 nonlocal_goto_handler_labels
6051 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6052 nonlocal_goto_handler_labels);
6053 /* ??? Do not let expand_label treat us as such since we would
6054 not want to be both on the list of non-local labels and on
6055 the list of forced labels. */
6056 FORCED_LABEL (label) = 0;
6057 return const0_rtx;
6058 }
6059 break;
6060
6061 case BUILT_IN_SETJMP_DISPATCHER:
6062 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6063 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6064 {
6065 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6066 rtx label_r = label_rtx (label);
6067
6068 /* Remove the dispatcher label from the list of non-local labels
6069 since the receiver labels have been added to it above. */
6070 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6071 return const0_rtx;
6072 }
6073 break;
6074
6075 case BUILT_IN_SETJMP_RECEIVER:
6076 /* __builtin_setjmp_receiver is passed the receiver label. */
6077 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6078 {
6079 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6080 rtx label_r = label_rtx (label);
6081
6082 expand_builtin_setjmp_receiver (label_r);
6083 return const0_rtx;
6084 }
6085 break;
6086
6087 /* __builtin_longjmp is passed a pointer to an array of five words.
6088 It's similar to the C library longjmp function but works with
6089 __builtin_setjmp above. */
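	/* For illustration (an editor's sketch): the user-visible pair is

		intptr_t buf[5];
		if (__builtin_setjmp (buf) == 0)
		  __builtin_longjmp (buf, 1);

	   and the second argument must be the literal 1, as enforced
	   below.  */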
6090 case BUILT_IN_LONGJMP:
6091 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6092 {
6093 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6094 VOIDmode, EXPAND_NORMAL);
6095 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6096
6097 if (value != const1_rtx)
6098 {
6099 error ("%<__builtin_longjmp%> second argument must be 1");
6100 return const0_rtx;
6101 }
6102
6103 expand_builtin_longjmp (buf_addr, value);
6104 return const0_rtx;
6105 }
6106 break;
6107
6108 case BUILT_IN_NONLOCAL_GOTO:
6109 target = expand_builtin_nonlocal_goto (exp);
6110 if (target)
6111 return target;
6112 break;
6113
6114 /* This updates the setjmp buffer that is its argument with the value
6115 of the current stack pointer. */
6116 case BUILT_IN_UPDATE_SETJMP_BUF:
6117 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6118 {
6119 rtx buf_addr
6120 = expand_normal (CALL_EXPR_ARG (exp, 0));
6121
6122 expand_builtin_update_setjmp_buf (buf_addr);
6123 return const0_rtx;
6124 }
6125 break;
6126
6127 case BUILT_IN_TRAP:
6128 expand_builtin_trap ();
6129 return const0_rtx;
6130
6131 case BUILT_IN_UNREACHABLE:
6132 expand_builtin_unreachable ();
6133 return const0_rtx;
6134
6135 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6136 case BUILT_IN_SIGNBITD32:
6137 case BUILT_IN_SIGNBITD64:
6138 case BUILT_IN_SIGNBITD128:
6139 target = expand_builtin_signbit (exp, target);
6140 if (target)
6141 return target;
6142 break;
6143
6144 /* Various hooks for the DWARF 2 __throw routine. */
6145 case BUILT_IN_UNWIND_INIT:
6146 expand_builtin_unwind_init ();
6147 return const0_rtx;
6148 case BUILT_IN_DWARF_CFA:
6149 return virtual_cfa_rtx;
6150 #ifdef DWARF2_UNWIND_INFO
6151 case BUILT_IN_DWARF_SP_COLUMN:
6152 return expand_builtin_dwarf_sp_column ();
6153 case BUILT_IN_INIT_DWARF_REG_SIZES:
6154 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6155 return const0_rtx;
6156 #endif
6157 case BUILT_IN_FROB_RETURN_ADDR:
6158 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6159 case BUILT_IN_EXTRACT_RETURN_ADDR:
6160 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6161 case BUILT_IN_EH_RETURN:
6162 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6163 CALL_EXPR_ARG (exp, 1));
6164 return const0_rtx;
6165 #ifdef EH_RETURN_DATA_REGNO
6166 case BUILT_IN_EH_RETURN_DATA_REGNO:
6167 return expand_builtin_eh_return_data_regno (exp);
6168 #endif
6169 case BUILT_IN_EXTEND_POINTER:
6170 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6171 case BUILT_IN_EH_POINTER:
6172 return expand_builtin_eh_pointer (exp);
6173 case BUILT_IN_EH_FILTER:
6174 return expand_builtin_eh_filter (exp);
6175 case BUILT_IN_EH_COPY_VALUES:
6176 return expand_builtin_eh_copy_values (exp);
6177
6178 case BUILT_IN_VA_START:
6179 return expand_builtin_va_start (exp);
6180 case BUILT_IN_VA_END:
6181 return expand_builtin_va_end (exp);
6182 case BUILT_IN_VA_COPY:
6183 return expand_builtin_va_copy (exp);
6184 case BUILT_IN_EXPECT:
6185 return expand_builtin_expect (exp, target);
6186 case BUILT_IN_ASSUME_ALIGNED:
6187 return expand_builtin_assume_aligned (exp, target);
6188 case BUILT_IN_PREFETCH:
6189 expand_builtin_prefetch (exp);
6190 return const0_rtx;
6191
6192 case BUILT_IN_INIT_TRAMPOLINE:
6193 return expand_builtin_init_trampoline (exp, true);
6194 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6195 return expand_builtin_init_trampoline (exp, false);
6196 case BUILT_IN_ADJUST_TRAMPOLINE:
6197 return expand_builtin_adjust_trampoline (exp);
6198
6199 case BUILT_IN_FORK:
6200 case BUILT_IN_EXECL:
6201 case BUILT_IN_EXECV:
6202 case BUILT_IN_EXECLP:
6203 case BUILT_IN_EXECLE:
6204 case BUILT_IN_EXECVP:
6205 case BUILT_IN_EXECVE:
6206 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6207 if (target)
6208 return target;
6209 break;
6210
6211 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6212 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6213 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6214 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6215 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6216 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6217 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6218 if (target)
6219 return target;
6220 break;
6221
6222 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6223 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6224 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6225 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6226 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6227 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6228 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6229 if (target)
6230 return target;
6231 break;
6232
6233 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6234 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6235 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6236 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6237 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6238 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6239 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6240 if (target)
6241 return target;
6242 break;
6243
6244 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6245 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6246 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6247 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6248 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6249 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6250 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6251 if (target)
6252 return target;
6253 break;
6254
6255 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6256 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6257 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6258 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6259 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6260 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6261 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6262 if (target)
6263 return target;
6264 break;
6265
6266 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6267 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6268 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6269 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6270 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6271 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6272 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6273 if (target)
6274 return target;
6275 break;
6276
6277 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6278 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6279 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6280 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6281 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6282 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6283 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6284 if (target)
6285 return target;
6286 break;
6287
6288 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6289 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6290 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6291 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6292 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6293 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6294 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6295 if (target)
6296 return target;
6297 break;
6298
6299 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6300 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6301 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6302 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6303 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6304 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6305 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6306 if (target)
6307 return target;
6308 break;
6309
6310 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6311 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6312 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6313 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6314 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6315 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6316 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6317 if (target)
6318 return target;
6319 break;
6320
6321 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6322 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6323 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6324 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6325 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6326 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6327 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6328 if (target)
6329 return target;
6330 break;
6331
6332 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6333 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6334 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6335 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6336 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6337 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6338 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6339 if (target)
6340 return target;
6341 break;
6342
6343 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6344 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6345 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6346 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6347 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6348 if (mode == VOIDmode)
6349 mode = TYPE_MODE (boolean_type_node);
6350 if (!target || !register_operand (target, mode))
6351 target = gen_reg_rtx (mode);
6352
6353 mode = get_builtin_sync_mode
6354 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6355 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6356 if (target)
6357 return target;
6358 break;
6359
6360 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6361 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6362 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6363 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6364 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6365 mode = get_builtin_sync_mode
6366 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6367 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6368 if (target)
6369 return target;
6370 break;
6371
6372 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6373 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6374 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6375 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6376 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6377 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6378 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6379 if (target)
6380 return target;
6381 break;
6382
6383 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6384 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6385 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6386 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6387 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6388 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6389 expand_builtin_sync_lock_release (mode, exp);
6390 return const0_rtx;
6391
6392 case BUILT_IN_SYNC_SYNCHRONIZE:
6393 expand_builtin_sync_synchronize ();
6394 return const0_rtx;
6395
6396 case BUILT_IN_ATOMIC_EXCHANGE_1:
6397 case BUILT_IN_ATOMIC_EXCHANGE_2:
6398 case BUILT_IN_ATOMIC_EXCHANGE_4:
6399 case BUILT_IN_ATOMIC_EXCHANGE_8:
6400 case BUILT_IN_ATOMIC_EXCHANGE_16:
6401 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6402 target = expand_builtin_atomic_exchange (mode, exp, target);
6403 if (target)
6404 return target;
6405 break;
6406
6407 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6408 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6409 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6410 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6411 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6412 {
6413 unsigned int nargs, z;
6414 vec<tree, va_gc> *vec;
6415
6416 mode =
6417 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6418 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6419 if (target)
6420 return target;
6421
6422 /* If this is turned into an external library call, the weak parameter
6423 must be dropped to match the expected parameter list. */
6424 nargs = call_expr_nargs (exp);
6425 vec_alloc (vec, nargs - 1);
6426 for (z = 0; z < 3; z++)
6427 vec->quick_push (CALL_EXPR_ARG (exp, z));
6428 /* Skip the boolean weak parameter. */
6429 for (z = 4; z < 6; z++)
6430 vec->quick_push (CALL_EXPR_ARG (exp, z));
6431 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6432 break;
6433 }
6434
6435 case BUILT_IN_ATOMIC_LOAD_1:
6436 case BUILT_IN_ATOMIC_LOAD_2:
6437 case BUILT_IN_ATOMIC_LOAD_4:
6438 case BUILT_IN_ATOMIC_LOAD_8:
6439 case BUILT_IN_ATOMIC_LOAD_16:
6440 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6441 target = expand_builtin_atomic_load (mode, exp, target);
6442 if (target)
6443 return target;
6444 break;
6445
6446 case BUILT_IN_ATOMIC_STORE_1:
6447 case BUILT_IN_ATOMIC_STORE_2:
6448 case BUILT_IN_ATOMIC_STORE_4:
6449 case BUILT_IN_ATOMIC_STORE_8:
6450 case BUILT_IN_ATOMIC_STORE_16:
6451 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6452 target = expand_builtin_atomic_store (mode, exp);
6453 if (target)
6454 return const0_rtx;
6455 break;
6456
6457 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6458 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6459 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6460 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6461 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6462 {
6463 enum built_in_function lib;
6464 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6465 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6466 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6467 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6468 ignore, lib);
6469 if (target)
6470 return target;
6471 break;
6472 }
6473 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6474 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6475 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6476 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6477 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6478 {
6479 enum built_in_function lib;
6480 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6481 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6482 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6483 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6484 ignore, lib);
6485 if (target)
6486 return target;
6487 break;
6488 }
6489 case BUILT_IN_ATOMIC_AND_FETCH_1:
6490 case BUILT_IN_ATOMIC_AND_FETCH_2:
6491 case BUILT_IN_ATOMIC_AND_FETCH_4:
6492 case BUILT_IN_ATOMIC_AND_FETCH_8:
6493 case BUILT_IN_ATOMIC_AND_FETCH_16:
6494 {
6495 enum built_in_function lib;
6496 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6497 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6498 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6499 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6500 ignore, lib);
6501 if (target)
6502 return target;
6503 break;
6504 }
6505 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6506 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6507 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6508 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6509 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6510 {
6511 enum built_in_function lib;
6512 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6513 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6514 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6515 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6516 ignore, lib);
6517 if (target)
6518 return target;
6519 break;
6520 }
6521 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6522 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6523 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6524 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6525 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6526 {
6527 enum built_in_function lib;
6528 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6529 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6530 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6531 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6532 ignore, lib);
6533 if (target)
6534 return target;
6535 break;
6536 }
6537 case BUILT_IN_ATOMIC_OR_FETCH_1:
6538 case BUILT_IN_ATOMIC_OR_FETCH_2:
6539 case BUILT_IN_ATOMIC_OR_FETCH_4:
6540 case BUILT_IN_ATOMIC_OR_FETCH_8:
6541 case BUILT_IN_ATOMIC_OR_FETCH_16:
6542 {
6543 enum built_in_function lib;
6544 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6545 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6546 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6547 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6548 ignore, lib);
6549 if (target)
6550 return target;
6551 break;
6552 }
6553 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6554 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6555 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6556 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6557 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6558 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6559 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6560 ignore, BUILT_IN_NONE);
6561 if (target)
6562 return target;
6563 break;
6564
6565 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6566 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6567 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6568 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6569 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6570 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6571 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6572 ignore, BUILT_IN_NONE);
6573 if (target)
6574 return target;
6575 break;
6576
6577 case BUILT_IN_ATOMIC_FETCH_AND_1:
6578 case BUILT_IN_ATOMIC_FETCH_AND_2:
6579 case BUILT_IN_ATOMIC_FETCH_AND_4:
6580 case BUILT_IN_ATOMIC_FETCH_AND_8:
6581 case BUILT_IN_ATOMIC_FETCH_AND_16:
6582 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6583 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6584 ignore, BUILT_IN_NONE);
6585 if (target)
6586 return target;
6587 break;
6588
6589 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6590 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6591 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6592 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6593 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6594 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6595 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6596 ignore, BUILT_IN_NONE);
6597 if (target)
6598 return target;
6599 break;
6600
6601 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6602 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6603 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6604 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6605 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6606 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6607 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6608 ignore, BUILT_IN_NONE);
6609 if (target)
6610 return target;
6611 break;
6612
6613 case BUILT_IN_ATOMIC_FETCH_OR_1:
6614 case BUILT_IN_ATOMIC_FETCH_OR_2:
6615 case BUILT_IN_ATOMIC_FETCH_OR_4:
6616 case BUILT_IN_ATOMIC_FETCH_OR_8:
6617 case BUILT_IN_ATOMIC_FETCH_OR_16:
6618 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6619 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6620 ignore, BUILT_IN_NONE);
6621 if (target)
6622 return target;
6623 break;
6624
6625 case BUILT_IN_ATOMIC_TEST_AND_SET:
6626 return expand_builtin_atomic_test_and_set (exp, target);
6627
6628 case BUILT_IN_ATOMIC_CLEAR:
6629 return expand_builtin_atomic_clear (exp);
6630
6631 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6632 return expand_builtin_atomic_always_lock_free (exp);
6633
6634 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6635 target = expand_builtin_atomic_is_lock_free (exp);
6636 if (target)
6637 return target;
6638 break;
6639
6640 case BUILT_IN_ATOMIC_THREAD_FENCE:
6641 expand_builtin_atomic_thread_fence (exp);
6642 return const0_rtx;
6643
6644 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6645 expand_builtin_atomic_signal_fence (exp);
6646 return const0_rtx;
6647
6648 case BUILT_IN_OBJECT_SIZE:
6649 return expand_builtin_object_size (exp);
6650
6651 case BUILT_IN_MEMCPY_CHK:
6652 case BUILT_IN_MEMPCPY_CHK:
6653 case BUILT_IN_MEMMOVE_CHK:
6654 case BUILT_IN_MEMSET_CHK:
6655 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6656 if (target)
6657 return target;
6658 break;
6659
6660 case BUILT_IN_STRCPY_CHK:
6661 case BUILT_IN_STPCPY_CHK:
6662 case BUILT_IN_STRNCPY_CHK:
6663 case BUILT_IN_STPNCPY_CHK:
6664 case BUILT_IN_STRCAT_CHK:
6665 case BUILT_IN_STRNCAT_CHK:
6666 case BUILT_IN_SNPRINTF_CHK:
6667 case BUILT_IN_VSNPRINTF_CHK:
6668 maybe_emit_chk_warning (exp, fcode);
6669 break;
6670
6671 case BUILT_IN_SPRINTF_CHK:
6672 case BUILT_IN_VSPRINTF_CHK:
6673 maybe_emit_sprintf_chk_warning (exp, fcode);
6674 break;
6675
6676 case BUILT_IN_FREE:
6677 if (warn_free_nonheap_object)
6678 maybe_emit_free_warning (exp);
6679 break;
6680
6681 case BUILT_IN_THREAD_POINTER:
6682 return expand_builtin_thread_pointer (exp, target);
6683
6684 case BUILT_IN_SET_THREAD_POINTER:
6685 expand_builtin_set_thread_pointer (exp);
6686 return const0_rtx;
6687
6688 default: /* Just do a library call if this is an unknown builtin. */
6689 break;
6690 }
6691
6692 /* The switch statement above can drop through to cause the function
6693 to be called normally. */
6694 return expand_call (exp, target, ignore);
6695 }
6696
6697 /* Determine whether a tree node represents a call to a built-in
6698 function. If the tree T is a call to a built-in function with
6699 the right number of arguments of the appropriate types, return
6700 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6701 Otherwise the return value is END_BUILTINS. */
6702
6703 enum built_in_function
6704 builtin_mathfn_code (const_tree t)
6705 {
6706 const_tree fndecl, arg, parmlist;
6707 const_tree argtype, parmtype;
6708 const_call_expr_arg_iterator iter;
6709
6710 if (TREE_CODE (t) != CALL_EXPR
6711 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6712 return END_BUILTINS;
6713
6714 fndecl = get_callee_fndecl (t);
6715 if (fndecl == NULL_TREE
6716 || TREE_CODE (fndecl) != FUNCTION_DECL
6717 || ! DECL_BUILT_IN (fndecl)
6718 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6719 return END_BUILTINS;
6720
6721 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6722 init_const_call_expr_arg_iterator (t, &iter);
6723 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6724 {
6725 /* If a function doesn't take a variable number of arguments,
6726 the last element in the list will have type `void'. */
6727 parmtype = TREE_VALUE (parmlist);
6728 if (VOID_TYPE_P (parmtype))
6729 {
6730 if (more_const_call_expr_args_p (&iter))
6731 return END_BUILTINS;
6732 return DECL_FUNCTION_CODE (fndecl);
6733 }
6734
6735 if (! more_const_call_expr_args_p (&iter))
6736 return END_BUILTINS;
6737
6738 arg = next_const_call_expr_arg (&iter);
6739 argtype = TREE_TYPE (arg);
6740
6741 if (SCALAR_FLOAT_TYPE_P (parmtype))
6742 {
6743 if (! SCALAR_FLOAT_TYPE_P (argtype))
6744 return END_BUILTINS;
6745 }
6746 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6747 {
6748 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6749 return END_BUILTINS;
6750 }
6751 else if (POINTER_TYPE_P (parmtype))
6752 {
6753 if (! POINTER_TYPE_P (argtype))
6754 return END_BUILTINS;
6755 }
6756 else if (INTEGRAL_TYPE_P (parmtype))
6757 {
6758 if (! INTEGRAL_TYPE_P (argtype))
6759 return END_BUILTINS;
6760 }
6761 else
6762 return END_BUILTINS;
6763 }
6764
6765 /* Variable-length argument list. */
6766 return DECL_FUNCTION_CODE (fndecl);
6767 }
6768
6769 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6770 evaluate to a constant. */
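/* For illustration (an editor's sketch): this is the folding behind code
   like

	r = __builtin_constant_p (n) ? fold_it (n) : call_it (n);

   where fold_it and call_it are hypothetical; the builtin folds to 1
   when N is known constant here, and to 0 once no further optimization
   will run.  */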
6771
6772 static tree
6773 fold_builtin_constant_p (tree arg)
6774 {
6775 /* We return 1 for a numeric type that's known to be a constant
6776 value at compile-time or for an aggregate type that's a
6777 literal constant. */
6778 STRIP_NOPS (arg);
6779
6780 /* If we know this is a constant, return the constant one. */
6781 if (CONSTANT_CLASS_P (arg)
6782 || (TREE_CODE (arg) == CONSTRUCTOR
6783 && TREE_CONSTANT (arg)))
6784 return integer_one_node;
6785 if (TREE_CODE (arg) == ADDR_EXPR)
6786 {
6787 tree op = TREE_OPERAND (arg, 0);
6788 if (TREE_CODE (op) == STRING_CST
6789 || (TREE_CODE (op) == ARRAY_REF
6790 && integer_zerop (TREE_OPERAND (op, 1))
6791 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6792 return integer_one_node;
6793 }
6794
6795 /* If this expression has side effects, show we don't know it to be a
6796 constant. Likewise if it's a pointer or aggregate type, since in
6797 those cases we only want literals, as those are only optimized
6798 when generating RTL, not later.
6799 And finally, if we are compiling an initializer, not code, we
6800 need to return a definite result now; there's not going to be any
6801 more optimization done. */
6802 if (TREE_SIDE_EFFECTS (arg)
6803 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6804 || POINTER_TYPE_P (TREE_TYPE (arg))
6805 || cfun == 0
6806 || folding_initializer
6807 || force_folding_builtin_constant_p)
6808 return integer_zero_node;
6809
6810 return NULL_TREE;
6811 }
6812
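/* For illustration, a sketch of how the folding above behaves, assuming
i and p are ordinary local variables:

__builtin_constant_p (42) -> 1 (a CONSTANT_CLASS_P node)
__builtin_constant_p ("abc") -> 1 (the address of a STRING_CST)
__builtin_constant_p (p) -> 0 (pointer type, so only literals count)
__builtin_constant_p (i + 1) -> NULL_TREE, i.e. left for later passes. */
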
6813 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6814 return it as a truthvalue. */
6815
6816 static tree
6817 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6818 {
6819 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6820
6821 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6822 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6823 ret_type = TREE_TYPE (TREE_TYPE (fn));
6824 pred_type = TREE_VALUE (arg_types);
6825 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6826
6827 pred = fold_convert_loc (loc, pred_type, pred);
6828 expected = fold_convert_loc (loc, expected_type, expected);
6829 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6830
6831 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6832 build_int_cst (ret_type, 0));
6833 }
6834
6835 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6836 NULL_TREE if no simplification is possible. */
6837
6838 static tree
6839 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6840 {
6841 tree inner, fndecl, inner_arg0;
6842 enum tree_code code;
6843
6844 /* Distribute the expected value over short-circuiting operators.
6845 See through the cast from truthvalue_type_node to long. */
6846 inner_arg0 = arg0;
6847 while (TREE_CODE (inner_arg0) == NOP_EXPR
6848 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6849 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6850 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6851
6852 /* If this is a builtin_expect within a builtin_expect, keep the
6853 inner one. See through a comparison against a constant. It
6854 might have been added to create a truthvalue. */
6855 inner = inner_arg0;
6856
6857 if (COMPARISON_CLASS_P (inner)
6858 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6859 inner = TREE_OPERAND (inner, 0);
6860
6861 if (TREE_CODE (inner) == CALL_EXPR
6862 && (fndecl = get_callee_fndecl (inner))
6863 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6864 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6865 return arg0;
6866
6867 inner = inner_arg0;
6868 code = TREE_CODE (inner);
6869 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6870 {
6871 tree op0 = TREE_OPERAND (inner, 0);
6872 tree op1 = TREE_OPERAND (inner, 1);
6873
6874 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6875 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6876 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6877
6878 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6879 }
6880
6881 /* If the argument isn't invariant then there's nothing else we can do. */
6882 if (!TREE_CONSTANT (inner_arg0))
6883 return NULL_TREE;
6884
6885 /* If we expect that a comparison against the argument will fold to
6886 a constant return the constant. In practice, this means a true
6887 constant or the address of a non-weak symbol. */
6888 inner = inner_arg0;
6889 STRIP_NOPS (inner);
6890 if (TREE_CODE (inner) == ADDR_EXPR)
6891 {
6892 do
6893 {
6894 inner = TREE_OPERAND (inner, 0);
6895 }
6896 while (TREE_CODE (inner) == COMPONENT_REF
6897 || TREE_CODE (inner) == ARRAY_REF);
6898 if ((TREE_CODE (inner) == VAR_DECL
6899 || TREE_CODE (inner) == FUNCTION_DECL)
6900 && DECL_WEAK (inner))
6901 return NULL_TREE;
6902 }
6903
6904 /* Otherwise, ARG0 already has the proper type for the return value. */
6905 return arg0;
6906 }
6907
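/* A sketch of the distribution performed above: a source-level call

__builtin_expect (a && b, 1)

is rewritten as if it had been

(__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

(modulo the conversions to __builtin_expect's long arguments), so each
arm of the short-circuit carries the expectation. */
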
6908 /* Fold a call to __builtin_classify_type with argument ARG. */
6909
6910 static tree
6911 fold_builtin_classify_type (tree arg)
6912 {
6913 if (arg == 0)
6914 return build_int_cst (integer_type_node, no_type_class);
6915
6916 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
6917 }
6918
6919 /* Fold a call to __builtin_strlen with argument ARG. */
6920
6921 static tree
6922 fold_builtin_strlen (location_t loc, tree type, tree arg)
6923 {
6924 if (!validate_arg (arg, POINTER_TYPE))
6925 return NULL_TREE;
6926 else
6927 {
6928 tree len = c_strlen (arg, 0);
6929
6930 if (len)
6931 return fold_convert_loc (loc, type, len);
6932
6933 return NULL_TREE;
6934 }
6935 }
6936
6937 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6938
6939 static tree
6940 fold_builtin_inf (location_t loc, tree type, int warn)
6941 {
6942 REAL_VALUE_TYPE real;
6943
6944 /* __builtin_inff is intended to be usable to define INFINITY on all
6945 targets. If an infinity is not available, INFINITY expands "to a
6946 positive constant of type float that overflows at translation
6947 time", footnote "In this case, using INFINITY will violate the
6948 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6949 Thus we pedwarn to ensure this constraint violation is
6950 diagnosed. */
6951 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6952 pedwarn (loc, 0, "target format does not support infinity");
6953
6954 real_inf (&real);
6955 return build_real (type, real);
6956 }
6957
6958 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6959
6960 static tree
6961 fold_builtin_nan (tree arg, tree type, int quiet)
6962 {
6963 REAL_VALUE_TYPE real;
6964 const char *str;
6965
6966 if (!validate_arg (arg, POINTER_TYPE))
6967 return NULL_TREE;
6968 str = c_getstr (arg);
6969 if (!str)
6970 return NULL_TREE;
6971
6972 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6973 return NULL_TREE;
6974
6975 return build_real (type, real);
6976 }
6977
6978 /* Return true if the floating point expression T has an integer value.
6979 We also allow +Inf, -Inf and NaN to be considered integer values. */
6980
6981 static bool
6982 integer_valued_real_p (tree t)
6983 {
6984 switch (TREE_CODE (t))
6985 {
6986 case FLOAT_EXPR:
6987 return true;
6988
6989 case ABS_EXPR:
6990 case SAVE_EXPR:
6991 return integer_valued_real_p (TREE_OPERAND (t, 0));
6992
6993 case COMPOUND_EXPR:
6994 case MODIFY_EXPR:
6995 case BIND_EXPR:
6996 return integer_valued_real_p (TREE_OPERAND (t, 1));
6997
6998 case PLUS_EXPR:
6999 case MINUS_EXPR:
7000 case MULT_EXPR:
7001 case MIN_EXPR:
7002 case MAX_EXPR:
7003 return integer_valued_real_p (TREE_OPERAND (t, 0))
7004 && integer_valued_real_p (TREE_OPERAND (t, 1));
7005
7006 case COND_EXPR:
7007 return integer_valued_real_p (TREE_OPERAND (t, 1))
7008 && integer_valued_real_p (TREE_OPERAND (t, 2));
7009
7010 case REAL_CST:
7011 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7012
7013 case NOP_EXPR:
7014 {
7015 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7016 if (TREE_CODE (type) == INTEGER_TYPE)
7017 return true;
7018 if (TREE_CODE (type) == REAL_TYPE)
7019 return integer_valued_real_p (TREE_OPERAND (t, 0));
7020 break;
7021 }
7022
7023 case CALL_EXPR:
7024 switch (builtin_mathfn_code (t))
7025 {
7026 CASE_FLT_FN (BUILT_IN_CEIL):
7027 CASE_FLT_FN (BUILT_IN_FLOOR):
7028 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7029 CASE_FLT_FN (BUILT_IN_RINT):
7030 CASE_FLT_FN (BUILT_IN_ROUND):
7031 CASE_FLT_FN (BUILT_IN_TRUNC):
7032 return true;
7033
7034 CASE_FLT_FN (BUILT_IN_FMIN):
7035 CASE_FLT_FN (BUILT_IN_FMAX):
7036 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7037 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7038
7039 default:
7040 break;
7041 }
7042 break;
7043
7044 default:
7045 break;
7046 }
7047 return false;
7048 }
7049
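/* For illustration, the predicate above holds for floor (x),
trunc (x) + 1.0, (double) i for integral i, fmin (rint (x), 2.0) and
the constant 3.0, but not for x / 2.0 or the constant 0.5. */
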
7050 /* FNDECL is assumed to be a builtin where truncation can be propagated
7051 across (for instance floor((double)f) == (double)floorf (f)).
7052 Do the transformation for a call with argument ARG. */
7053
7054 static tree
7055 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7056 {
7057 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7058
7059 if (!validate_arg (arg, REAL_TYPE))
7060 return NULL_TREE;
7061
7062 /* Integer rounding functions are idempotent. */
7063 if (fcode == builtin_mathfn_code (arg))
7064 return arg;
7065
7066 /* If argument is already integer valued, and we don't need to worry
7067 about setting errno, there's no need to perform rounding. */
7068 if (! flag_errno_math && integer_valued_real_p (arg))
7069 return arg;
7070
7071 if (optimize)
7072 {
7073 tree arg0 = strip_float_extensions (arg);
7074 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7075 tree newtype = TREE_TYPE (arg0);
7076 tree decl;
7077
7078 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7079 && (decl = mathfn_built_in (newtype, fcode)))
7080 return fold_convert_loc (loc, ftype,
7081 build_call_expr_loc (loc, decl, 1,
7082 fold_convert_loc (loc,
7083 newtype,
7084 arg0)));
7085 }
7086 return NULL_TREE;
7087 }
7088
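/* Sketches of the folds above, for double x and float f:

floor (floor (x)) -> floor (x) (integer rounding is idempotent)
floor ((double) f) -> (double) floorf (f) when optimizing,

the latter because extending float to double is exact, so rounding
before or after the extension yields the same value. */
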
7089 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7090 the argument, for instance lround((double)f) -> lroundf (f).
7091 Do the transformation for a call with argument ARG. */
7092
7093 static tree
7094 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7095 {
7096 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7097
7098 if (!validate_arg (arg, REAL_TYPE))
7099 return NULL_TREE;
7100
7101 /* If argument is already integer valued, and we don't need to worry
7102 about setting errno, there's no need to perform rounding. */
7103 if (! flag_errno_math && integer_valued_real_p (arg))
7104 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7105 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7106
7107 if (optimize)
7108 {
7109 tree ftype = TREE_TYPE (arg);
7110 tree arg0 = strip_float_extensions (arg);
7111 tree newtype = TREE_TYPE (arg0);
7112 tree decl;
7113
7114 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7115 && (decl = mathfn_built_in (newtype, fcode)))
7116 return build_call_expr_loc (loc, decl, 1,
7117 fold_convert_loc (loc, newtype, arg0));
7118 }
7119
7120 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7121 sizeof (int) == sizeof (long). */
7122 if (TYPE_PRECISION (integer_type_node)
7123 == TYPE_PRECISION (long_integer_type_node))
7124 {
7125 tree newfn = NULL_TREE;
7126 switch (fcode)
7127 {
7128 CASE_FLT_FN (BUILT_IN_ICEIL):
7129 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7130 break;
7131
7132 CASE_FLT_FN (BUILT_IN_IFLOOR):
7133 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7134 break;
7135
7136 CASE_FLT_FN (BUILT_IN_IROUND):
7137 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7138 break;
7139
7140 CASE_FLT_FN (BUILT_IN_IRINT):
7141 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7142 break;
7143
7144 default:
7145 break;
7146 }
7147
7148 if (newfn)
7149 {
7150 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7151 return fold_convert_loc (loc,
7152 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7153 }
7154 }
7155
7156 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7157 sizeof (long long) == sizeof (long). */
7158 if (TYPE_PRECISION (long_long_integer_type_node)
7159 == TYPE_PRECISION (long_integer_type_node))
7160 {
7161 tree newfn = NULL_TREE;
7162 switch (fcode)
7163 {
7164 CASE_FLT_FN (BUILT_IN_LLCEIL):
7165 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7166 break;
7167
7168 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7169 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7170 break;
7171
7172 CASE_FLT_FN (BUILT_IN_LLROUND):
7173 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7174 break;
7175
7176 CASE_FLT_FN (BUILT_IN_LLRINT):
7177 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7178 break;
7179
7180 default:
7181 break;
7182 }
7183
7184 if (newfn)
7185 {
7186 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7187 return fold_convert_loc (loc,
7188 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7189 }
7190 }
7191
7192 return NULL_TREE;
7193 }
7194
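/* Sketches of the folds above, assuming the precision tests hold:

lround ((double) f) -> lroundf (f) for float f, when optimizing
iround (x) -> (int) lround (x) when int and long have equal precision
llround (x) -> (long long) lround (x) when long long and long have
equal precision. */
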
7195 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7196 return type. Return NULL_TREE if no simplification can be made. */
7197
7198 static tree
7199 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7200 {
7201 tree res;
7202
7203 if (!validate_arg (arg, COMPLEX_TYPE)
7204 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7205 return NULL_TREE;
7206
7207 /* Calculate the result when the argument is a constant. */
7208 if (TREE_CODE (arg) == COMPLEX_CST
7209 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7210 type, mpfr_hypot)))
7211 return res;
7212
7213 if (TREE_CODE (arg) == COMPLEX_EXPR)
7214 {
7215 tree real = TREE_OPERAND (arg, 0);
7216 tree imag = TREE_OPERAND (arg, 1);
7217
7218 /* If either part is zero, cabs is fabs of the other. */
7219 if (real_zerop (real))
7220 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7221 if (real_zerop (imag))
7222 return fold_build1_loc (loc, ABS_EXPR, type, real);
7223
7224 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7225 if (flag_unsafe_math_optimizations
7226 && operand_equal_p (real, imag, OEP_PURE_SAME))
7227 {
7228 const REAL_VALUE_TYPE sqrt2_trunc
7229 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7230 STRIP_NOPS (real);
7231 return fold_build2_loc (loc, MULT_EXPR, type,
7232 fold_build1_loc (loc, ABS_EXPR, type, real),
7233 build_real (type, sqrt2_trunc));
7234 }
7235 }
7236
7237 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7238 if (TREE_CODE (arg) == NEGATE_EXPR
7239 || TREE_CODE (arg) == CONJ_EXPR)
7240 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7241
7242 /* Don't do this when optimizing for size. */
7243 if (flag_unsafe_math_optimizations
7244 && optimize && optimize_function_for_speed_p (cfun))
7245 {
7246 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7247
7248 if (sqrtfn != NULL_TREE)
7249 {
7250 tree rpart, ipart, result;
7251
7252 arg = builtin_save_expr (arg);
7253
7254 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7255 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7256
7257 rpart = builtin_save_expr (rpart);
7258 ipart = builtin_save_expr (ipart);
7259
7260 result = fold_build2_loc (loc, PLUS_EXPR, type,
7261 fold_build2_loc (loc, MULT_EXPR, type,
7262 rpart, rpart),
7263 fold_build2_loc (loc, MULT_EXPR, type,
7264 ipart, ipart));
7265
7266 return build_call_expr_loc (loc, sqrtfn, 1, result);
7267 }
7268 }
7269
7270 return NULL_TREE;
7271 }
7272
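/* Summary sketches of the cabs folds above:

cabs (x + 0.0*I) -> fabs (x)
cabs (0.0 + y*I) -> fabs (y)
cabs (x + x*I) -> fabs (x) * sqrt (2) (-funsafe-math-optimizations)
cabs (-z), cabs (conj (z)) -> cabs (z)
cabs (z) -> sqrt (r*r + i*i) (unsafe math, when optimizing for speed). */
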
7273 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7274 complex tree type of the result. If NEG is true, the imaginary
7275 zero is negative. */
7276
7277 static tree
7278 build_complex_cproj (tree type, bool neg)
7279 {
7280 REAL_VALUE_TYPE rinf, rzero = dconst0;
7281
7282 real_inf (&rinf);
7283 rzero.sign = neg;
7284 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7285 build_real (TREE_TYPE (type), rzero));
7286 }
7287
7288 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7289 return type. Return NULL_TREE if no simplification can be made. */
7290
7291 static tree
7292 fold_builtin_cproj (location_t loc, tree arg, tree type)
7293 {
7294 if (!validate_arg (arg, COMPLEX_TYPE)
7295 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7296 return NULL_TREE;
7297
7298 /* If there are no infinities, return arg. */
7299 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7300 return non_lvalue_loc (loc, arg);
7301
7302 /* Calculate the result when the argument is a constant. */
7303 if (TREE_CODE (arg) == COMPLEX_CST)
7304 {
7305 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7306 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7307
7308 if (real_isinf (real) || real_isinf (imag))
7309 return build_complex_cproj (type, imag->sign);
7310 else
7311 return arg;
7312 }
7313 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7314 {
7315 tree real = TREE_OPERAND (arg, 0);
7316 tree imag = TREE_OPERAND (arg, 1);
7317
7318 STRIP_NOPS (real);
7319 STRIP_NOPS (imag);
7320
7321 /* If the real part is inf and the imag part is known to be
7322 nonnegative, return (inf + 0i). Remember side-effects are
7323 possible in the imag part. */
7324 if (TREE_CODE (real) == REAL_CST
7325 && real_isinf (TREE_REAL_CST_PTR (real))
7326 && tree_expr_nonnegative_p (imag))
7327 return omit_one_operand_loc (loc, type,
7328 build_complex_cproj (type, false),
7329 arg);
7330
7331 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7332 Remember side-effects are possible in the real part. */
7333 if (TREE_CODE (imag) == REAL_CST
7334 && real_isinf (TREE_REAL_CST_PTR (imag)))
7335 return
7336 omit_one_operand_loc (loc, type,
7337 build_complex_cproj (type, TREE_REAL_CST_PTR
7338 (imag)->sign), arg);
7339 }
7340
7341 return NULL_TREE;
7342 }
7343
7344 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7345 Return NULL_TREE if no simplification can be made. */
7346
7347 static tree
7348 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7349 {
7350
7351 enum built_in_function fcode;
7352 tree res;
7353
7354 if (!validate_arg (arg, REAL_TYPE))
7355 return NULL_TREE;
7356
7357 /* Calculate the result when the argument is a constant. */
7358 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7359 return res;
7360
7361 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7362 fcode = builtin_mathfn_code (arg);
7363 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7364 {
7365 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7366 arg = fold_build2_loc (loc, MULT_EXPR, type,
7367 CALL_EXPR_ARG (arg, 0),
7368 build_real (type, dconsthalf));
7369 return build_call_expr_loc (loc, expfn, 1, arg);
7370 }
7371
7372 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7373 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7374 {
7375 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7376
7377 if (powfn)
7378 {
7379 tree arg0 = CALL_EXPR_ARG (arg, 0);
7380 tree tree_root;
7381 /* The inner root was either sqrt or cbrt. */
7382 /* This was a conditional expression but it triggered a bug
7383 in Sun C 5.5. */
7384 REAL_VALUE_TYPE dconstroot;
7385 if (BUILTIN_SQRT_P (fcode))
7386 dconstroot = dconsthalf;
7387 else
7388 dconstroot = dconst_third ();
7389
7390 /* Adjust for the outer root. */
7391 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7392 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7393 tree_root = build_real (type, dconstroot);
7394 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7395 }
7396 }
7397
7398 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7399 if (flag_unsafe_math_optimizations
7400 && (fcode == BUILT_IN_POW
7401 || fcode == BUILT_IN_POWF
7402 || fcode == BUILT_IN_POWL))
7403 {
7404 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7405 tree arg0 = CALL_EXPR_ARG (arg, 0);
7406 tree arg1 = CALL_EXPR_ARG (arg, 1);
7407 tree narg1;
7408 if (!tree_expr_nonnegative_p (arg0))
7409 arg0 = build1 (ABS_EXPR, type, arg0);
7410 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7411 build_real (type, dconsthalf));
7412 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7413 }
7414
7415 return NULL_TREE;
7416 }
7417
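/* Sketches of the unsafe-math folds above:

sqrt (exp (x)) -> exp (x * 0.5)
sqrt (sqrt (x)) -> pow (x, 0.25)
sqrt (cbrt (x)) -> pow (x, 1.0/6.0)
sqrt (pow (x, y)) -> pow (fabs (x), y * 0.5). */
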
7418 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7419 Return NULL_TREE if no simplification can be made. */
7420
7421 static tree
7422 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7423 {
7424 const enum built_in_function fcode = builtin_mathfn_code (arg);
7425 tree res;
7426
7427 if (!validate_arg (arg, REAL_TYPE))
7428 return NULL_TREE;
7429
7430 /* Calculate the result when the argument is a constant. */
7431 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7432 return res;
7433
7434 if (flag_unsafe_math_optimizations)
7435 {
7436 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7437 if (BUILTIN_EXPONENT_P (fcode))
7438 {
7439 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7440 const REAL_VALUE_TYPE third_trunc =
7441 real_value_truncate (TYPE_MODE (type), dconst_third ());
7442 arg = fold_build2_loc (loc, MULT_EXPR, type,
7443 CALL_EXPR_ARG (arg, 0),
7444 build_real (type, third_trunc));
7445 return build_call_expr_loc (loc, expfn, 1, arg);
7446 }
7447
7448 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7449 if (BUILTIN_SQRT_P (fcode))
7450 {
7451 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7452
7453 if (powfn)
7454 {
7455 tree arg0 = CALL_EXPR_ARG (arg, 0);
7456 tree tree_root;
7457 REAL_VALUE_TYPE dconstroot = dconst_third ();
7458
7459 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7460 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7461 tree_root = build_real (type, dconstroot);
7462 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7463 }
7464 }
7465
7466 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7467 if (BUILTIN_CBRT_P (fcode))
7468 {
7469 tree arg0 = CALL_EXPR_ARG (arg, 0);
7470 if (tree_expr_nonnegative_p (arg0))
7471 {
7472 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7473
7474 if (powfn)
7475 {
7476 tree tree_root;
7477 REAL_VALUE_TYPE dconstroot;
7478
7479 real_arithmetic (&dconstroot, MULT_EXPR,
7480 dconst_third_ptr (), dconst_third_ptr ());
7481 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7482 tree_root = build_real (type, dconstroot);
7483 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7484 }
7485 }
7486 }
7487
7488 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7489 if (fcode == BUILT_IN_POW
7490 || fcode == BUILT_IN_POWF
7491 || fcode == BUILT_IN_POWL)
7492 {
7493 tree arg00 = CALL_EXPR_ARG (arg, 0);
7494 tree arg01 = CALL_EXPR_ARG (arg, 1);
7495 if (tree_expr_nonnegative_p (arg00))
7496 {
7497 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7498 const REAL_VALUE_TYPE dconstroot
7499 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7500 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7501 build_real (type, dconstroot));
7502 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7503 }
7504 }
7505 }
7506 return NULL_TREE;
7507 }
7508
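/* Sketches of the unsafe-math folds above:

cbrt (exp (x)) -> exp (x / 3)
cbrt (sqrt (x)) -> pow (x, 1.0/6.0)
cbrt (cbrt (x)) -> pow (x, 1.0/9.0) iff x is nonnegative
cbrt (pow (x, y)) -> pow (x, y / 3) iff x is nonnegative. */
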
7509 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7510 TYPE is the type of the return value. Return NULL_TREE if no
7511 simplification can be made. */
7512
7513 static tree
7514 fold_builtin_cos (location_t loc,
7515 tree arg, tree type, tree fndecl)
7516 {
7517 tree res, narg;
7518
7519 if (!validate_arg (arg, REAL_TYPE))
7520 return NULL_TREE;
7521
7522 /* Calculate the result when the argument is a constant. */
7523 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7524 return res;
7525
7526 /* Optimize cos(-x) into cos (x). */
7527 if ((narg = fold_strip_sign_ops (arg)))
7528 return build_call_expr_loc (loc, fndecl, 1, narg);
7529
7530 return NULL_TREE;
7531 }
7532
7533 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7534 Return NULL_TREE if no simplification can be made. */
7535
7536 static tree
7537 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7538 {
7539 if (validate_arg (arg, REAL_TYPE))
7540 {
7541 tree res, narg;
7542
7543 /* Calculate the result when the argument is a constant. */
7544 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7545 return res;
7546
7547 /* Optimize cosh(-x) into cosh (x). */
7548 if ((narg = fold_strip_sign_ops (arg)))
7549 return build_call_expr_loc (loc, fndecl, 1, narg);
7550 }
7551
7552 return NULL_TREE;
7553 }
7554
7555 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7556 argument ARG. TYPE is the type of the return value. Return
7557 NULL_TREE if no simplification can be made. */
7558
7559 static tree
7560 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7561 bool hyper)
7562 {
7563 if (validate_arg (arg, COMPLEX_TYPE)
7564 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7565 {
7566 tree tmp;
7567
7568 /* Calculate the result when the argument is a constant. */
7569 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7570 return tmp;
7571
7572 /* Optimize fn(-x) into fn(x). */
7573 if ((tmp = fold_strip_sign_ops (arg)))
7574 return build_call_expr_loc (loc, fndecl, 1, tmp);
7575 }
7576
7577 return NULL_TREE;
7578 }
7579
7580 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7581 Return NULL_TREE if no simplification can be made. */
7582
7583 static tree
7584 fold_builtin_tan (tree arg, tree type)
7585 {
7586 enum built_in_function fcode;
7587 tree res;
7588
7589 if (!validate_arg (arg, REAL_TYPE))
7590 return NULL_TREE;
7591
7592 /* Calculate the result when the argument is a constant. */
7593 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7594 return res;
7595
7596 /* Optimize tan(atan(x)) = x. */
7597 fcode = builtin_mathfn_code (arg);
7598 if (flag_unsafe_math_optimizations
7599 && (fcode == BUILT_IN_ATAN
7600 || fcode == BUILT_IN_ATANF
7601 || fcode == BUILT_IN_ATANL))
7602 return CALL_EXPR_ARG (arg, 0);
7603
7604 return NULL_TREE;
7605 }
7606
7607 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7608 NULL_TREE if no simplification can be made. */
7609
7610 static tree
7611 fold_builtin_sincos (location_t loc,
7612 tree arg0, tree arg1, tree arg2)
7613 {
7614 tree type;
7615 tree res, fn, call;
7616
7617 if (!validate_arg (arg0, REAL_TYPE)
7618 || !validate_arg (arg1, POINTER_TYPE)
7619 || !validate_arg (arg2, POINTER_TYPE))
7620 return NULL_TREE;
7621
7622 type = TREE_TYPE (arg0);
7623
7624 /* Calculate the result when the argument is a constant. */
7625 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7626 return res;
7627
7628 /* Canonicalize sincos to cexpi. */
7629 if (!targetm.libc_has_function (function_c99_math_complex))
7630 return NULL_TREE;
7631 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7632 if (!fn)
7633 return NULL_TREE;
7634
7635 call = build_call_expr_loc (loc, fn, 1, arg0);
7636 call = builtin_save_expr (call);
7637
7638 return build2 (COMPOUND_EXPR, void_type_node,
7639 build2 (MODIFY_EXPR, void_type_node,
7640 build_fold_indirect_ref_loc (loc, arg1),
7641 build1 (IMAGPART_EXPR, type, call)),
7642 build2 (MODIFY_EXPR, void_type_node,
7643 build_fold_indirect_ref_loc (loc, arg2),
7644 build1 (REALPART_EXPR, type, call)));
7645 }
7646
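/* A sketch of the canonicalization above, assuming the target libc
provides the C99 complex math functions (BUILT_IN_CEXPI is GCC's
internal entry point for exp (I*x)): a call

sincos (x, &s, &c);

is folded as if it were

_Complex double t = cexpi (x);
s = __imag__ t;
c = __real__ t;

since cexpi (x) == cos (x) + I * sin (x), one library call produces
both results. */
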
7647 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7648 NULL_TREE if no simplification can be made. */
7649
7650 static tree
7651 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7652 {
7653 tree rtype;
7654 tree realp, imagp, ifn;
7655 tree res;
7656
7657 if (!validate_arg (arg0, COMPLEX_TYPE)
7658 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7659 return NULL_TREE;
7660
7661 /* Calculate the result when the argument is a constant. */
7662 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7663 return res;
7664
7665 rtype = TREE_TYPE (TREE_TYPE (arg0));
7666
7667 /* If we can determine the real part of arg0 and it is constant zero,
7668 fold to cexpi. */
7669 if (!targetm.libc_has_function (function_c99_math_complex))
7670 return NULL_TREE;
7671 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7672 if (!ifn)
7673 return NULL_TREE;
7674
7675 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7676 && real_zerop (realp))
7677 {
7678 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7679 return build_call_expr_loc (loc, ifn, 1, narg);
7680 }
7681
7682 /* If we can easily decompose the real and imaginary parts, split cexp
7683 into exp (r) * cexpi (i). */
7684 if (flag_unsafe_math_optimizations
7685 && realp)
7686 {
7687 tree rfn, rcall, icall;
7688
7689 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7690 if (!rfn)
7691 return NULL_TREE;
7692
7693 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7694 if (!imagp)
7695 return NULL_TREE;
7696
7697 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7698 icall = builtin_save_expr (icall);
7699 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7700 rcall = builtin_save_expr (rcall);
7701 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7702 fold_build2_loc (loc, MULT_EXPR, rtype,
7703 rcall,
7704 fold_build1_loc (loc, REALPART_EXPR,
7705 rtype, icall)),
7706 fold_build2_loc (loc, MULT_EXPR, rtype,
7707 rcall,
7708 fold_build1_loc (loc, IMAGPART_EXPR,
7709 rtype, icall)));
7710 }
7711
7712 return NULL_TREE;
7713 }
7714
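/* Sketches of the cexp folds above, for z == r + i*I:

cexp (z) -> cexpi (i) when r is known to be zero
cexp (z) -> exp (r) * cexpi (i) under -funsafe-math-optimizations,

where the final complex value is assembled from the real and
imaginary parts of the cexpi result scaled by exp (r). */
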
7715 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7716 Return NULL_TREE if no simplification can be made. */
7717
7718 static tree
7719 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7720 {
7721 if (!validate_arg (arg, REAL_TYPE))
7722 return NULL_TREE;
7723
7724 /* Optimize trunc of constant value. */
7725 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7726 {
7727 REAL_VALUE_TYPE r, x;
7728 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7729
7730 x = TREE_REAL_CST (arg);
7731 real_trunc (&r, TYPE_MODE (type), &x);
7732 return build_real (type, r);
7733 }
7734
7735 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7736 }
7737
7738 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7739 Return NULL_TREE if no simplification can be made. */
7740
7741 static tree
7742 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7743 {
7744 if (!validate_arg (arg, REAL_TYPE))
7745 return NULL_TREE;
7746
7747 /* Optimize floor of constant value. */
7748 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7749 {
7750 REAL_VALUE_TYPE x;
7751
7752 x = TREE_REAL_CST (arg);
7753 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7754 {
7755 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7756 REAL_VALUE_TYPE r;
7757
7758 real_floor (&r, TYPE_MODE (type), &x);
7759 return build_real (type, r);
7760 }
7761 }
7762
7763 /* Fold floor (x) where x is nonnegative to trunc (x). */
7764 if (tree_expr_nonnegative_p (arg))
7765 {
7766 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7767 if (truncfn)
7768 return build_call_expr_loc (loc, truncfn, 1, arg);
7769 }
7770
7771 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7772 }
7773
7774 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7775 Return NULL_TREE if no simplification can be made. */
7776
7777 static tree
7778 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7779 {
7780 if (!validate_arg (arg, REAL_TYPE))
7781 return NULL_TREE;
7782
7783 /* Optimize ceil of constant value. */
7784 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7785 {
7786 REAL_VALUE_TYPE x;
7787
7788 x = TREE_REAL_CST (arg);
7789 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7790 {
7791 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7792 REAL_VALUE_TYPE r;
7793
7794 real_ceil (&r, TYPE_MODE (type), &x);
7795 return build_real (type, r);
7796 }
7797 }
7798
7799 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7800 }
7801
7802 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7803 Return NULL_TREE if no simplification can be made. */
7804
7805 static tree
7806 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7807 {
7808 if (!validate_arg (arg, REAL_TYPE))
7809 return NULL_TREE;
7810
7811 /* Optimize round of constant value. */
7812 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7813 {
7814 REAL_VALUE_TYPE x;
7815
7816 x = TREE_REAL_CST (arg);
7817 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7818 {
7819 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7820 REAL_VALUE_TYPE r;
7821
7822 real_round (&r, TYPE_MODE (type), &x);
7823 return build_real (type, r);
7824 }
7825 }
7826
7827 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7828 }
7829
7830 /* Fold function call to builtin lround, lroundf or lroundl (or the
7831 corresponding long long versions) and other rounding functions. ARG
7832 is the argument to the call. Return NULL_TREE if no simplification
7833 can be made. */
7834
7835 static tree
7836 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7837 {
7838 if (!validate_arg (arg, REAL_TYPE))
7839 return NULL_TREE;
7840
7841 /* Optimize lround of constant value. */
7842 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7843 {
7844 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7845
7846 if (real_isfinite (&x))
7847 {
7848 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7849 tree ftype = TREE_TYPE (arg);
7850 double_int val;
7851 REAL_VALUE_TYPE r;
7852
7853 switch (DECL_FUNCTION_CODE (fndecl))
7854 {
7855 CASE_FLT_FN (BUILT_IN_IFLOOR):
7856 CASE_FLT_FN (BUILT_IN_LFLOOR):
7857 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7858 real_floor (&r, TYPE_MODE (ftype), &x);
7859 break;
7860
7861 CASE_FLT_FN (BUILT_IN_ICEIL):
7862 CASE_FLT_FN (BUILT_IN_LCEIL):
7863 CASE_FLT_FN (BUILT_IN_LLCEIL):
7864 real_ceil (&r, TYPE_MODE (ftype), &x);
7865 break;
7866
7867 CASE_FLT_FN (BUILT_IN_IROUND):
7868 CASE_FLT_FN (BUILT_IN_LROUND):
7869 CASE_FLT_FN (BUILT_IN_LLROUND):
7870 real_round (&r, TYPE_MODE (ftype), &x);
7871 break;
7872
7873 default:
7874 gcc_unreachable ();
7875 }
7876
7877 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7878 if (double_int_fits_to_tree_p (itype, val))
7879 return double_int_to_tree (itype, val);
7880 }
7881 }
7882
7883 switch (DECL_FUNCTION_CODE (fndecl))
7884 {
7885 CASE_FLT_FN (BUILT_IN_LFLOOR):
7886 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7887 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7888 if (tree_expr_nonnegative_p (arg))
7889 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7890 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7891 break;
7892 default:;
7893 }
7894
7895 return fold_fixed_mathfn (loc, fndecl, arg);
7896 }
7897
7898 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7899 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7900 the argument to the call. Return NULL_TREE if no simplification can
7901 be made. */
7902
7903 static tree
7904 fold_builtin_bitop (tree fndecl, tree arg)
7905 {
7906 if (!validate_arg (arg, INTEGER_TYPE))
7907 return NULL_TREE;
7908
7909 /* Optimize for constant argument. */
7910 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7911 {
7912 HOST_WIDE_INT hi, width, result;
7913 unsigned HOST_WIDE_INT lo;
7914 tree type;
7915
7916 type = TREE_TYPE (arg);
7917 width = TYPE_PRECISION (type);
7918 lo = TREE_INT_CST_LOW (arg);
7919
7920 /* Clear all the bits that are beyond the type's precision. */
7921 if (width > HOST_BITS_PER_WIDE_INT)
7922 {
7923 hi = TREE_INT_CST_HIGH (arg);
7924 if (width < HOST_BITS_PER_DOUBLE_INT)
7925 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
7926 }
7927 else
7928 {
7929 hi = 0;
7930 if (width < HOST_BITS_PER_WIDE_INT)
7931 lo &= ~(HOST_WIDE_INT_M1U << width);
7932 }
7933
7934 switch (DECL_FUNCTION_CODE (fndecl))
7935 {
7936 CASE_INT_FN (BUILT_IN_FFS):
7937 if (lo != 0)
7938 result = ffs_hwi (lo);
7939 else if (hi != 0)
7940 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7941 else
7942 result = 0;
7943 break;
7944
7945 CASE_INT_FN (BUILT_IN_CLZ):
7946 if (hi != 0)
7947 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7948 else if (lo != 0)
7949 result = width - floor_log2 (lo) - 1;
7950 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7951 result = width;
7952 break;
7953
7954 CASE_INT_FN (BUILT_IN_CTZ):
7955 if (lo != 0)
7956 result = ctz_hwi (lo);
7957 else if (hi != 0)
7958 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7959 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7960 result = width;
7961 break;
7962
7963 CASE_INT_FN (BUILT_IN_CLRSB):
7964 if (width > 2 * HOST_BITS_PER_WIDE_INT)
7965 return NULL_TREE;
7966 if (width > HOST_BITS_PER_WIDE_INT
7967 && (hi & ((unsigned HOST_WIDE_INT) 1
7968 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
7969 {
7970 hi = ~hi & ~(HOST_WIDE_INT_M1U
7971 << (width - HOST_BITS_PER_WIDE_INT - 1));
7972 lo = ~lo;
7973 }
7974 else if (width <= HOST_BITS_PER_WIDE_INT
7975 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
7976 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
7977 if (hi != 0)
7978 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
7979 else if (lo != 0)
7980 result = width - floor_log2 (lo) - 2;
7981 else
7982 result = width - 1;
7983 break;
7984
7985 CASE_INT_FN (BUILT_IN_POPCOUNT):
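/* Kernighan's trick: X & (X - 1) clears the lowest set bit, so the
loops below iterate once per set bit; PARITY additionally masks the
count down to its low bit. */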
7986 result = 0;
7987 while (lo)
7988 result++, lo &= lo - 1;
7989 while (hi)
7990 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7991 break;
7992
7993 CASE_INT_FN (BUILT_IN_PARITY):
7994 result = 0;
7995 while (lo)
7996 result++, lo &= lo - 1;
7997 while (hi)
7998 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7999 result &= 1;
8000 break;
8001
8002 default:
8003 gcc_unreachable ();
8004 }
8005
8006 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8007 }
8008
8009 return NULL_TREE;
8010 }
8011
8012 /* Fold function call to builtin_bswap and the short, long and long long
8013 variants. Return NULL_TREE if no simplification can be made. */
8014 static tree
8015 fold_builtin_bswap (tree fndecl, tree arg)
8016 {
8017 if (! validate_arg (arg, INTEGER_TYPE))
8018 return NULL_TREE;
8019
8020 /* Optimize constant value. */
8021 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8022 {
8023 HOST_WIDE_INT hi, width, r_hi = 0;
8024 unsigned HOST_WIDE_INT lo, r_lo = 0;
8025 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8026
8027 width = TYPE_PRECISION (type);
8028 lo = TREE_INT_CST_LOW (arg);
8029 hi = TREE_INT_CST_HIGH (arg);
8030
8031 switch (DECL_FUNCTION_CODE (fndecl))
8032 {
8033 case BUILT_IN_BSWAP16:
8034 case BUILT_IN_BSWAP32:
8035 case BUILT_IN_BSWAP64:
8036 {
8037 int s;
8038
8039 for (s = 0; s < width; s += 8)
8040 {
8041 int d = width - s - 8;
8042 unsigned HOST_WIDE_INT byte;
8043
8044 if (s < HOST_BITS_PER_WIDE_INT)
8045 byte = (lo >> s) & 0xff;
8046 else
8047 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8048
8049 if (d < HOST_BITS_PER_WIDE_INT)
8050 r_lo |= byte << d;
8051 else
8052 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8053 }
8054 }
8055
8056 break;
8057
8058 default:
8059 gcc_unreachable ();
8060 }
8061
8062 if (width < HOST_BITS_PER_WIDE_INT)
8063 return build_int_cst (type, r_lo);
8064 else
8065 return build_int_cst_wide (type, r_lo, r_hi);
8066 }
8067
8068 return NULL_TREE;
8069 }
8070
8071 /* A subroutine of fold_builtin to fold the various logarithmic
8072 functions. Return NULL_TREE if no simplification can be made.
8073 FUNC is the corresponding MPFR logarithm function. */
8074
8075 static tree
8076 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8077 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8078 {
8079 if (validate_arg (arg, REAL_TYPE))
8080 {
8081 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8082 tree res;
8083 const enum built_in_function fcode = builtin_mathfn_code (arg);
8084
8085 /* Calculate the result when the argument is a constant. */
8086 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8087 return res;
8088
8089 /* Special case, optimize logN(expN(x)) = x. */
8090 if (flag_unsafe_math_optimizations
8091 && ((func == mpfr_log
8092 && (fcode == BUILT_IN_EXP
8093 || fcode == BUILT_IN_EXPF
8094 || fcode == BUILT_IN_EXPL))
8095 || (func == mpfr_log2
8096 && (fcode == BUILT_IN_EXP2
8097 || fcode == BUILT_IN_EXP2F
8098 || fcode == BUILT_IN_EXP2L))
8099 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8100 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8101
8102 /* Optimize logN(func()) for various exponential functions. We
8103 want to determine the value "x" and the power "exponent" in
8104 order to transform logN(x**exponent) into exponent*logN(x). */
8105 if (flag_unsafe_math_optimizations)
8106 {
8107 tree exponent = 0, x = 0;
8108
8109 switch (fcode)
8110 {
8111 CASE_FLT_FN (BUILT_IN_EXP):
8112 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8113 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8114 dconst_e ()));
8115 exponent = CALL_EXPR_ARG (arg, 0);
8116 break;
8117 CASE_FLT_FN (BUILT_IN_EXP2):
8118 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8119 x = build_real (type, dconst2);
8120 exponent = CALL_EXPR_ARG (arg, 0);
8121 break;
8122 CASE_FLT_FN (BUILT_IN_EXP10):
8123 CASE_FLT_FN (BUILT_IN_POW10):
8124 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8125 {
8126 REAL_VALUE_TYPE dconst10;
8127 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8128 x = build_real (type, dconst10);
8129 }
8130 exponent = CALL_EXPR_ARG (arg, 0);
8131 break;
8132 CASE_FLT_FN (BUILT_IN_SQRT):
8133 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8134 x = CALL_EXPR_ARG (arg, 0);
8135 exponent = build_real (type, dconsthalf);
8136 break;
8137 CASE_FLT_FN (BUILT_IN_CBRT):
8138 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8139 x = CALL_EXPR_ARG (arg, 0);
8140 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8141 dconst_third ()));
8142 break;
8143 CASE_FLT_FN (BUILT_IN_POW):
8144 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8145 x = CALL_EXPR_ARG (arg, 0);
8146 exponent = CALL_EXPR_ARG (arg, 1);
8147 break;
8148 default:
8149 break;
8150 }
8151
8152 /* Now perform the optimization. */
8153 if (x && exponent)
8154 {
8155 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8156 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8157 }
8158 }
8159 }
8160
8161 return NULL_TREE;
8162 }
8163
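/* Sketches of the folds above:

log (exp (x)) -> x (likewise log2/exp2 and log10/exp10)
log (pow (x, y)) -> y * log (x)
log (sqrt (x)) -> 0.5 * log (x)
log (cbrt (x)) -> (1.0/3.0) * log (x)
log2 (exp (x)) -> x * log2 (e)

all requiring -funsafe-math-optimizations. */
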
8164 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8165 NULL_TREE if no simplification can be made. */
8166
8167 static tree
8168 fold_builtin_hypot (location_t loc, tree fndecl,
8169 tree arg0, tree arg1, tree type)
8170 {
8171 tree res, narg0, narg1;
8172
8173 if (!validate_arg (arg0, REAL_TYPE)
8174 || !validate_arg (arg1, REAL_TYPE))
8175 return NULL_TREE;
8176
8177 /* Calculate the result when the argument is a constant. */
8178 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8179 return res;
8180
8181 /* If either argument to hypot has a negate or abs, strip that off.
8182 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8183 narg0 = fold_strip_sign_ops (arg0);
8184 narg1 = fold_strip_sign_ops (arg1);
8185 if (narg0 || narg1)
8186 {
8187 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8188 narg1 ? narg1 : arg1);
8189 }
8190
8191 /* If either argument is zero, hypot is fabs of the other. */
8192 if (real_zerop (arg0))
8193 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8194 else if (real_zerop (arg1))
8195 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8196
8197 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8198 if (flag_unsafe_math_optimizations
8199 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8200 {
8201 const REAL_VALUE_TYPE sqrt2_trunc
8202 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8203 return fold_build2_loc (loc, MULT_EXPR, type,
8204 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8205 build_real (type, sqrt2_trunc));
8206 }
8207
8208 return NULL_TREE;
8209 }
8210
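/* Summary sketches of the hypot folds above:

hypot (-x, fabs (y)) -> hypot (x, y)
hypot (x, 0.0) -> fabs (x)
hypot (x, x) -> fabs (x) * sqrt (2) (-funsafe-math-optimizations). */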
8211
8212 /* Fold a builtin function call to pow, powf, or powl. Return
8213 NULL_TREE if no simplification can be made. */
8214 static tree
8215 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8216 {
8217 tree res;
8218
8219 if (!validate_arg (arg0, REAL_TYPE)
8220 || !validate_arg (arg1, REAL_TYPE))
8221 return NULL_TREE;
8222
8223 /* Calculate the result when the argument is a constant. */
8224 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8225 return res;
8226
8227 /* Optimize pow(1.0,y) = 1.0. */
8228 if (real_onep (arg0))
8229 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8230
8231 if (TREE_CODE (arg1) == REAL_CST
8232 && !TREE_OVERFLOW (arg1))
8233 {
8234 REAL_VALUE_TYPE cint;
8235 REAL_VALUE_TYPE c;
8236 HOST_WIDE_INT n;
8237
8238 c = TREE_REAL_CST (arg1);
8239
8240 /* Optimize pow(x,0.0) = 1.0. */
8241 if (REAL_VALUES_EQUAL (c, dconst0))
8242 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8243 arg0);
8244
8245 /* Optimize pow(x,1.0) = x. */
8246 if (REAL_VALUES_EQUAL (c, dconst1))
8247 return arg0;
8248
8249 /* Optimize pow(x,-1.0) = 1.0/x. */
8250 if (REAL_VALUES_EQUAL (c, dconstm1))
8251 return fold_build2_loc (loc, RDIV_EXPR, type,
8252 build_real (type, dconst1), arg0);
8253
8254 /* Optimize pow(x,0.5) = sqrt(x). */
8255 if (flag_unsafe_math_optimizations
8256 && REAL_VALUES_EQUAL (c, dconsthalf))
8257 {
8258 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8259
8260 if (sqrtfn != NULL_TREE)
8261 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8262 }
8263
8264 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8265 if (flag_unsafe_math_optimizations)
8266 {
8267 const REAL_VALUE_TYPE dconstroot
8268 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8269
8270 if (REAL_VALUES_EQUAL (c, dconstroot))
8271 {
8272 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8273 if (cbrtfn != NULL_TREE)
8274 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8275 }
8276 }
8277
8278 /* Check for an integer exponent. */
8279 n = real_to_integer (&c);
8280 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8281 if (real_identical (&c, &cint))
8282 {
8283 /* Attempt to evaluate pow at compile-time, unless this should
8284 raise an exception. */
8285 if (TREE_CODE (arg0) == REAL_CST
8286 && !TREE_OVERFLOW (arg0)
8287 && (n > 0
8288 || (!flag_trapping_math && !flag_errno_math)
8289 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8290 {
8291 REAL_VALUE_TYPE x;
8292 bool inexact;
8293
8294 x = TREE_REAL_CST (arg0);
8295 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8296 if (flag_unsafe_math_optimizations || !inexact)
8297 return build_real (type, x);
8298 }
8299
8300 /* Strip sign ops from even integer powers. */
8301 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8302 {
8303 tree narg0 = fold_strip_sign_ops (arg0);
8304 if (narg0)
8305 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8306 }
8307 }
8308 }
8309
8310 if (flag_unsafe_math_optimizations)
8311 {
8312 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8313
8314 /* Optimize pow(expN(x),y) = expN(x*y). */
8315 if (BUILTIN_EXPONENT_P (fcode))
8316 {
8317 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8318 tree arg = CALL_EXPR_ARG (arg0, 0);
8319 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8320 return build_call_expr_loc (loc, expfn, 1, arg);
8321 }
8322
8323 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8324 if (BUILTIN_SQRT_P (fcode))
8325 {
8326 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8327 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8328 build_real (type, dconsthalf));
8329 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8330 }
8331
8332 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8333 if (BUILTIN_CBRT_P (fcode))
8334 {
8335 tree arg = CALL_EXPR_ARG (arg0, 0);
8336 if (tree_expr_nonnegative_p (arg))
8337 {
8338 const REAL_VALUE_TYPE dconstroot
8339 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8340 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8341 build_real (type, dconstroot));
8342 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8343 }
8344 }
8345
8346 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8347 if (fcode == BUILT_IN_POW
8348 || fcode == BUILT_IN_POWF
8349 || fcode == BUILT_IN_POWL)
8350 {
8351 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8352 if (tree_expr_nonnegative_p (arg00))
8353 {
8354 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8355 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8356 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8357 }
8358 }
8359 }
8360
8361 return NULL_TREE;
8362 }
8363
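/* Sketches of the pow folds above:

pow (1.0, y) -> 1.0
pow (x, 0.0) -> 1.0 (even for x == 0.0)
pow (x, 1.0) -> x
pow (x, -1.0) -> 1.0 / x
pow (x, 0.5) -> sqrt (x) (-funsafe-math-optimizations)
pow (x, 1.0/3.0) -> cbrt (x) (unsafe math)
pow (exp (x), y) -> exp (x * y) (unsafe math)
pow (sqrt (x), y) -> pow (x, y * 0.5) (unsafe math)
pow (pow (x, y), z) -> pow (x, y * z) iff x is nonnegative (unsafe math). */
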
8364 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8365 Return NULL_TREE if no simplification can be made. */
8366 static tree
8367 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8368 tree arg0, tree arg1, tree type)
8369 {
8370 if (!validate_arg (arg0, REAL_TYPE)
8371 || !validate_arg (arg1, INTEGER_TYPE))
8372 return NULL_TREE;
8373
8374 /* Optimize pow(1.0,y) = 1.0. */
8375 if (real_onep (arg0))
8376 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8377
8378 if (host_integerp (arg1, 0))
8379 {
8380 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8381
8382 /* Evaluate powi at compile-time. */
8383 if (TREE_CODE (arg0) == REAL_CST
8384 && !TREE_OVERFLOW (arg0))
8385 {
8386 REAL_VALUE_TYPE x;
8387 x = TREE_REAL_CST (arg0);
8388 real_powi (&x, TYPE_MODE (type), &x, c);
8389 return build_real (type, x);
8390 }
8391
8392 /* Optimize pow(x,0) = 1.0. */
8393 if (c == 0)
8394 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8395 arg0);
8396
8397 /* Optimize pow(x,1) = x. */
8398 if (c == 1)
8399 return arg0;
8400
8401 /* Optimize pow(x,-1) = 1.0/x. */
8402 if (c == -1)
8403 return fold_build2_loc (loc, RDIV_EXPR, type,
8404 build_real (type, dconst1), arg0);
8405 }
8406
8407 return NULL_TREE;
8408 }
8409
8410 /* A subroutine of fold_builtin to fold the various exponent
8411 functions. Return NULL_TREE if no simplification can be made.
8412 FUNC is the corresponding MPFR exponent function. */
8413
8414 static tree
8415 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8416 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8417 {
8418 if (validate_arg (arg, REAL_TYPE))
8419 {
8420 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8421 tree res;
8422
8423 /* Calculate the result when the argument is a constant. */
8424 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8425 return res;
8426
8427 /* Optimize expN(logN(x)) = x. */
8428 if (flag_unsafe_math_optimizations)
8429 {
8430 const enum built_in_function fcode = builtin_mathfn_code (arg);
8431
8432 if ((func == mpfr_exp
8433 && (fcode == BUILT_IN_LOG
8434 || fcode == BUILT_IN_LOGF
8435 || fcode == BUILT_IN_LOGL))
8436 || (func == mpfr_exp2
8437 && (fcode == BUILT_IN_LOG2
8438 || fcode == BUILT_IN_LOG2F
8439 || fcode == BUILT_IN_LOG2L))
8440 || (func == mpfr_exp10
8441 && (fcode == BUILT_IN_LOG10
8442 || fcode == BUILT_IN_LOG10F
8443 || fcode == BUILT_IN_LOG10L)))
8444 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8445 }
8446 }
8447
8448 return NULL_TREE;
8449 }
8450
8451 /* Return true if VAR is a VAR_DECL or a component thereof. */
8452
8453 static bool
8454 var_decl_component_p (tree var)
8455 {
8456 tree inner = var;
8457 while (handled_component_p (inner))
8458 inner = TREE_OPERAND (inner, 0);
8459 return SSA_VAR_P (inner);
8460 }
8461
8462 /* Fold function call to builtin memset. Return
8463 NULL_TREE if no simplification can be made. */
8464
8465 static tree
8466 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8467 tree type, bool ignore)
8468 {
8469 tree var, ret, etype;
8470 unsigned HOST_WIDE_INT length, cval;
8471
8472 if (! validate_arg (dest, POINTER_TYPE)
8473 || ! validate_arg (c, INTEGER_TYPE)
8474 || ! validate_arg (len, INTEGER_TYPE))
8475 return NULL_TREE;
8476
8477 if (! host_integerp (len, 1))
8478 return NULL_TREE;
8479
8480 /* If the LEN parameter is zero, return DEST. */
8481 if (integer_zerop (len))
8482 return omit_one_operand_loc (loc, type, dest, c);
8483
8484 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8485 return NULL_TREE;
8486
8487 var = dest;
8488 STRIP_NOPS (var);
8489 if (TREE_CODE (var) != ADDR_EXPR)
8490 return NULL_TREE;
8491
8492 var = TREE_OPERAND (var, 0);
8493 if (TREE_THIS_VOLATILE (var))
8494 return NULL_TREE;
8495
8496 etype = TREE_TYPE (var);
8497 if (TREE_CODE (etype) == ARRAY_TYPE)
8498 etype = TREE_TYPE (etype);
8499
8500 if (!INTEGRAL_TYPE_P (etype)
8501 && !POINTER_TYPE_P (etype))
8502 return NULL_TREE;
8503
8504 if (! var_decl_component_p (var))
8505 return NULL_TREE;
8506
8507 length = tree_low_cst (len, 1);
8508 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8509 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8510 return NULL_TREE;
8511
8512 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8513 return NULL_TREE;
8514
8515 if (integer_zerop (c))
8516 cval = 0;
8517 else
8518 {
8519 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8520 return NULL_TREE;
8521
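/* Replicate the low byte of C across the word. The last shift is
written (cval << 31) << 1 rather than cval << 32 so it stays well
defined when HOST_WIDE_INT is only 32 bits wide; with a 64-bit
HOST_WIDE_INT it copies the low half into the high half. */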
8522 cval = TREE_INT_CST_LOW (c);
8523 cval &= 0xff;
8524 cval |= cval << 8;
8525 cval |= cval << 16;
8526 cval |= (cval << 31) << 1;
8527 }
8528
8529 ret = build_int_cst_type (etype, cval);
8530 var = build_fold_indirect_ref_loc (loc,
8531 fold_convert_loc (loc,
8532 build_pointer_type (etype),
8533 dest));
8534 ret = build2 (MODIFY_EXPR, etype, var, ret);
8535 if (ignore)
8536 return ret;
8537
8538 return omit_one_operand_loc (loc, type, dest, ret);
8539 }
8540
8541 /* Fold function call to builtin bzero. Return
8542 NULL_TREE if no simplification can be made. */
8543
8544 static tree
8545 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8546 {
8547 if (! validate_arg (dest, POINTER_TYPE)
8548 || ! validate_arg (size, INTEGER_TYPE))
8549 return NULL_TREE;
8550
8551 if (!ignore)
8552 return NULL_TREE;
8553
8554 /* New argument list transforming bzero(ptr x, int y) to
8555 memset(ptr x, int 0, size_t y). This is done this way
8556 so that if it isn't expanded inline, we fall back to
8557 calling bzero instead of memset. */
8558
8559 return fold_builtin_memset (loc, dest, integer_zero_node,
8560 fold_convert_loc (loc, size_type_node, size),
8561 void_type_node, ignore);
8562 }
8563
8564 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8565 NULL_TREE if no simplification can be made.
8566 If ENDP is 0, return DEST (like memcpy).
8567 If ENDP is 1, return DEST+LEN (like mempcpy).
8568 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8569 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8570 (memmove). */
8571
8572 static tree
8573 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8574 tree len, tree type, bool ignore, int endp)
8575 {
8576 tree destvar, srcvar, expr;
8577
8578 if (! validate_arg (dest, POINTER_TYPE)
8579 || ! validate_arg (src, POINTER_TYPE)
8580 || ! validate_arg (len, INTEGER_TYPE))
8581 return NULL_TREE;
8582
8583 /* If the LEN parameter is zero, return DEST. */
8584 if (integer_zerop (len))
8585 return omit_one_operand_loc (loc, type, dest, src);
8586
8587 /* If SRC and DEST are the same (and not volatile), return
8588 DEST{,+LEN,+LEN-1}. */
8589 if (operand_equal_p (src, dest, 0))
8590 expr = len;
8591 else
8592 {
8593 tree srctype, desttype;
8594 unsigned int src_align, dest_align;
8595 tree off0;
8596
8597 if (endp == 3)
8598 {
8599 src_align = get_pointer_alignment (src);
8600 dest_align = get_pointer_alignment (dest);
8601
8602 /* Both DEST and SRC must be pointer types.
8603 ??? This is what old code did. Is the testing for pointer types
8604 really mandatory?
8605
8606 If either SRC is readonly or length is 1, we can use memcpy. */
8607 if (!dest_align || !src_align)
8608 return NULL_TREE;
8609 if (readonly_data_expr (src)
8610 || (host_integerp (len, 1)
8611 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8612 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8613 {
8614 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8615 if (!fn)
8616 return NULL_TREE;
8617 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8618 }
8619
8620 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8621 if (TREE_CODE (src) == ADDR_EXPR
8622 && TREE_CODE (dest) == ADDR_EXPR)
8623 {
8624 tree src_base, dest_base, fn;
8625 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8626 HOST_WIDE_INT size = -1;
8627 HOST_WIDE_INT maxsize = -1;
8628
8629 srcvar = TREE_OPERAND (src, 0);
8630 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8631 &size, &maxsize);
8632 destvar = TREE_OPERAND (dest, 0);
8633 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8634 &size, &maxsize);
8635 if (host_integerp (len, 1))
8636 maxsize = tree_low_cst (len, 1);
8637 else
8638 maxsize = -1;
8639 src_offset /= BITS_PER_UNIT;
8640 dest_offset /= BITS_PER_UNIT;
8641 if (SSA_VAR_P (src_base)
8642 && SSA_VAR_P (dest_base))
8643 {
8644 if (operand_equal_p (src_base, dest_base, 0)
8645 && ranges_overlap_p (src_offset, maxsize,
8646 dest_offset, maxsize))
8647 return NULL_TREE;
8648 }
8649 else if (TREE_CODE (src_base) == MEM_REF
8650 && TREE_CODE (dest_base) == MEM_REF)
8651 {
8652 double_int off;
8653 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8654 TREE_OPERAND (dest_base, 0), 0))
8655 return NULL_TREE;
8656 off = mem_ref_offset (src_base) +
8657 double_int::from_shwi (src_offset);
8658 if (!off.fits_shwi ())
8659 return NULL_TREE;
8660 src_offset = off.low;
8661 off = mem_ref_offset (dest_base) +
8662 double_int::from_shwi (dest_offset);
8663 if (!off.fits_shwi ())
8664 return NULL_TREE;
8665 dest_offset = off.low;
8666 if (ranges_overlap_p (src_offset, maxsize,
8667 dest_offset, maxsize))
8668 return NULL_TREE;
8669 }
8670 else
8671 return NULL_TREE;
8672
8673 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8674 if (!fn)
8675 return NULL_TREE;
8676 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8677 }
8678
8679 /* If the destination and source do not alias optimize into
8680 memcpy as well. */
8681 if ((is_gimple_min_invariant (dest)
8682 || TREE_CODE (dest) == SSA_NAME)
8683 && (is_gimple_min_invariant (src)
8684 || TREE_CODE (src) == SSA_NAME))
8685 {
8686 ao_ref destr, srcr;
8687 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8688 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8689 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8690 {
8691 tree fn;
8692 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8693 if (!fn)
8694 return NULL_TREE;
8695 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8696 }
8697 }
8698
8699 return NULL_TREE;
8700 }
8701
8702 if (!host_integerp (len, 0))
8703 return NULL_TREE;
8704 /* FIXME:
8705 This logic loses for arguments like (type *) malloc (sizeof (type)),
8706 since we strip the casts off the VOID return value from malloc.
8707 Perhaps we ought to inherit the type from the non-VOID argument here? */
8708 STRIP_NOPS (src);
8709 STRIP_NOPS (dest);
8710 if (!POINTER_TYPE_P (TREE_TYPE (src))
8711 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8712 return NULL_TREE;
8713 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
8714 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8715 {
8716 tree tem = TREE_OPERAND (src, 0);
8717 STRIP_NOPS (tem);
8718 if (tem != TREE_OPERAND (src, 0))
8719 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8720 }
8721 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8722 {
8723 tree tem = TREE_OPERAND (dest, 0);
8724 STRIP_NOPS (tem);
8725 if (tem != TREE_OPERAND (dest, 0))
8726 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8727 }
8728 srctype = TREE_TYPE (TREE_TYPE (src));
8729 if (TREE_CODE (srctype) == ARRAY_TYPE
8730 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8731 {
8732 srctype = TREE_TYPE (srctype);
8733 STRIP_NOPS (src);
8734 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8735 }
8736 desttype = TREE_TYPE (TREE_TYPE (dest));
8737 if (TREE_CODE (desttype) == ARRAY_TYPE
8738 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8739 {
8740 desttype = TREE_TYPE (desttype);
8741 STRIP_NOPS (dest);
8742 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8743 }
8744 if (TREE_ADDRESSABLE (srctype)
8745 || TREE_ADDRESSABLE (desttype))
8746 return NULL_TREE;
8747
8748 src_align = get_pointer_alignment (src);
8749 dest_align = get_pointer_alignment (dest);
8750 if (dest_align < TYPE_ALIGN (desttype)
8751 || src_align < TYPE_ALIGN (srctype))
8752 return NULL_TREE;
8753
8754 if (!ignore)
8755 dest = builtin_save_expr (dest);
8756
8757 /* Build accesses at offset zero with a ref-all character type. */
8758 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8759 ptr_mode, true), 0);
8760
8761 destvar = dest;
8762 STRIP_NOPS (destvar);
8763 if (TREE_CODE (destvar) == ADDR_EXPR
8764 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8765 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8766 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8767 else
8768 destvar = NULL_TREE;
8769
8770 srcvar = src;
8771 STRIP_NOPS (srcvar);
8772 if (TREE_CODE (srcvar) == ADDR_EXPR
8773 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8774 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8775 {
8776 if (!destvar
8777 || src_align >= TYPE_ALIGN (desttype))
8778 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8779 srcvar, off0);
8780 else if (!STRICT_ALIGNMENT)
8781 {
8782 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8783 src_align);
8784 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8785 }
8786 else
8787 srcvar = NULL_TREE;
8788 }
8789 else
8790 srcvar = NULL_TREE;
8791
8792 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8793 return NULL_TREE;
8794
8795 if (srcvar == NULL_TREE)
8796 {
8797 STRIP_NOPS (src);
8798 if (src_align >= TYPE_ALIGN (desttype))
8799 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8800 else
8801 {
8802 if (STRICT_ALIGNMENT)
8803 return NULL_TREE;
8804 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8805 src_align);
8806 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8807 }
8808 }
8809 else if (destvar == NULL_TREE)
8810 {
8811 STRIP_NOPS (dest);
8812 if (dest_align >= TYPE_ALIGN (srctype))
8813 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8814 else
8815 {
8816 if (STRICT_ALIGNMENT)
8817 return NULL_TREE;
8818 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8819 dest_align);
8820 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8821 }
8822 }
8823
8824 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8825 }
8826
8827 if (ignore)
8828 return expr;
8829
8830 if (endp == 0 || endp == 3)
8831 return omit_one_operand_loc (loc, type, dest, expr);
8832
8833 if (expr == len)
8834 expr = NULL_TREE;
8835
8836 if (endp == 2)
8837 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8838 ssize_int (1));
8839
8840 dest = fold_build_pointer_plus_loc (loc, dest, len);
8841 dest = fold_convert_loc (loc, type, dest);
8842 if (expr)
8843 dest = omit_one_operand_loc (loc, type, dest, expr);
8844 return dest;
8845 }
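
/* Illustrative sketch, not part of the original source: when the fold
   above succeeds, the ENDP tail yields, per the interface comment,

     memcpy (d, s, n)   ->  d              (ENDP == 0)
     mempcpy (d, s, n)  ->  d + n          (ENDP == 1)
     ENDP == 2          ->  d + n - 1      (the stpcpy convention)

   with the copy itself kept for its side effects via
   omit_one_operand_loc.  */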
8846
8847 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8848 If LEN is not NULL, it represents the length of the string to be
8849 copied. Return NULL_TREE if no simplification can be made. */
8850
8851 tree
8852 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8853 {
8854 tree fn;
8855
8856 if (!validate_arg (dest, POINTER_TYPE)
8857 || !validate_arg (src, POINTER_TYPE))
8858 return NULL_TREE;
8859
8860 /* If SRC and DEST are the same (and not volatile), return DEST. */
8861 if (operand_equal_p (src, dest, 0))
8862 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8863
8864 if (optimize_function_for_size_p (cfun))
8865 return NULL_TREE;
8866
8867 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8868 if (!fn)
8869 return NULL_TREE;
8870
8871 if (!len)
8872 {
8873 len = c_strlen (src, 1);
8874 if (! len || TREE_SIDE_EFFECTS (len))
8875 return NULL_TREE;
8876 }
8877
8878 len = fold_convert_loc (loc, size_type_node, len);
8879 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
8880 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8881 build_call_expr_loc (loc, fn, 3, dest, src, len));
8882 }
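
/* Illustrative sketch, not part of the original source: with a constant
   source the fold above turns

     strcpy (d, "hi")

   into the equivalent of

     memcpy (d, "hi", 3)

   since c_strlen gives 2 and one byte is added for the terminating
   NUL.  */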
8883
8884 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8885 Return NULL_TREE if no simplification can be made. */
8886
8887 static tree
8888 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8889 {
8890 tree fn, len, lenp1, call, type;
8891
8892 if (!validate_arg (dest, POINTER_TYPE)
8893 || !validate_arg (src, POINTER_TYPE))
8894 return NULL_TREE;
8895
8896 len = c_strlen (src, 1);
8897 if (!len
8898 || TREE_CODE (len) != INTEGER_CST)
8899 return NULL_TREE;
8900
8901 if (optimize_function_for_size_p (cfun)
8902 /* If length is zero it's small enough. */
8903 && !integer_zerop (len))
8904 return NULL_TREE;
8905
8906 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8907 if (!fn)
8908 return NULL_TREE;
8909
8910 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8911 fold_convert_loc (loc, size_type_node, len),
8912 build_int_cst (size_type_node, 1));
8913 /* We use dest twice in building our expression. Save it from
8914 multiple expansions. */
8915 dest = builtin_save_expr (dest);
8916 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8917
8918 type = TREE_TYPE (TREE_TYPE (fndecl));
8919 dest = fold_build_pointer_plus_loc (loc, dest, len);
8920 dest = fold_convert_loc (loc, type, dest);
8921 dest = omit_one_operand_loc (loc, type, dest, call);
8922 return dest;
8923 }
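
/* Illustrative sketch, not part of the original source: for a constant
   source the fold above turns

     stpcpy (d, "hi")

   into roughly

     (memcpy (d, "hi", 3), d + 2)

   i.e. the copy plus a pointer to the terminating NUL.  */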
8924
8925 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8926 If SLEN is not NULL, it represents the length of the source string.
8927 Return NULL_TREE if no simplification can be made. */
8928
8929 tree
8930 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8931 tree src, tree len, tree slen)
8932 {
8933 tree fn;
8934
8935 if (!validate_arg (dest, POINTER_TYPE)
8936 || !validate_arg (src, POINTER_TYPE)
8937 || !validate_arg (len, INTEGER_TYPE))
8938 return NULL_TREE;
8939
8940 /* If the LEN parameter is zero, return DEST. */
8941 if (integer_zerop (len))
8942 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8943
8944 /* We can't compare slen with len as constants below if len is not a
8945 constant. */
8946 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8947 return NULL_TREE;
8948
8949 if (!slen)
8950 slen = c_strlen (src, 1);
8951
8952 /* Now, we must be passed a constant src ptr parameter. */
8953 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8954 return NULL_TREE;
8955
8956 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8957
8958 /* We do not support simplification of this case, though we do
8959 support it when expanding trees into RTL. */
8960 /* FIXME: generate a call to __builtin_memset. */
8961 if (tree_int_cst_lt (slen, len))
8962 return NULL_TREE;
8963
8964 /* OK, transform into builtin memcpy. */
8965 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8966 if (!fn)
8967 return NULL_TREE;
8968
8969 len = fold_convert_loc (loc, size_type_node, len);
8970 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8971 build_call_expr_loc (loc, fn, 3, dest, src, len));
8972 }
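
/* Illustrative sketch, not part of the original source: with a known
   source length and no padding required,

     strncpy (d, "hi", 2)

   folds to the equivalent of

     memcpy (d, "hi", 2)

   whereas strncpy (d, "hi", 8) is left alone because the trailing
   zero-fill is not modelled here (see the FIXME above).  */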
8973
8974 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8975 arguments to the call, and TYPE is its return type.
8976 Return NULL_TREE if no simplification can be made. */
8977
8978 static tree
8979 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8980 {
8981 if (!validate_arg (arg1, POINTER_TYPE)
8982 || !validate_arg (arg2, INTEGER_TYPE)
8983 || !validate_arg (len, INTEGER_TYPE))
8984 return NULL_TREE;
8985 else
8986 {
8987 const char *p1;
8988
8989 if (TREE_CODE (arg2) != INTEGER_CST
8990 || !host_integerp (len, 1))
8991 return NULL_TREE;
8992
8993 p1 = c_getstr (arg1);
8994 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8995 {
8996 char c;
8997 const char *r;
8998 tree tem;
8999
9000 if (target_char_cast (arg2, &c))
9001 return NULL_TREE;
9002
9003 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9004
9005 if (r == NULL)
9006 return build_int_cst (TREE_TYPE (arg1), 0);
9007
9008 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9009 return fold_convert_loc (loc, type, tem);
9010 }
9011 return NULL_TREE;
9012 }
9013 }
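
/* Illustrative sketch, not part of the original source: with constant
   arguments,

     memchr ("hello", 'l', 5)

   folds to the constant pointer "hello" + 2, while a character that
   does not occur within LEN bytes folds to a null pointer.  */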
9014
9015 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9016 Return NULL_TREE if no simplification can be made. */
9017
9018 static tree
9019 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9020 {
9021 const char *p1, *p2;
9022
9023 if (!validate_arg (arg1, POINTER_TYPE)
9024 || !validate_arg (arg2, POINTER_TYPE)
9025 || !validate_arg (len, INTEGER_TYPE))
9026 return NULL_TREE;
9027
9028 /* If the LEN parameter is zero, return zero. */
9029 if (integer_zerop (len))
9030 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9031 arg1, arg2);
9032
9033 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9034 if (operand_equal_p (arg1, arg2, 0))
9035 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9036
9037 p1 = c_getstr (arg1);
9038 p2 = c_getstr (arg2);
9039
9040 /* If all arguments are constant, and the value of len is not greater
9041 than the lengths of arg1 and arg2, evaluate at compile-time. */
9042 if (host_integerp (len, 1) && p1 && p2
9043 && compare_tree_int (len, strlen (p1) + 1) <= 0
9044 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9045 {
9046 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9047
9048 if (r > 0)
9049 return integer_one_node;
9050 else if (r < 0)
9051 return integer_minus_one_node;
9052 else
9053 return integer_zero_node;
9054 }
9055
9056 /* If the len parameter is one, return an expression corresponding to
9057 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9058 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9059 {
9060 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9061 tree cst_uchar_ptr_node
9062 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9063
9064 tree ind1
9065 = fold_convert_loc (loc, integer_type_node,
9066 build1 (INDIRECT_REF, cst_uchar_node,
9067 fold_convert_loc (loc,
9068 cst_uchar_ptr_node,
9069 arg1)));
9070 tree ind2
9071 = fold_convert_loc (loc, integer_type_node,
9072 build1 (INDIRECT_REF, cst_uchar_node,
9073 fold_convert_loc (loc,
9074 cst_uchar_ptr_node,
9075 arg2)));
9076 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9077 }
9078
9079 return NULL_TREE;
9080 }
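
/* Illustrative sketch, not part of the original source: with all
   operands constant,

     memcmp ("abc", "abd", 3)

   folds to -1 at compile time, and for LEN == 1 the call becomes the
   difference of the two bytes read through const unsigned char
   pointers, as built above.  */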
9081
9082 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9083 Return NULL_TREE if no simplification can be made. */
9084
9085 static tree
9086 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9087 {
9088 const char *p1, *p2;
9089
9090 if (!validate_arg (arg1, POINTER_TYPE)
9091 || !validate_arg (arg2, POINTER_TYPE))
9092 return NULL_TREE;
9093
9094 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9095 if (operand_equal_p (arg1, arg2, 0))
9096 return integer_zero_node;
9097
9098 p1 = c_getstr (arg1);
9099 p2 = c_getstr (arg2);
9100
9101 if (p1 && p2)
9102 {
9103 const int i = strcmp (p1, p2);
9104 if (i < 0)
9105 return integer_minus_one_node;
9106 else if (i > 0)
9107 return integer_one_node;
9108 else
9109 return integer_zero_node;
9110 }
9111
9112 /* If the second arg is "", return *(const unsigned char*)arg1. */
9113 if (p2 && *p2 == '\0')
9114 {
9115 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9116 tree cst_uchar_ptr_node
9117 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9118
9119 return fold_convert_loc (loc, integer_type_node,
9120 build1 (INDIRECT_REF, cst_uchar_node,
9121 fold_convert_loc (loc,
9122 cst_uchar_ptr_node,
9123 arg1)));
9124 }
9125
9126 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9127 if (p1 && *p1 == '\0')
9128 {
9129 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9130 tree cst_uchar_ptr_node
9131 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9132
9133 tree temp
9134 = fold_convert_loc (loc, integer_type_node,
9135 build1 (INDIRECT_REF, cst_uchar_node,
9136 fold_convert_loc (loc,
9137 cst_uchar_ptr_node,
9138 arg2)));
9139 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9140 }
9141
9142 return NULL_TREE;
9143 }
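
/* Illustrative sketch, not part of the original source: following the
   cases above,

     strcmp ("abc", "abd")  ->  -1
     strcmp (s, "")         ->  *(const unsigned char *) s  */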
9144
9145 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9146 Return NULL_TREE if no simplification can be made. */
9147
9148 static tree
9149 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9150 {
9151 const char *p1, *p2;
9152
9153 if (!validate_arg (arg1, POINTER_TYPE)
9154 || !validate_arg (arg2, POINTER_TYPE)
9155 || !validate_arg (len, INTEGER_TYPE))
9156 return NULL_TREE;
9157
9158 /* If the LEN parameter is zero, return zero. */
9159 if (integer_zerop (len))
9160 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9161 arg1, arg2);
9162
9163 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9164 if (operand_equal_p (arg1, arg2, 0))
9165 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9166
9167 p1 = c_getstr (arg1);
9168 p2 = c_getstr (arg2);
9169
9170 if (host_integerp (len, 1) && p1 && p2)
9171 {
9172 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9173 if (i > 0)
9174 return integer_one_node;
9175 else if (i < 0)
9176 return integer_minus_one_node;
9177 else
9178 return integer_zero_node;
9179 }
9180
9181 /* If the second arg is "", and the length is greater than zero,
9182 return *(const unsigned char*)arg1. */
9183 if (p2 && *p2 == '\0'
9184 && TREE_CODE (len) == INTEGER_CST
9185 && tree_int_cst_sgn (len) == 1)
9186 {
9187 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9188 tree cst_uchar_ptr_node
9189 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9190
9191 return fold_convert_loc (loc, integer_type_node,
9192 build1 (INDIRECT_REF, cst_uchar_node,
9193 fold_convert_loc (loc,
9194 cst_uchar_ptr_node,
9195 arg1)));
9196 }
9197
9198 /* If the first arg is "", and the length is greater than zero,
9199 return -*(const unsigned char*)arg2. */
9200 if (p1 && *p1 == '\0'
9201 && TREE_CODE (len) == INTEGER_CST
9202 && tree_int_cst_sgn (len) == 1)
9203 {
9204 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9205 tree cst_uchar_ptr_node
9206 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9207
9208 tree temp = fold_convert_loc (loc, integer_type_node,
9209 build1 (INDIRECT_REF, cst_uchar_node,
9210 fold_convert_loc (loc,
9211 cst_uchar_ptr_node,
9212 arg2)));
9213 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9214 }
9215
9216 /* If the len parameter is one, return an expression corresponding to
9217 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9218 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9219 {
9220 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9221 tree cst_uchar_ptr_node
9222 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9223
9224 tree ind1 = fold_convert_loc (loc, integer_type_node,
9225 build1 (INDIRECT_REF, cst_uchar_node,
9226 fold_convert_loc (loc,
9227 cst_uchar_ptr_node,
9228 arg1)));
9229 tree ind2 = fold_convert_loc (loc, integer_type_node,
9230 build1 (INDIRECT_REF, cst_uchar_node,
9231 fold_convert_loc (loc,
9232 cst_uchar_ptr_node,
9233 arg2)));
9234 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9235 }
9236
9237 return NULL_TREE;
9238 }
9239
9240 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9241 ARG. Return NULL_TREE if no simplification can be made. */
9242
9243 static tree
9244 fold_builtin_signbit (location_t loc, tree arg, tree type)
9245 {
9246 if (!validate_arg (arg, REAL_TYPE))
9247 return NULL_TREE;
9248
9249 /* If ARG is a compile-time constant, determine the result. */
9250 if (TREE_CODE (arg) == REAL_CST
9251 && !TREE_OVERFLOW (arg))
9252 {
9253 REAL_VALUE_TYPE c;
9254
9255 c = TREE_REAL_CST (arg);
9256 return (REAL_VALUE_NEGATIVE (c)
9257 ? build_one_cst (type)
9258 : build_zero_cst (type));
9259 }
9260
9261 /* If ARG is non-negative, the result is always zero. */
9262 if (tree_expr_nonnegative_p (arg))
9263 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9264
9265 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9266 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9267 return fold_convert (type,
9268 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9269 build_real (TREE_TYPE (arg), dconst0)));
9270
9271 return NULL_TREE;
9272 }
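
/* Illustrative sketch, not part of the original source: following the
   cases above,

     signbit (-3.0)  ->  1
     signbit (x)     ->  x < 0.0

   the latter only when x's format need not honor signed zeros.  */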
9273
9274 /* Fold function call to builtin copysign, copysignf or copysignl with
9275 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9276 be made. */
9277
9278 static tree
9279 fold_builtin_copysign (location_t loc, tree fndecl,
9280 tree arg1, tree arg2, tree type)
9281 {
9282 tree tem;
9283
9284 if (!validate_arg (arg1, REAL_TYPE)
9285 || !validate_arg (arg2, REAL_TYPE))
9286 return NULL_TREE;
9287
9288 /* copysign(X,X) is X. */
9289 if (operand_equal_p (arg1, arg2, 0))
9290 return fold_convert_loc (loc, type, arg1);
9291
9292 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9293 if (TREE_CODE (arg1) == REAL_CST
9294 && TREE_CODE (arg2) == REAL_CST
9295 && !TREE_OVERFLOW (arg1)
9296 && !TREE_OVERFLOW (arg2))
9297 {
9298 REAL_VALUE_TYPE c1, c2;
9299
9300 c1 = TREE_REAL_CST (arg1);
9301 c2 = TREE_REAL_CST (arg2);
9302 /* c1.sign := c2.sign. */
9303 real_copysign (&c1, &c2);
9304 return build_real (type, c1);
9305 }
9306
9307 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9308 Remember to evaluate Y for side-effects. */
9309 if (tree_expr_nonnegative_p (arg2))
9310 return omit_one_operand_loc (loc, type,
9311 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9312 arg2);
9313
9314 /* Strip sign changing operations for the first argument. */
9315 tem = fold_strip_sign_ops (arg1);
9316 if (tem)
9317 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9318
9319 return NULL_TREE;
9320 }
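
/* Illustrative sketch, not part of the original source: following the
   cases above,

     copysign (x, x)       ->  x
     copysign (-2.0, 3.0)  ->  2.0        (constant folded)
     copysign (x, 2.0)     ->  fabs (x)   (second argument known
                                           non-negative)  */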
9321
9322 /* Fold a call to builtin isascii with argument ARG. */
9323
9324 static tree
9325 fold_builtin_isascii (location_t loc, tree arg)
9326 {
9327 if (!validate_arg (arg, INTEGER_TYPE))
9328 return NULL_TREE;
9329 else
9330 {
9331 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9332 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9333 build_int_cst (integer_type_node,
9334 ~ (unsigned HOST_WIDE_INT) 0x7f));
9335 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9336 arg, integer_zero_node);
9337 }
9338 }
9339
9340 /* Fold a call to builtin toascii with argument ARG. */
9341
9342 static tree
9343 fold_builtin_toascii (location_t loc, tree arg)
9344 {
9345 if (!validate_arg (arg, INTEGER_TYPE))
9346 return NULL_TREE;
9347
9348 /* Transform toascii(c) -> (c & 0x7f). */
9349 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9350 build_int_cst (integer_type_node, 0x7f));
9351 }
9352
9353 /* Fold a call to builtin isdigit with argument ARG. */
9354
9355 static tree
9356 fold_builtin_isdigit (location_t loc, tree arg)
9357 {
9358 if (!validate_arg (arg, INTEGER_TYPE))
9359 return NULL_TREE;
9360 else
9361 {
9362 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9363 /* According to the C standard, isdigit is unaffected by locale.
9364 However, it definitely is affected by the target character set. */
9365 unsigned HOST_WIDE_INT target_digit0
9366 = lang_hooks.to_target_charset ('0');
9367
9368 if (target_digit0 == 0)
9369 return NULL_TREE;
9370
9371 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9372 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9373 build_int_cst (unsigned_type_node, target_digit0));
9374 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9375 build_int_cst (unsigned_type_node, 9));
9376 }
9377 }
9378
9379 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9380
9381 static tree
9382 fold_builtin_fabs (location_t loc, tree arg, tree type)
9383 {
9384 if (!validate_arg (arg, REAL_TYPE))
9385 return NULL_TREE;
9386
9387 arg = fold_convert_loc (loc, type, arg);
9388 if (TREE_CODE (arg) == REAL_CST)
9389 return fold_abs_const (arg, type);
9390 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9391 }
9392
9393 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9394
9395 static tree
9396 fold_builtin_abs (location_t loc, tree arg, tree type)
9397 {
9398 if (!validate_arg (arg, INTEGER_TYPE))
9399 return NULL_TREE;
9400
9401 arg = fold_convert_loc (loc, type, arg);
9402 if (TREE_CODE (arg) == INTEGER_CST)
9403 return fold_abs_const (arg, type);
9404 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9405 }
9406
9407 /* Fold a fma operation with arguments ARG[012]. */
9408
9409 tree
9410 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9411 tree type, tree arg0, tree arg1, tree arg2)
9412 {
9413 if (TREE_CODE (arg0) == REAL_CST
9414 && TREE_CODE (arg1) == REAL_CST
9415 && TREE_CODE (arg2) == REAL_CST)
9416 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9417
9418 return NULL_TREE;
9419 }
9420
9421 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9422
9423 static tree
9424 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9425 {
9426 if (validate_arg (arg0, REAL_TYPE)
9427 && validate_arg (arg1, REAL_TYPE)
9428 && validate_arg (arg2, REAL_TYPE))
9429 {
9430 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9431 if (tem)
9432 return tem;
9433
9434 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9435 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9436 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9437 }
9438 return NULL_TREE;
9439 }
9440
9441 /* Fold a call to builtin fmin or fmax. */
9442
9443 static tree
9444 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9445 tree type, bool max)
9446 {
9447 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9448 {
9449 /* Calculate the result when the argument is a constant. */
9450 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9451
9452 if (res)
9453 return res;
9454
9455 /* If either argument is NaN, return the other one. Avoid the
9456 transformation if we get (and honor) a signalling NaN. Using
9457 omit_one_operand() ensures we create a non-lvalue. */
9458 if (TREE_CODE (arg0) == REAL_CST
9459 && real_isnan (&TREE_REAL_CST (arg0))
9460 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9461 || ! TREE_REAL_CST (arg0).signalling))
9462 return omit_one_operand_loc (loc, type, arg1, arg0);
9463 if (TREE_CODE (arg1) == REAL_CST
9464 && real_isnan (&TREE_REAL_CST (arg1))
9465 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9466 || ! TREE_REAL_CST (arg1).signalling))
9467 return omit_one_operand_loc (loc, type, arg0, arg1);
9468
9469 /* Transform fmin/fmax(x,x) -> x. */
9470 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9471 return omit_one_operand_loc (loc, type, arg0, arg1);
9472
9473 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9474 functions to return the numeric arg if the other one is NaN.
9475 These tree codes don't honor that, so only transform if
9476 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9477 handled, so we don't have to worry about it either. */
9478 if (flag_finite_math_only)
9479 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9480 fold_convert_loc (loc, type, arg0),
9481 fold_convert_loc (loc, type, arg1));
9482 }
9483 return NULL_TREE;
9484 }
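
/* Illustrative sketch, not part of the original source: a quiet NaN
   argument simply selects the other operand, e.g.

     fmin (x, __builtin_nan (""))  ->  x

   and with -ffinite-math-only, fmax (x, y) becomes MAX_EXPR <x, y>.  */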
9485
9486 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9487
9488 static tree
9489 fold_builtin_carg (location_t loc, tree arg, tree type)
9490 {
9491 if (validate_arg (arg, COMPLEX_TYPE)
9492 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9493 {
9494 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9495
9496 if (atan2_fn)
9497 {
9498 tree new_arg = builtin_save_expr (arg);
9499 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9500 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9501 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9502 }
9503 }
9504
9505 return NULL_TREE;
9506 }
9507
9508 /* Fold a call to builtin logb/ilogb. */
9509
9510 static tree
9511 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9512 {
9513 if (! validate_arg (arg, REAL_TYPE))
9514 return NULL_TREE;
9515
9516 STRIP_NOPS (arg);
9517
9518 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9519 {
9520 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9521
9522 switch (value->cl)
9523 {
9524 case rvc_nan:
9525 case rvc_inf:
9526 /* If arg is Inf or NaN and we're logb, return it. */
9527 if (TREE_CODE (rettype) == REAL_TYPE)
9528 {
9529 /* For logb(-Inf) we have to return +Inf. */
9530 if (real_isinf (value) && real_isneg (value))
9531 {
9532 REAL_VALUE_TYPE tem;
9533 real_inf (&tem);
9534 return build_real (rettype, tem);
9535 }
9536 return fold_convert_loc (loc, rettype, arg);
9537 }
9538 /* Fall through... */
9539 case rvc_zero:
9540 /* Zero may set errno and/or raise an exception for logb; also,
9541 for ilogb we don't know FP_ILOGB0. */
9542 return NULL_TREE;
9543 case rvc_normal:
9544 /* For normal numbers, proceed iff radix == 2. In GCC,
9545 normalized significands are in the range [0.5, 1.0). We
9546 want the exponent as if they were [1.0, 2.0) so get the
9547 exponent and subtract 1. */
9548 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9549 return fold_convert_loc (loc, rettype,
9550 build_int_cst (integer_type_node,
9551 REAL_EXP (value)-1));
9552 break;
9553 }
9554 }
9555
9556 return NULL_TREE;
9557 }
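
/* Illustrative sketch, not part of the original source: GCC stores
   8.0 as 0.5 * 2**4, so REAL_EXP is 4 and the fold above yields

     logb (8.0)  ->  3.0

   matching the [1.0, 2.0) significand convention that logb uses.  */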
9558
9559 /* Fold a call to builtin significand, if radix == 2. */
9560
9561 static tree
9562 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9563 {
9564 if (! validate_arg (arg, REAL_TYPE))
9565 return NULL_TREE;
9566
9567 STRIP_NOPS (arg);
9568
9569 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9570 {
9571 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9572
9573 switch (value->cl)
9574 {
9575 case rvc_zero:
9576 case rvc_nan:
9577 case rvc_inf:
9578 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9579 return fold_convert_loc (loc, rettype, arg);
9580 case rvc_normal:
9581 /* For normal numbers, proceed iff radix == 2. */
9582 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9583 {
9584 REAL_VALUE_TYPE result = *value;
9585 /* In GCC, normalized significands are in the range [0.5,
9586 1.0). We want them to be [1.0, 2.0) so set the
9587 exponent to 1. */
9588 SET_REAL_EXP (&result, 1);
9589 return build_real (rettype, result);
9590 }
9591 break;
9592 }
9593 }
9594
9595 return NULL_TREE;
9596 }
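
/* Illustrative sketch, not part of the original source: forcing the
   exponent to 1 rescales 8.0 = 0.5 * 2**4 to 0.5 * 2**1, so

     significand (8.0)  ->  1.0

   when the radix is 2.  */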
9597
9598 /* Fold a call to builtin frexp; we can assume the base is 2. */
9599
9600 static tree
9601 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9602 {
9603 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9604 return NULL_TREE;
9605
9606 STRIP_NOPS (arg0);
9607
9608 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9609 return NULL_TREE;
9610
9611 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9612
9613 /* Proceed if a valid pointer type was passed in. */
9614 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9615 {
9616 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9617 tree frac, exp;
9618
9619 switch (value->cl)
9620 {
9621 case rvc_zero:
9622 /* For +-0, return (*exp = 0, +-0). */
9623 exp = integer_zero_node;
9624 frac = arg0;
9625 break;
9626 case rvc_nan:
9627 case rvc_inf:
9628 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9629 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9630 case rvc_normal:
9631 {
9632 /* Since the frexp function always expects base 2, and in
9633 GCC normalized significands are already in the range
9634 [0.5, 1.0), we have exactly what frexp wants. */
9635 REAL_VALUE_TYPE frac_rvt = *value;
9636 SET_REAL_EXP (&frac_rvt, 0);
9637 frac = build_real (rettype, frac_rvt);
9638 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9639 }
9640 break;
9641 default:
9642 gcc_unreachable ();
9643 }
9644
9645 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9646 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9647 TREE_SIDE_EFFECTS (arg1) = 1;
9648 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9649 }
9650
9651 return NULL_TREE;
9652 }
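
/* Illustrative sketch, not part of the original source: the internal
   significand of 8.0 is 0.5 with exponent 4, so the fold above builds
   the equivalent of

     frexp (8.0, &e)  ->  (*e = 4, 0.5)

   as a COMPOUND_EXPR.  */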
9653
9654 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9655 then we can assume the base is two. If it's false, then we have to
9656 check the mode of the TYPE parameter in certain cases. */
9657
9658 static tree
9659 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9660 tree type, bool ldexp)
9661 {
9662 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9663 {
9664 STRIP_NOPS (arg0);
9665 STRIP_NOPS (arg1);
9666
9667 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9668 if (real_zerop (arg0) || integer_zerop (arg1)
9669 || (TREE_CODE (arg0) == REAL_CST
9670 && !real_isfinite (&TREE_REAL_CST (arg0))))
9671 return omit_one_operand_loc (loc, type, arg0, arg1);
9672
9673 /* If both arguments are constant, then try to evaluate it. */
9674 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9675 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9676 && host_integerp (arg1, 0))
9677 {
9678 /* Bound the maximum adjustment to twice the range of the
9679 mode's valid exponents. Use abs to ensure the range is
9680 positive as a sanity check. */
9681 const long max_exp_adj = 2 *
9682 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9683 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9684
9685 /* Get the user-requested adjustment. */
9686 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9687
9688 /* The requested adjustment must be inside this range. This
9689 is a preliminary cap to avoid things like overflow; we
9690 may still fail to compute the result for other reasons. */
9691 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9692 {
9693 REAL_VALUE_TYPE initial_result;
9694
9695 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9696
9697 /* Ensure we didn't overflow. */
9698 if (! real_isinf (&initial_result))
9699 {
9700 const REAL_VALUE_TYPE trunc_result
9701 = real_value_truncate (TYPE_MODE (type), initial_result);
9702
9703 /* Only proceed if the target mode can hold the
9704 resulting value. */
9705 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9706 return build_real (type, trunc_result);
9707 }
9708 }
9709 }
9710 }
9711
9712 return NULL_TREE;
9713 }
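
/* Illustrative sketch, not part of the original source: with both
   arguments constant,

     ldexp (1.5, 2)  ->  6.0

   i.e. 1.5 * 2**2, provided the result is finite and representable in
   the target mode.  */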
9714
9715 /* Fold a call to builtin modf. */
9716
9717 static tree
9718 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9719 {
9720 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9721 return NULL_TREE;
9722
9723 STRIP_NOPS (arg0);
9724
9725 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9726 return NULL_TREE;
9727
9728 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9729
9730 /* Proceed if a valid pointer type was passed in. */
9731 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9732 {
9733 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9734 REAL_VALUE_TYPE trunc, frac;
9735
9736 switch (value->cl)
9737 {
9738 case rvc_nan:
9739 case rvc_zero:
9740 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9741 trunc = frac = *value;
9742 break;
9743 case rvc_inf:
9744 /* For +-Inf, return (*arg1 = arg0, +-0). */
9745 frac = dconst0;
9746 frac.sign = value->sign;
9747 trunc = *value;
9748 break;
9749 case rvc_normal:
9750 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9751 real_trunc (&trunc, VOIDmode, value);
9752 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9753 /* If the original number was negative and already
9754 integral, then the fractional part is -0.0. */
9755 if (value->sign && frac.cl == rvc_zero)
9756 frac.sign = value->sign;
9757 break;
9758 }
9759
9760 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9761 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9762 build_real (rettype, trunc));
9763 TREE_SIDE_EFFECTS (arg1) = 1;
9764 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9765 build_real (rettype, frac));
9766 }
9767
9768 return NULL_TREE;
9769 }
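
/* Illustrative sketch, not part of the original source: following the
   cases above,

     modf (3.25, &ip)  ->  (*ip = 3.0, 0.25)
     modf (-2.0, &ip)  ->  (*ip = -2.0, -0.0)

   the second case showing the -0.0 fraction kept for a negative
   integral input.  */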
9770
9771 /* Given a location LOC, an interclass builtin function decl FNDECL
9772 and its single argument ARG, return a folded expression computing
9773 the same, or NULL_TREE if we either couldn't or didn't want to fold
9774 (the latter happens if there's an RTL instruction available). */
9775
9776 static tree
9777 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9778 {
9779 enum machine_mode mode;
9780
9781 if (!validate_arg (arg, REAL_TYPE))
9782 return NULL_TREE;
9783
9784 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9785 return NULL_TREE;
9786
9787 mode = TYPE_MODE (TREE_TYPE (arg));
9788
9789 /* If there is no optab, try generic code. */
9790 switch (DECL_FUNCTION_CODE (fndecl))
9791 {
9792 tree result;
9793
9794 CASE_FLT_FN (BUILT_IN_ISINF):
9795 {
9796 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9797 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9798 tree const type = TREE_TYPE (arg);
9799 REAL_VALUE_TYPE r;
9800 char buf[128];
9801
9802 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9803 real_from_string (&r, buf);
9804 result = build_call_expr (isgr_fn, 2,
9805 fold_build1_loc (loc, ABS_EXPR, type, arg),
9806 build_real (type, r));
9807 return result;
9808 }
9809 CASE_FLT_FN (BUILT_IN_FINITE):
9810 case BUILT_IN_ISFINITE:
9811 {
9812 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9813 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9814 tree const type = TREE_TYPE (arg);
9815 REAL_VALUE_TYPE r;
9816 char buf[128];
9817
9818 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9819 real_from_string (&r, buf);
9820 result = build_call_expr (isle_fn, 2,
9821 fold_build1_loc (loc, ABS_EXPR, type, arg),
9822 build_real (type, r));
9823 /*result = fold_build2_loc (loc, UNGT_EXPR,
9824 TREE_TYPE (TREE_TYPE (fndecl)),
9825 fold_build1_loc (loc, ABS_EXPR, type, arg),
9826 build_real (type, r));
9827 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9828 TREE_TYPE (TREE_TYPE (fndecl)),
9829 result);*/
9830 return result;
9831 }
9832 case BUILT_IN_ISNORMAL:
9833 {
9834 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9835 islessequal(fabs(x),DBL_MAX). */
9836 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9837 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9838 tree const type = TREE_TYPE (arg);
9839 REAL_VALUE_TYPE rmax, rmin;
9840 char buf[128];
9841
9842 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9843 real_from_string (&rmax, buf);
9844 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9845 real_from_string (&rmin, buf);
9846 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9847 result = build_call_expr (isle_fn, 2, arg,
9848 build_real (type, rmax));
9849 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9850 build_call_expr (isge_fn, 2, arg,
9851 build_real (type, rmin)));
9852 return result;
9853 }
9854 default:
9855 break;
9856 }
9857
9858 return NULL_TREE;
9859 }
9860
9861 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9862 ARG is the argument for the call. */
9863
9864 static tree
9865 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9866 {
9867 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9868 REAL_VALUE_TYPE r;
9869
9870 if (!validate_arg (arg, REAL_TYPE))
9871 return NULL_TREE;
9872
9873 switch (builtin_index)
9874 {
9875 case BUILT_IN_ISINF:
9876 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9877 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9878
9879 if (TREE_CODE (arg) == REAL_CST)
9880 {
9881 r = TREE_REAL_CST (arg);
9882 if (real_isinf (&r))
9883 return real_compare (GT_EXPR, &r, &dconst0)
9884 ? integer_one_node : integer_minus_one_node;
9885 else
9886 return integer_zero_node;
9887 }
9888
9889 return NULL_TREE;
9890
9891 case BUILT_IN_ISINF_SIGN:
9892 {
9893 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9894 /* In a boolean context, GCC will fold the inner COND_EXPR to
9895 1. So e.g. "if (isinf_sign(x))" would be folded to just
9896 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9897 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9898 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9899 tree tmp = NULL_TREE;
9900
9901 arg = builtin_save_expr (arg);
9902
9903 if (signbit_fn && isinf_fn)
9904 {
9905 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9906 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9907
9908 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9909 signbit_call, integer_zero_node);
9910 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9911 isinf_call, integer_zero_node);
9912
9913 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9914 integer_minus_one_node, integer_one_node);
9915 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9916 isinf_call, tmp,
9917 integer_zero_node);
9918 }
9919
9920 return tmp;
9921 }
9922
9923 case BUILT_IN_ISFINITE:
9924 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9925 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9926 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9927
9928 if (TREE_CODE (arg) == REAL_CST)
9929 {
9930 r = TREE_REAL_CST (arg);
9931 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9932 }
9933
9934 return NULL_TREE;
9935
9936 case BUILT_IN_ISNAN:
9937 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9938 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9939
9940 if (TREE_CODE (arg) == REAL_CST)
9941 {
9942 r = TREE_REAL_CST (arg);
9943 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9944 }
9945
9946 arg = builtin_save_expr (arg);
9947 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9948
9949 default:
9950 gcc_unreachable ();
9951 }
9952 }
9953
9954 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9955 This builtin will generate code to return the appropriate floating
9956 point classification depending on the value of the floating point
9957 number passed in. The possible return values must be supplied as
9958 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9959 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9960 one floating point argument, which is "type generic". */
9961
9962 static tree
9963 fold_builtin_fpclassify (location_t loc, tree exp)
9964 {
9965 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9966 arg, type, res, tmp;
9967 enum machine_mode mode;
9968 REAL_VALUE_TYPE r;
9969 char buf[128];
9970
9971 /* Verify the required arguments in the original call. */
9972 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9973 INTEGER_TYPE, INTEGER_TYPE,
9974 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9975 return NULL_TREE;
9976
9977 fp_nan = CALL_EXPR_ARG (exp, 0);
9978 fp_infinite = CALL_EXPR_ARG (exp, 1);
9979 fp_normal = CALL_EXPR_ARG (exp, 2);
9980 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9981 fp_zero = CALL_EXPR_ARG (exp, 4);
9982 arg = CALL_EXPR_ARG (exp, 5);
9983 type = TREE_TYPE (arg);
9984 mode = TYPE_MODE (type);
9985 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9986
9987 /* fpclassify(x) ->
9988 isnan(x) ? FP_NAN :
9989 (fabs(x) == Inf ? FP_INFINITE :
9990 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9991 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9992
9993 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9994 build_real (type, dconst0));
9995 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9996 tmp, fp_zero, fp_subnormal);
9997
9998 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9999 real_from_string (&r, buf);
10000 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10001 arg, build_real (type, r));
10002 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10003
10004 if (HONOR_INFINITIES (mode))
10005 {
10006 real_inf (&r);
10007 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10008 build_real (type, r));
10009 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10010 fp_infinite, res);
10011 }
10012
10013 if (HONOR_NANS (mode))
10014 {
10015 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10016 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10017 }
10018
10019 return res;
10020 }
10021
10022 /* Fold a call to an unordered comparison function such as
10023 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10024 being called and ARG0 and ARG1 are the arguments for the call.
10025 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10026 the opposite of the desired result. UNORDERED_CODE is used
10027 for modes that can hold NaNs and ORDERED_CODE is used for
10028 the rest. */
10029
10030 static tree
10031 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10032 enum tree_code unordered_code,
10033 enum tree_code ordered_code)
10034 {
10035 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10036 enum tree_code code;
10037 tree type0, type1;
10038 enum tree_code code0, code1;
10039 tree cmp_type = NULL_TREE;
10040
10041 type0 = TREE_TYPE (arg0);
10042 type1 = TREE_TYPE (arg1);
10043
10044 code0 = TREE_CODE (type0);
10045 code1 = TREE_CODE (type1);
10046
10047 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10048 /* Choose the wider of two real types. */
10049 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10050 ? type0 : type1;
10051 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10052 cmp_type = type0;
10053 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10054 cmp_type = type1;
10055
10056 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10057 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10058
10059 if (unordered_code == UNORDERED_EXPR)
10060 {
10061 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10062 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10063 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10064 }
10065
10066 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10067 : ordered_code;
10068 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10069 fold_build2_loc (loc, code, type, arg0, arg1));
10070 }
10071
10072 /* Fold a call to built-in function FNDECL with 0 arguments.
10073 IGNORE is true if the result of the function call is ignored. This
10074 function returns NULL_TREE if no simplification was possible. */
10075
10076 static tree
10077 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10078 {
10079 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10080 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10081 switch (fcode)
10082 {
10083 CASE_FLT_FN (BUILT_IN_INF):
10084 case BUILT_IN_INFD32:
10085 case BUILT_IN_INFD64:
10086 case BUILT_IN_INFD128:
10087 return fold_builtin_inf (loc, type, true);
10088
10089 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10090 return fold_builtin_inf (loc, type, false);
10091
10092 case BUILT_IN_CLASSIFY_TYPE:
10093 return fold_builtin_classify_type (NULL_TREE);
10094
10095 case BUILT_IN_UNREACHABLE:
10096 if (flag_sanitize & SANITIZE_UNREACHABLE
10097 && (current_function_decl == NULL
10098 || !lookup_attribute ("no_sanitize_undefined",
10099 DECL_ATTRIBUTES (current_function_decl))))
10100 return ubsan_instrument_unreachable (loc);
10101 break;
10102
10103 default:
10104 break;
10105 }
10106 return NULL_TREE;
10107 }
10108
10109 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10110 IGNORE is true if the result of the function call is ignored. This
10111 function returns NULL_TREE if no simplification was possible. */
10112
10113 static tree
10114 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10115 {
10116 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10117 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10118 switch (fcode)
10119 {
10120 case BUILT_IN_CONSTANT_P:
10121 {
10122 tree val = fold_builtin_constant_p (arg0);
10123
10124 /* Gimplification will pull the CALL_EXPR for the builtin out of
10125 an if condition. When not optimizing, we'll not CSE it back.
10126 To avoid link-error regressions, return false now. */
10127 if (!val && !optimize)
10128 val = integer_zero_node;
10129
10130 return val;
10131 }
10132
10133 case BUILT_IN_CLASSIFY_TYPE:
10134 return fold_builtin_classify_type (arg0);
10135
10136 case BUILT_IN_STRLEN:
10137 return fold_builtin_strlen (loc, type, arg0);
10138
10139 CASE_FLT_FN (BUILT_IN_FABS):
10140 case BUILT_IN_FABSD32:
10141 case BUILT_IN_FABSD64:
10142 case BUILT_IN_FABSD128:
10143 return fold_builtin_fabs (loc, arg0, type);
10144
10145 case BUILT_IN_ABS:
10146 case BUILT_IN_LABS:
10147 case BUILT_IN_LLABS:
10148 case BUILT_IN_IMAXABS:
10149 return fold_builtin_abs (loc, arg0, type);
10150
10151 CASE_FLT_FN (BUILT_IN_CONJ):
10152 if (validate_arg (arg0, COMPLEX_TYPE)
10153 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10154 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10155 break;
10156
10157 CASE_FLT_FN (BUILT_IN_CREAL):
10158 if (validate_arg (arg0, COMPLEX_TYPE)
10159 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10160 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10161 break;
10162
10163 CASE_FLT_FN (BUILT_IN_CIMAG):
10164 if (validate_arg (arg0, COMPLEX_TYPE)
10165 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10166 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10167 break;
10168
10169 CASE_FLT_FN (BUILT_IN_CCOS):
10170 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10171
10172 CASE_FLT_FN (BUILT_IN_CCOSH):
10173 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10174
10175 CASE_FLT_FN (BUILT_IN_CPROJ):
10176 return fold_builtin_cproj (loc, arg0, type);
10177
10178 CASE_FLT_FN (BUILT_IN_CSIN):
10179 if (validate_arg (arg0, COMPLEX_TYPE)
10180 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10181 return do_mpc_arg1 (arg0, type, mpc_sin);
10182 break;
10183
10184 CASE_FLT_FN (BUILT_IN_CSINH):
10185 if (validate_arg (arg0, COMPLEX_TYPE)
10186 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10187 return do_mpc_arg1 (arg0, type, mpc_sinh);
10188 break;
10189
10190 CASE_FLT_FN (BUILT_IN_CTAN):
10191 if (validate_arg (arg0, COMPLEX_TYPE)
10192 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10193 return do_mpc_arg1 (arg0, type, mpc_tan);
10194 break;
10195
10196 CASE_FLT_FN (BUILT_IN_CTANH):
10197 if (validate_arg (arg0, COMPLEX_TYPE)
10198 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10199 return do_mpc_arg1 (arg0, type, mpc_tanh);
10200 break;
10201
10202 CASE_FLT_FN (BUILT_IN_CLOG):
10203 if (validate_arg (arg0, COMPLEX_TYPE)
10204 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10205 return do_mpc_arg1 (arg0, type, mpc_log);
10206 break;
10207
10208 CASE_FLT_FN (BUILT_IN_CSQRT):
10209 if (validate_arg (arg0, COMPLEX_TYPE)
10210 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10211 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10212 break;
10213
10214 CASE_FLT_FN (BUILT_IN_CASIN):
10215 if (validate_arg (arg0, COMPLEX_TYPE)
10216 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10217 return do_mpc_arg1 (arg0, type, mpc_asin);
10218 break;
10219
10220 CASE_FLT_FN (BUILT_IN_CACOS):
10221 if (validate_arg (arg0, COMPLEX_TYPE)
10222 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10223 return do_mpc_arg1 (arg0, type, mpc_acos);
10224 break;
10225
10226 CASE_FLT_FN (BUILT_IN_CATAN):
10227 if (validate_arg (arg0, COMPLEX_TYPE)
10228 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10229 return do_mpc_arg1 (arg0, type, mpc_atan);
10230 break;
10231
10232 CASE_FLT_FN (BUILT_IN_CASINH):
10233 if (validate_arg (arg0, COMPLEX_TYPE)
10234 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10235 return do_mpc_arg1 (arg0, type, mpc_asinh);
10236 break;
10237
10238 CASE_FLT_FN (BUILT_IN_CACOSH):
10239 if (validate_arg (arg0, COMPLEX_TYPE)
10240 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10241 return do_mpc_arg1 (arg0, type, mpc_acosh);
10242 break;
10243
10244 CASE_FLT_FN (BUILT_IN_CATANH):
10245 if (validate_arg (arg0, COMPLEX_TYPE)
10246 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10247 return do_mpc_arg1 (arg0, type, mpc_atanh);
10248 break;
10249
10250 CASE_FLT_FN (BUILT_IN_CABS):
10251 return fold_builtin_cabs (loc, arg0, type, fndecl);
10252
10253 CASE_FLT_FN (BUILT_IN_CARG):
10254 return fold_builtin_carg (loc, arg0, type);
10255
10256 CASE_FLT_FN (BUILT_IN_SQRT):
10257 return fold_builtin_sqrt (loc, arg0, type);
10258
10259 CASE_FLT_FN (BUILT_IN_CBRT):
10260 return fold_builtin_cbrt (loc, arg0, type);
10261
10262 CASE_FLT_FN (BUILT_IN_ASIN):
10263 if (validate_arg (arg0, REAL_TYPE))
10264 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10265 &dconstm1, &dconst1, true);
10266 break;
10267
10268 CASE_FLT_FN (BUILT_IN_ACOS):
10269 if (validate_arg (arg0, REAL_TYPE))
10270 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10271 &dconstm1, &dconst1, true);
10272 break;
10273
10274 CASE_FLT_FN (BUILT_IN_ATAN):
10275 if (validate_arg (arg0, REAL_TYPE))
10276 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10277 break;
10278
10279 CASE_FLT_FN (BUILT_IN_ASINH):
10280 if (validate_arg (arg0, REAL_TYPE))
10281 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10282 break;
10283
10284 CASE_FLT_FN (BUILT_IN_ACOSH):
10285 if (validate_arg (arg0, REAL_TYPE))
10286 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10287 &dconst1, NULL, true);
10288 break;
10289
10290 CASE_FLT_FN (BUILT_IN_ATANH):
10291 if (validate_arg (arg0, REAL_TYPE))
10292 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10293 &dconstm1, &dconst1, false);
10294 break;
10295
10296 CASE_FLT_FN (BUILT_IN_SIN):
10297 if (validate_arg (arg0, REAL_TYPE))
10298 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10299 break;
10300
10301 CASE_FLT_FN (BUILT_IN_COS):
10302 return fold_builtin_cos (loc, arg0, type, fndecl);
10303
10304 CASE_FLT_FN (BUILT_IN_TAN):
10305 return fold_builtin_tan (arg0, type);
10306
10307 CASE_FLT_FN (BUILT_IN_CEXP):
10308 return fold_builtin_cexp (loc, arg0, type);
10309
10310 CASE_FLT_FN (BUILT_IN_CEXPI):
10311 if (validate_arg (arg0, REAL_TYPE))
10312 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10313 break;
10314
10315 CASE_FLT_FN (BUILT_IN_SINH):
10316 if (validate_arg (arg0, REAL_TYPE))
10317 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10318 break;
10319
10320 CASE_FLT_FN (BUILT_IN_COSH):
10321 return fold_builtin_cosh (loc, arg0, type, fndecl);
10322
10323 CASE_FLT_FN (BUILT_IN_TANH):
10324 if (validate_arg (arg0, REAL_TYPE))
10325 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10326 break;
10327
10328 CASE_FLT_FN (BUILT_IN_ERF):
10329 if (validate_arg (arg0, REAL_TYPE))
10330 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10331 break;
10332
10333 CASE_FLT_FN (BUILT_IN_ERFC):
10334 if (validate_arg (arg0, REAL_TYPE))
10335 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10336 break;
10337
10338 CASE_FLT_FN (BUILT_IN_TGAMMA):
10339 if (validate_arg (arg0, REAL_TYPE))
10340 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10341 break;
10342
10343 CASE_FLT_FN (BUILT_IN_EXP):
10344 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10345
10346 CASE_FLT_FN (BUILT_IN_EXP2):
10347 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10348
10349 CASE_FLT_FN (BUILT_IN_EXP10):
10350 CASE_FLT_FN (BUILT_IN_POW10):
10351 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10352
10353 CASE_FLT_FN (BUILT_IN_EXPM1):
10354 if (validate_arg (arg0, REAL_TYPE))
10355 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10356 break;
10357
10358 CASE_FLT_FN (BUILT_IN_LOG):
10359 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10360
10361 CASE_FLT_FN (BUILT_IN_LOG2):
10362 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10363
10364 CASE_FLT_FN (BUILT_IN_LOG10):
10365 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10366
10367 CASE_FLT_FN (BUILT_IN_LOG1P):
10368 if (validate_arg (arg0, REAL_TYPE))
10369 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10370 &dconstm1, NULL, false);
10371 break;
10372
10373 CASE_FLT_FN (BUILT_IN_J0):
10374 if (validate_arg (arg0, REAL_TYPE))
10375 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10376 NULL, NULL, 0);
10377 break;
10378
10379 CASE_FLT_FN (BUILT_IN_J1):
10380 if (validate_arg (arg0, REAL_TYPE))
10381 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10382 NULL, NULL, 0);
10383 break;
10384
10385 CASE_FLT_FN (BUILT_IN_Y0):
10386 if (validate_arg (arg0, REAL_TYPE))
10387 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10388 &dconst0, NULL, false);
10389 break;
10390
10391 CASE_FLT_FN (BUILT_IN_Y1):
10392 if (validate_arg (arg0, REAL_TYPE))
10393 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10394 &dconst0, NULL, false);
10395 break;
10396
10397 CASE_FLT_FN (BUILT_IN_NAN):
10398 case BUILT_IN_NAND32:
10399 case BUILT_IN_NAND64:
10400 case BUILT_IN_NAND128:
10401 return fold_builtin_nan (arg0, type, true);
10402
10403 CASE_FLT_FN (BUILT_IN_NANS):
10404 return fold_builtin_nan (arg0, type, false);
10405
10406 CASE_FLT_FN (BUILT_IN_FLOOR):
10407 return fold_builtin_floor (loc, fndecl, arg0);
10408
10409 CASE_FLT_FN (BUILT_IN_CEIL):
10410 return fold_builtin_ceil (loc, fndecl, arg0);
10411
10412 CASE_FLT_FN (BUILT_IN_TRUNC):
10413 return fold_builtin_trunc (loc, fndecl, arg0);
10414
10415 CASE_FLT_FN (BUILT_IN_ROUND):
10416 return fold_builtin_round (loc, fndecl, arg0);
10417
10418 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10419 CASE_FLT_FN (BUILT_IN_RINT):
10420 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10421
10422 CASE_FLT_FN (BUILT_IN_ICEIL):
10423 CASE_FLT_FN (BUILT_IN_LCEIL):
10424 CASE_FLT_FN (BUILT_IN_LLCEIL):
10425 CASE_FLT_FN (BUILT_IN_LFLOOR):
10426 CASE_FLT_FN (BUILT_IN_IFLOOR):
10427 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10428 CASE_FLT_FN (BUILT_IN_IROUND):
10429 CASE_FLT_FN (BUILT_IN_LROUND):
10430 CASE_FLT_FN (BUILT_IN_LLROUND):
10431 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10432
10433 CASE_FLT_FN (BUILT_IN_IRINT):
10434 CASE_FLT_FN (BUILT_IN_LRINT):
10435 CASE_FLT_FN (BUILT_IN_LLRINT):
10436 return fold_fixed_mathfn (loc, fndecl, arg0);
10437
10438 case BUILT_IN_BSWAP16:
10439 case BUILT_IN_BSWAP32:
10440 case BUILT_IN_BSWAP64:
10441 return fold_builtin_bswap (fndecl, arg0);
10442
10443 CASE_INT_FN (BUILT_IN_FFS):
10444 CASE_INT_FN (BUILT_IN_CLZ):
10445 CASE_INT_FN (BUILT_IN_CTZ):
10446 CASE_INT_FN (BUILT_IN_CLRSB):
10447 CASE_INT_FN (BUILT_IN_POPCOUNT):
10448 CASE_INT_FN (BUILT_IN_PARITY):
10449 return fold_builtin_bitop (fndecl, arg0);
10450
10451 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10452 return fold_builtin_signbit (loc, arg0, type);
10453
10454 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10455 return fold_builtin_significand (loc, arg0, type);
10456
10457 CASE_FLT_FN (BUILT_IN_ILOGB):
10458 CASE_FLT_FN (BUILT_IN_LOGB):
10459 return fold_builtin_logb (loc, arg0, type);
10460
10461 case BUILT_IN_ISASCII:
10462 return fold_builtin_isascii (loc, arg0);
10463
10464 case BUILT_IN_TOASCII:
10465 return fold_builtin_toascii (loc, arg0);
10466
10467 case BUILT_IN_ISDIGIT:
10468 return fold_builtin_isdigit (loc, arg0);
10469
10470 CASE_FLT_FN (BUILT_IN_FINITE):
10471 case BUILT_IN_FINITED32:
10472 case BUILT_IN_FINITED64:
10473 case BUILT_IN_FINITED128:
10474 case BUILT_IN_ISFINITE:
10475 {
10476 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10477 if (ret)
10478 return ret;
10479 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10480 }
10481
10482 CASE_FLT_FN (BUILT_IN_ISINF):
10483 case BUILT_IN_ISINFD32:
10484 case BUILT_IN_ISINFD64:
10485 case BUILT_IN_ISINFD128:
10486 {
10487 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10488 if (ret)
10489 return ret;
10490 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10491 }
10492
10493 case BUILT_IN_ISNORMAL:
10494 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10495
10496 case BUILT_IN_ISINF_SIGN:
10497 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10498
10499 CASE_FLT_FN (BUILT_IN_ISNAN):
10500 case BUILT_IN_ISNAND32:
10501 case BUILT_IN_ISNAND64:
10502 case BUILT_IN_ISNAND128:
10503 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10504
10505 case BUILT_IN_PRINTF:
10506 case BUILT_IN_PRINTF_UNLOCKED:
10507 case BUILT_IN_VPRINTF:
10508 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10509
10510 case BUILT_IN_FREE:
10511 if (integer_zerop (arg0))
10512 return build_empty_stmt (loc);
10513 break;
10514
10515 default:
10516 break;
10517 }
10518
10519 return NULL_TREE;
10521 }
10522
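/* Illustration (an editorial sketch, not from the original sources):
   with the one-argument foldings above, a call such as

     free ((void *) 0);

   disappears entirely -- fold_builtin_1 replaces it with an empty
   statement -- while calls like tanh (0.5) with a constant argument
   are evaluated at compile time through MPFR.  */
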
10523 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10524 IGNORE is true if the result of the function call is ignored. This
10525 function returns NULL_TREE if no simplification was possible. */
10526
10527 static tree
10528 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10529 {
10530 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10531 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10532
10533 switch (fcode)
10534 {
10535 CASE_FLT_FN (BUILT_IN_JN):
10536 if (validate_arg (arg0, INTEGER_TYPE)
10537 && validate_arg (arg1, REAL_TYPE))
10538 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10539 break;
10540
10541 CASE_FLT_FN (BUILT_IN_YN):
10542 if (validate_arg (arg0, INTEGER_TYPE)
10543 && validate_arg (arg1, REAL_TYPE))
10544 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10545 &dconst0, false);
10546 break;
10547
10548 CASE_FLT_FN (BUILT_IN_DREM):
10549 CASE_FLT_FN (BUILT_IN_REMAINDER):
10550 if (validate_arg (arg0, REAL_TYPE)
10551 && validate_arg (arg1, REAL_TYPE))
10552 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10553 break;
10554
10555 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10556 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10557 if (validate_arg (arg0, REAL_TYPE)
10558 && validate_arg (arg1, POINTER_TYPE))
10559 return do_mpfr_lgamma_r (arg0, arg1, type);
10560 break;
10561
10562 CASE_FLT_FN (BUILT_IN_ATAN2):
10563 if (validate_arg (arg0, REAL_TYPE)
10564 && validate_arg (arg1, REAL_TYPE))
10565 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10566 break;
10567
10568 CASE_FLT_FN (BUILT_IN_FDIM):
10569 if (validate_arg (arg0, REAL_TYPE)
10570 && validate_arg (arg1, REAL_TYPE))
10571 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10572 break;
10573
10574 CASE_FLT_FN (BUILT_IN_HYPOT):
10575 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10576
10577 CASE_FLT_FN (BUILT_IN_CPOW):
10578 if (validate_arg (arg0, COMPLEX_TYPE)
10579 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10580 && validate_arg (arg1, COMPLEX_TYPE)
10581 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10582 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10583 break;
10584
10585 CASE_FLT_FN (BUILT_IN_LDEXP):
10586 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10587 CASE_FLT_FN (BUILT_IN_SCALBN):
10588 CASE_FLT_FN (BUILT_IN_SCALBLN):
10589 return fold_builtin_load_exponent (loc, arg0, arg1,
10590 type, /*ldexp=*/false);
10591
10592 CASE_FLT_FN (BUILT_IN_FREXP):
10593 return fold_builtin_frexp (loc, arg0, arg1, type);
10594
10595 CASE_FLT_FN (BUILT_IN_MODF):
10596 return fold_builtin_modf (loc, arg0, arg1, type);
10597
10598 case BUILT_IN_BZERO:
10599 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10600
10601 case BUILT_IN_FPUTS:
10602 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10603
10604 case BUILT_IN_FPUTS_UNLOCKED:
10605 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10606
10607 case BUILT_IN_STRSTR:
10608 return fold_builtin_strstr (loc, arg0, arg1, type);
10609
10610 case BUILT_IN_STRCAT:
10611 return fold_builtin_strcat (loc, arg0, arg1);
10612
10613 case BUILT_IN_STRSPN:
10614 return fold_builtin_strspn (loc, arg0, arg1);
10615
10616 case BUILT_IN_STRCSPN:
10617 return fold_builtin_strcspn (loc, arg0, arg1);
10618
10619 case BUILT_IN_STRCHR:
10620 case BUILT_IN_INDEX:
10621 return fold_builtin_strchr (loc, arg0, arg1, type);
10622
10623 case BUILT_IN_STRRCHR:
10624 case BUILT_IN_RINDEX:
10625 return fold_builtin_strrchr (loc, arg0, arg1, type);
10626
10627 case BUILT_IN_STRCPY:
10628 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10629
10630 case BUILT_IN_STPCPY:
10631 if (ignore)
10632 {
10633 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10634 if (!fn)
10635 break;
10636
10637 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10638 }
10639 else
10640 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10641 break;
10642
10643 case BUILT_IN_STRCMP:
10644 return fold_builtin_strcmp (loc, arg0, arg1);
10645
10646 case BUILT_IN_STRPBRK:
10647 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10648
10649 case BUILT_IN_EXPECT:
10650 return fold_builtin_expect (loc, arg0, arg1);
10651
10652 CASE_FLT_FN (BUILT_IN_POW):
10653 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10654
10655 CASE_FLT_FN (BUILT_IN_POWI):
10656 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10657
10658 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10659 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10660
10661 CASE_FLT_FN (BUILT_IN_FMIN):
10662 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10663
10664 CASE_FLT_FN (BUILT_IN_FMAX):
10665 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10666
10667 case BUILT_IN_ISGREATER:
10668 return fold_builtin_unordered_cmp (loc, fndecl,
10669 arg0, arg1, UNLE_EXPR, LE_EXPR);
10670 case BUILT_IN_ISGREATEREQUAL:
10671 return fold_builtin_unordered_cmp (loc, fndecl,
10672 arg0, arg1, UNLT_EXPR, LT_EXPR);
10673 case BUILT_IN_ISLESS:
10674 return fold_builtin_unordered_cmp (loc, fndecl,
10675 arg0, arg1, UNGE_EXPR, GE_EXPR);
10676 case BUILT_IN_ISLESSEQUAL:
10677 return fold_builtin_unordered_cmp (loc, fndecl,
10678 arg0, arg1, UNGT_EXPR, GT_EXPR);
10679 case BUILT_IN_ISLESSGREATER:
10680 return fold_builtin_unordered_cmp (loc, fndecl,
10681 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10682 case BUILT_IN_ISUNORDERED:
10683 return fold_builtin_unordered_cmp (loc, fndecl,
10684 arg0, arg1, UNORDERED_EXPR,
10685 NOP_EXPR);
10686
10687 /* We do the folding for va_start in the expander. */
10688 case BUILT_IN_VA_START:
10689 break;
10690
10691 case BUILT_IN_SPRINTF:
10692 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10693
10694 case BUILT_IN_OBJECT_SIZE:
10695 return fold_builtin_object_size (arg0, arg1);
10696
10697 case BUILT_IN_PRINTF:
10698 case BUILT_IN_PRINTF_UNLOCKED:
10699 case BUILT_IN_VPRINTF:
10700 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10701
10702 case BUILT_IN_PRINTF_CHK:
10703 case BUILT_IN_VPRINTF_CHK:
10704 if (!validate_arg (arg0, INTEGER_TYPE)
10705 || TREE_SIDE_EFFECTS (arg0))
10706 return NULL_TREE;
10707 else
10708 return fold_builtin_printf (loc, fndecl,
10709 arg1, NULL_TREE, ignore, fcode);
10710 break;
10711
10712 case BUILT_IN_FPRINTF:
10713 case BUILT_IN_FPRINTF_UNLOCKED:
10714 case BUILT_IN_VFPRINTF:
10715 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10716 ignore, fcode);
10717
10718 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10719 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10720
10721 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10722 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10723
10724 default:
10725 break;
10726 }
10727 return NULL_TREE;
10728 }
10729
10730 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10731 and ARG2. IGNORE is true if the result of the function call is ignored.
10732 This function returns NULL_TREE if no simplification was possible. */
10733
10734 static tree
10735 fold_builtin_3 (location_t loc, tree fndecl,
10736 tree arg0, tree arg1, tree arg2, bool ignore)
10737 {
10738 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10739 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10740 switch (fcode)
10741 {
10742
10743 CASE_FLT_FN (BUILT_IN_SINCOS):
10744 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10745
10746 CASE_FLT_FN (BUILT_IN_FMA):
10747 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10749
10750 CASE_FLT_FN (BUILT_IN_REMQUO):
10751 if (validate_arg (arg0, REAL_TYPE)
10752 && validate_arg (arg1, REAL_TYPE)
10753 && validate_arg (arg2, POINTER_TYPE))
10754 return do_mpfr_remquo (arg0, arg1, arg2);
10755 break;
10756
10757 case BUILT_IN_MEMSET:
10758 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10759
10760 case BUILT_IN_BCOPY:
10761 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10762 void_type_node, true, /*endp=*/3);
10763
10764 case BUILT_IN_MEMCPY:
10765 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10766 type, ignore, /*endp=*/0);
10767
10768 case BUILT_IN_MEMPCPY:
10769 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10770 type, ignore, /*endp=*/1);
10771
10772 case BUILT_IN_MEMMOVE:
10773 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10774 type, ignore, /*endp=*/3);
10775
10776 case BUILT_IN_STRNCAT:
10777 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10778
10779 case BUILT_IN_STRNCPY:
10780 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10781
10782 case BUILT_IN_STRNCMP:
10783 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10784
10785 case BUILT_IN_MEMCHR:
10786 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10787
10788 case BUILT_IN_BCMP:
10789 case BUILT_IN_MEMCMP:
10790 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10791
10792 case BUILT_IN_SPRINTF:
10793 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10794
10795 case BUILT_IN_SNPRINTF:
10796 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10797
10798 case BUILT_IN_STRCPY_CHK:
10799 case BUILT_IN_STPCPY_CHK:
10800 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10801 ignore, fcode);
10802
10803 case BUILT_IN_STRCAT_CHK:
10804 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10805
10806 case BUILT_IN_PRINTF_CHK:
10807 case BUILT_IN_VPRINTF_CHK:
10808 if (!validate_arg (arg0, INTEGER_TYPE)
10809 || TREE_SIDE_EFFECTS (arg0))
10810 return NULL_TREE;
10811 else
10812 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10813 break;
10814
10815 case BUILT_IN_FPRINTF:
10816 case BUILT_IN_FPRINTF_UNLOCKED:
10817 case BUILT_IN_VFPRINTF:
10818 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10819 ignore, fcode);
10820
10821 case BUILT_IN_FPRINTF_CHK:
10822 case BUILT_IN_VFPRINTF_CHK:
10823 if (!validate_arg (arg1, INTEGER_TYPE)
10824 || TREE_SIDE_EFFECTS (arg1))
10825 return NULL_TREE;
10826 else
10827 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10828 ignore, fcode);
10829
10830 default:
10831 break;
10832 }
10833 return NULL_TREE;
10834 }
10835
10836 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10837 ARG2, and ARG3. IGNORE is true if the result of the function call is
10838 ignored. This function returns NULL_TREE if no simplification was
10839 possible. */
10840
10841 static tree
10842 fold_builtin_4 (location_t loc, tree fndecl,
10843 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10844 {
10845 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10846
10847 switch (fcode)
10848 {
10849 case BUILT_IN_MEMCPY_CHK:
10850 case BUILT_IN_MEMPCPY_CHK:
10851 case BUILT_IN_MEMMOVE_CHK:
10852 case BUILT_IN_MEMSET_CHK:
10853 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10854 NULL_TREE, ignore,
10855 DECL_FUNCTION_CODE (fndecl));
10856
10857 case BUILT_IN_STRNCPY_CHK:
10858 case BUILT_IN_STPNCPY_CHK:
10859 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
10860 ignore, fcode);
10861
10862 case BUILT_IN_STRNCAT_CHK:
10863 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10864
10865 case BUILT_IN_SNPRINTF:
10866 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10867
10868 case BUILT_IN_FPRINTF_CHK:
10869 case BUILT_IN_VFPRINTF_CHK:
10870 if (!validate_arg (arg1, INTEGER_TYPE)
10871 || TREE_SIDE_EFFECTS (arg1))
10872 return NULL_TREE;
10873 else
10874 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10875 ignore, fcode);
10876 break;
10877
10878 default:
10879 break;
10880 }
10881 return NULL_TREE;
10882 }
10883
10884 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10885 arguments, where NARGS <= 4. IGNORE is true if the result of the
10886 function call is ignored. This function returns NULL_TREE if no
10887 simplification was possible. Note that this only folds builtins with
10888 fixed argument patterns. Foldings that do varargs-to-varargs
10889 transformations, or that match calls with more than 4 arguments,
10890 need to be handled with fold_builtin_varargs instead. */
10891
10892 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10893
10894 static tree
10895 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10896 {
10897 tree ret = NULL_TREE;
10898
10899 switch (nargs)
10900 {
10901 case 0:
10902 ret = fold_builtin_0 (loc, fndecl, ignore);
10903 break;
10904 case 1:
10905 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10906 break;
10907 case 2:
10908 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10909 break;
10910 case 3:
10911 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10912 break;
10913 case 4:
10914 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10915 ignore);
10916 break;
10917 default:
10918 break;
10919 }
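/* Wrap the folded result in a NOP_EXPR and set TREE_NO_WARNING so
   that removing the original call node does not trigger spurious
   "statement with no effect" warnings (see fold_call_expr below).  */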
10920 if (ret)
10921 {
10922 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10923 SET_EXPR_LOCATION (ret, loc);
10924 TREE_NO_WARNING (ret) = 1;
10925 return ret;
10926 }
10927 return NULL_TREE;
10928 }
10929
10930 /* Builtins with folding operations that operate on "..." arguments
10931 need special handling; we need to store the arguments in a convenient
10932 data structure before attempting any folding. Fortunately there are
10933 only a few builtins that fall into this category. FNDECL is the
10934 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10935 result of the function call is ignored. */
10936
10937 static tree
10938 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10939 bool ignore ATTRIBUTE_UNUSED)
10940 {
10941 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10942 tree ret = NULL_TREE;
10943
10944 switch (fcode)
10945 {
10946 case BUILT_IN_SPRINTF_CHK:
10947 case BUILT_IN_VSPRINTF_CHK:
10948 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10949 break;
10950
10951 case BUILT_IN_SNPRINTF_CHK:
10952 case BUILT_IN_VSNPRINTF_CHK:
10953 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10954 break;
10955
10956 case BUILT_IN_FPCLASSIFY:
10957 ret = fold_builtin_fpclassify (loc, exp);
10958 break;
10959
10960 default:
10961 break;
10962 }
10963 if (ret)
10964 {
10965 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10966 SET_EXPR_LOCATION (ret, loc);
10967 TREE_NO_WARNING (ret) = 1;
10968 return ret;
10969 }
10970 return NULL_TREE;
10971 }
10972
10973 /* Return true if FNDECL shouldn't be folded right now.
10974 If a built-in function has an inline attribute always_inline
10975 wrapper, defer folding it after always_inline functions have
10976 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10977 might not be performed. */
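/* For example, a glibc-style fortify wrapper (sketched here purely
   for illustration) looks like:

     extern __inline __attribute__ ((always_inline, gnu_inline)) char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
                                      __builtin_object_size (__dest, 1));
     }

   Folding a strcpy call before the wrapper has been inlined would
   bypass the object-size check.  */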
10978
10979 bool
10980 avoid_folding_inline_builtin (tree fndecl)
10981 {
10982 return (DECL_DECLARED_INLINE_P (fndecl)
10983 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10984 && cfun
10985 && !cfun->always_inline_functions_inlined
10986 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10987 }
10988
10989 /* A wrapper function for builtin folding that prevents warnings for
10990 "statement without effect" and the like, caused by removing the
10991 call node earlier than the warning is generated. */
10992
10993 tree
10994 fold_call_expr (location_t loc, tree exp, bool ignore)
10995 {
10996 tree ret = NULL_TREE;
10997 tree fndecl = get_callee_fndecl (exp);
10998 if (fndecl
10999 && TREE_CODE (fndecl) == FUNCTION_DECL
11000 && DECL_BUILT_IN (fndecl)
11001 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11002 yet. Defer folding until we see all the arguments
11003 (after inlining). */
11004 && !CALL_EXPR_VA_ARG_PACK (exp))
11005 {
11006 int nargs = call_expr_nargs (exp);
11007
11008 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11009 the last argument is __builtin_va_arg_pack () instead. Defer
11010 folding even in that case, until the arguments are finalized. */
11011 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11012 {
11013 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11014 if (fndecl2
11015 && TREE_CODE (fndecl2) == FUNCTION_DECL
11016 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11017 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11018 return NULL_TREE;
11019 }
11020
11021 if (avoid_folding_inline_builtin (fndecl))
11022 return NULL_TREE;
11023
11024 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11025 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11026 CALL_EXPR_ARGP (exp), ignore);
11027 else
11028 {
11029 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11030 {
11031 tree *args = CALL_EXPR_ARGP (exp);
11032 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11033 }
11034 if (!ret)
11035 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11036 if (ret)
11037 return ret;
11038 }
11039 }
11040 return NULL_TREE;
11041 }
11042
11043 /* Conveniently construct a function call expression. FNDECL names the
11044 function to be called and N arguments are passed in the array
11045 ARGARRAY. */
11046
11047 tree
11048 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11049 {
11050 tree fntype = TREE_TYPE (fndecl);
11051 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11052
11053 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11054 }
11055
11056 /* Conveniently construct a function call expression. FNDECL names the
11057 function to be called and the arguments are passed in the vector
11058 VEC. */
11059
11060 tree
11061 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11062 {
11063 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11064 vec_safe_address (vec));
11065 }
11066
11067
11068 /* Conveniently construct a function call expression. FNDECL names the
11069 function to be called, N is the number of arguments, and the "..."
11070 parameters are the argument expressions. */
11071
11072 tree
11073 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11074 {
11075 va_list ap;
11076 tree *argarray = XALLOCAVEC (tree, n);
11077 int i;
11078
11079 va_start (ap, n);
11080 for (i = 0; i < n; i++)
11081 argarray[i] = va_arg (ap, tree);
11082 va_end (ap);
11083 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11084 }
11085
11086 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11087 varargs macros aren't supported by all bootstrap compilers. */
11088
11089 tree
11090 build_call_expr (tree fndecl, int n, ...)
11091 {
11092 va_list ap;
11093 tree *argarray = XALLOCAVEC (tree, n);
11094 int i;
11095
11096 va_start (ap, n);
11097 for (i = 0; i < n; i++)
11098 argarray[i] = va_arg (ap, tree);
11099 va_end (ap);
11100 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11101 }
11102
11103 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11104 N arguments are passed in the array ARGARRAY. */
11105
11106 tree
11107 fold_builtin_call_array (location_t loc, tree type,
11108 tree fn,
11109 int n,
11110 tree *argarray)
11111 {
11112 tree ret = NULL_TREE;
11113 tree exp;
11114
11115 if (TREE_CODE (fn) == ADDR_EXPR)
11116 {
11117 tree fndecl = TREE_OPERAND (fn, 0);
11118 if (TREE_CODE (fndecl) == FUNCTION_DECL
11119 && DECL_BUILT_IN (fndecl))
11120 {
11121 /* If the last argument is __builtin_va_arg_pack (), the arguments
11122 to this function are not finalized yet. Defer folding until they are. */
11123 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11124 {
11125 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11126 if (fndecl2
11127 && TREE_CODE (fndecl2) == FUNCTION_DECL
11128 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11129 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11130 return build_call_array_loc (loc, type, fn, n, argarray);
11131 }
11132 if (avoid_folding_inline_builtin (fndecl))
11133 return build_call_array_loc (loc, type, fn, n, argarray);
11134 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11135 {
11136 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11137 if (ret)
11138 return ret;
11139
11140 return build_call_array_loc (loc, type, fn, n, argarray);
11141 }
11142 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11143 {
11144 /* First try the transformations that don't require consing up
11145 an exp. */
11146 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11147 if (ret)
11148 return ret;
11149 }
11150
11151 /* If we got this far, we need to build an exp. */
11152 exp = build_call_array_loc (loc, type, fn, n, argarray);
11153 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11154 return ret ? ret : exp;
11155 }
11156 }
11157
11158 return build_call_array_loc (loc, type, fn, n, argarray);
11159 }
11160
11161 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11162 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11163 of arguments in ARGS to be omitted. OLDNARGS is the number of
11164 elements in ARGS. */
11165
11166 static tree
11167 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11168 int skip, tree fndecl, int n, va_list newargs)
11169 {
11170 int nargs = oldnargs - skip + n;
11171 tree *buffer;
11172
11173 if (n > 0)
11174 {
11175 int i, j;
11176
11177 buffer = XALLOCAVEC (tree, nargs);
11178 for (i = 0; i < n; i++)
11179 buffer[i] = va_arg (newargs, tree);
11180 for (j = skip; j < oldnargs; j++, i++)
11181 buffer[i] = args[j];
11182 }
11183 else
11184 buffer = args + skip;
11185
11186 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11187 }
11188
11189 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11190 list ARGS along with N new arguments specified as the "..."
11191 parameters. SKIP is the number of arguments in ARGS to be omitted.
11192 OLDNARGS is the number of elements in ARGS. */
11193
11194 static tree
11195 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11196 int skip, tree fndecl, int n, ...)
11197 {
11198 va_list ap;
11199 tree t;
11200
11201 va_start (ap, n);
11202 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11203 va_end (ap);
11204
11205 return t;
11206 }
11207
11208 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11209 along with N new arguments specified as the "..." parameters. SKIP
11210 is the number of arguments in EXP to be omitted. This function is used
11211 to do varargs-to-varargs transformations. */
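/* For instance (an illustrative sketch; SPRINTF_FN stands for the
   sprintf FUNCTION_DECL and is not defined here):

     rewrite_call_expr (loc, exp, 4, sprintf_fn, 2, dest, fmt)

   drops the first four arguments of EXP and builds
   sprintf (dest, fmt, <remaining arguments of EXP>).  */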
11212
11213 static tree
11214 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11215 {
11216 va_list ap;
11217 tree t;
11218
11219 va_start (ap, n);
11220 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11221 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11222 va_end (ap);
11223
11224 return t;
11225 }
11226
11227 /* Validate a single argument ARG against a tree code CODE representing
11228 a type. */
11229
11230 static bool
11231 validate_arg (const_tree arg, enum tree_code code)
11232 {
11233 if (!arg)
11234 return false;
11235 else if (code == POINTER_TYPE)
11236 return POINTER_TYPE_P (TREE_TYPE (arg));
11237 else if (code == INTEGER_TYPE)
11238 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11239 return code == TREE_CODE (TREE_TYPE (arg));
11240 }
11241
11242 /* This function validates the types of a function call argument list
11243 against a specified list of tree_codes. If the last specifier is a 0,
11244 that represents an ellipsis; otherwise the last specifier must be a
11245 VOID_TYPE.
11246 
11247 This is the GIMPLE version of validate_arglist. Eventually we want to
11248 completely convert builtins.c to work on GIMPLE, and the tree-based
11249 validate_arglist will then be removed. */
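/* For example (an illustrative caller; the actual uses live in the
   folders throughout this file):

     if (!validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                                   INTEGER_TYPE, VOID_TYPE))
       return NULL_TREE;

   accepts exactly a (pointer, pointer, integer) argument list, with
   VOID_TYPE terminating the specifier list and 0 standing for a
   trailing ellipsis.  */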
11250
11251 bool
11252 validate_gimple_arglist (const_gimple call, ...)
11253 {
11254 enum tree_code code;
11255 bool res = false;
11256 va_list ap;
11257 const_tree arg;
11258 size_t i;
11259
11260 va_start (ap, call);
11261 i = 0;
11262
11263 do
11264 {
11265 code = (enum tree_code) va_arg (ap, int);
11266 switch (code)
11267 {
11268 case 0:
11269 /* This signifies an ellipsis; any further arguments are all ok. */
11270 res = true;
11271 goto end;
11272 case VOID_TYPE:
11273 /* This signifies an endlink; if no arguments remain, return
11274 true, otherwise return false. */
11275 res = (i == gimple_call_num_args (call));
11276 goto end;
11277 default:
11278 /* If no parameters remain or the parameter's code does not
11279 match the specified code, return false. Otherwise continue
11280 checking any remaining arguments. */
11281 arg = gimple_call_arg (call, i++);
11282 if (!validate_arg (arg, code))
11283 goto end;
11284 break;
11285 }
11286 }
11287 while (1);
11288
11289 /* We use gotos here so that there is a single exit point at which
11290 va_end is called. */
11291 end: ;
11292 va_end (ap);
11293
11294 return res;
11295 }
11296
11297 /* This function validates the types of a function call argument list
11298 against a specified list of tree_codes. If the last specifier is a 0,
11299 that represents an ellipsis; otherwise the last specifier must be a
11300 VOID_TYPE. */
11301
11302 bool
11303 validate_arglist (const_tree callexpr, ...)
11304 {
11305 enum tree_code code;
11306 bool res = false;
11307 va_list ap;
11308 const_call_expr_arg_iterator iter;
11309 const_tree arg;
11310
11311 va_start (ap, callexpr);
11312 init_const_call_expr_arg_iterator (callexpr, &iter);
11313
11314 do
11315 {
11316 code = (enum tree_code) va_arg (ap, int);
11317 switch (code)
11318 {
11319 case 0:
11320 /* This signifies an ellipsis; any further arguments are all ok. */
11321 res = true;
11322 goto end;
11323 case VOID_TYPE:
11324 /* This signifies an endlink; if no arguments remain, return
11325 true, otherwise return false. */
11326 res = !more_const_call_expr_args_p (&iter);
11327 goto end;
11328 default:
11329 /* If no parameters remain or the parameter's code does not
11330 match the specified code, return false. Otherwise continue
11331 checking any remaining arguments. */
11332 arg = next_const_call_expr_arg (&iter);
11333 if (!validate_arg (arg, code))
11334 goto end;
11335 break;
11336 }
11337 }
11338 while (1);
11339
11340 /* We use gotos here so that there is a single exit point at which
11341 va_end is called. */
11342 end: ;
11343 va_end (ap);
11344
11345 return res;
11346 }
11347
11348 /* Default target-specific builtin expander that does nothing. */
11349
11350 rtx
11351 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11352 rtx target ATTRIBUTE_UNUSED,
11353 rtx subtarget ATTRIBUTE_UNUSED,
11354 enum machine_mode mode ATTRIBUTE_UNUSED,
11355 int ignore ATTRIBUTE_UNUSED)
11356 {
11357 return NULL_RTX;
11358 }
11359
11360 /* Returns true if EXP represents data that would potentially reside
11361 in a readonly section. */
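/* E.g. (sketch) the address of a string literal, or of a TREE_STATIC
   VAR_DECL whose section decl_readonly_section reports as read-only,
   qualifies; anything else conservatively does not.  */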
11362
11363 static bool
11364 readonly_data_expr (tree exp)
11365 {
11366 STRIP_NOPS (exp);
11367
11368 if (TREE_CODE (exp) != ADDR_EXPR)
11369 return false;
11370
11371 exp = get_base_address (TREE_OPERAND (exp, 0));
11372 if (!exp)
11373 return false;
11374
11375 /* Make sure we call decl_readonly_section only for trees it
11376 can handle (since it returns true for everything it doesn't
11377 understand). */
11378 if (TREE_CODE (exp) == STRING_CST
11379 || TREE_CODE (exp) == CONSTRUCTOR
11380 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11381 return decl_readonly_section (exp, 0);
11382 else
11383 return false;
11384 }
11385
11386 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11387 to the call, and TYPE is its return type.
11388
11389 Return NULL_TREE if no simplification was possible, otherwise return the
11390 simplified form of the call as a tree.
11391
11392 The simplified form may be a constant or other expression which
11393 computes the same value, but in a more efficient manner (including
11394 calls to other builtin functions).
11395
11396 The call may contain arguments which need to be evaluated, but
11397 which are not useful to determine the result of the call. In
11398 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11399 COMPOUND_EXPR will be an argument which must be evaluated.
11400 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11401 COMPOUND_EXPR in the chain will contain the tree for the simplified
11402 form of the builtin function call. */
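/* Concretely (an editorial sketch of the cases handled below):

     strstr (s, "")   =>  (char *) s
     strstr (s, "c")  =>  strchr (s, 'c')

   and with both arguments constant the result is computed here at
   compile time as an offset into S1.  */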
11403
11404 static tree
11405 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11406 {
11407 if (!validate_arg (s1, POINTER_TYPE)
11408 || !validate_arg (s2, POINTER_TYPE))
11409 return NULL_TREE;
11410 else
11411 {
11412 tree fn;
11413 const char *p1, *p2;
11414
11415 p2 = c_getstr (s2);
11416 if (p2 == NULL)
11417 return NULL_TREE;
11418
11419 p1 = c_getstr (s1);
11420 if (p1 != NULL)
11421 {
11422 const char *r = strstr (p1, p2);
11423 tree tem;
11424
11425 if (r == NULL)
11426 return build_int_cst (TREE_TYPE (s1), 0);
11427
11428 /* Return an offset into the constant string argument. */
11429 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11430 return fold_convert_loc (loc, type, tem);
11431 }
11432
11433 /* The argument is const char *, and the result is char *, so we need
11434 a type conversion here to avoid a warning. */
11435 if (p2[0] == '\0')
11436 return fold_convert_loc (loc, type, s1);
11437
11438 if (p2[1] != '\0')
11439 return NULL_TREE;
11440
11441 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11442 if (!fn)
11443 return NULL_TREE;
11444
11445 /* New argument list transforming strstr(s1, s2) to
11446 strchr(s1, s2[0]). */
11447 return build_call_expr_loc (loc, fn, 2, s1,
11448 build_int_cst (integer_type_node, p2[0]));
11449 }
11450 }
11451
11452 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11453 the call, and TYPE is its return type.
11454
11455 Return NULL_TREE if no simplification was possible, otherwise return the
11456 simplified form of the call as a tree.
11457
11458 The simplified form may be a constant or other expression which
11459 computes the same value, but in a more efficient manner (including
11460 calls to other builtin functions).
11461
11462 The call may contain arguments which need to be evaluated, but
11463 which are not useful to determine the result of the call. In
11464 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11465 COMPOUND_EXPR will be an argument which must be evaluated.
11466 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11467 COMPOUND_EXPR in the chain will contain the tree for the simplified
11468 form of the builtin function call. */
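/* E.g. (sketch): strchr ("hello", 'l') folds to the constant
   "hello" + 2, and strchr ("hello", 'z') folds to a null pointer.
   Non-constant strings are left for the library call.  */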
11469
11470 static tree
11471 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11472 {
11473 if (!validate_arg (s1, POINTER_TYPE)
11474 || !validate_arg (s2, INTEGER_TYPE))
11475 return NULL_TREE;
11476 else
11477 {
11478 const char *p1;
11479
11480 if (TREE_CODE (s2) != INTEGER_CST)
11481 return NULL_TREE;
11482
11483 p1 = c_getstr (s1);
11484 if (p1 != NULL)
11485 {
11486 char c;
11487 const char *r;
11488 tree tem;
11489
11490 if (target_char_cast (s2, &c))
11491 return NULL_TREE;
11492
11493 r = strchr (p1, c);
11494
11495 if (r == NULL)
11496 return build_int_cst (TREE_TYPE (s1), 0);
11497
11498 /* Return an offset into the constant string argument. */
11499 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11500 return fold_convert_loc (loc, type, tem);
11501 }
11502 return NULL_TREE;
11503 }
11504 }
11505
11506 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11507 the call, and TYPE is its return type.
11508
11509 Return NULL_TREE if no simplification was possible, otherwise return the
11510 simplified form of the call as a tree.
11511
11512 The simplified form may be a constant or other expression which
11513 computes the same value, but in a more efficient manner (including
11514 calls to other builtin functions).
11515
11516 The call may contain arguments which need to be evaluated, but
11517 which are not useful to determine the result of the call. In
11518 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11519 COMPOUND_EXPR will be an argument which must be evaluated.
11520 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11521 COMPOUND_EXPR in the chain will contain the tree for the simplified
11522 form of the builtin function call. */
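/* E.g. (sketch): with a constant string the result is computed here
   at compile time, and strrchr (s, '\0') becomes strchr (s, '\0'),
   since both then locate the terminating nul.  */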
11523
11524 static tree
11525 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11526 {
11527 if (!validate_arg (s1, POINTER_TYPE)
11528 || !validate_arg (s2, INTEGER_TYPE))
11529 return NULL_TREE;
11530 else
11531 {
11532 tree fn;
11533 const char *p1;
11534
11535 if (TREE_CODE (s2) != INTEGER_CST)
11536 return NULL_TREE;
11537
11538 p1 = c_getstr (s1);
11539 if (p1 != NULL)
11540 {
11541 char c;
11542 const char *r;
11543 tree tem;
11544
11545 if (target_char_cast (s2, &c))
11546 return NULL_TREE;
11547
11548 r = strrchr (p1, c);
11549
11550 if (r == NULL)
11551 return build_int_cst (TREE_TYPE (s1), 0);
11552
11553 /* Return an offset into the constant string argument. */
11554 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11555 return fold_convert_loc (loc, type, tem);
11556 }
11557
11558 if (! integer_zerop (s2))
11559 return NULL_TREE;
11560
11561 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11562 if (!fn)
11563 return NULL_TREE;
11564
11565 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11566 return build_call_expr_loc (loc, fn, 2, s1, s2);
11567 }
11568 }
11569
11570 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11571 to the call, and TYPE is its return type.
11572
11573 Return NULL_TREE if no simplification was possible, otherwise return the
11574 simplified form of the call as a tree.
11575
11576 The simplified form may be a constant or other expression which
11577 computes the same value, but in a more efficient manner (including
11578 calls to other builtin functions).
11579
11580 The call may contain arguments which need to be evaluated, but
11581 which are not useful to determine the result of the call. In
11582 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11583 COMPOUND_EXPR will be an argument which must be evaluated.
11584 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11585 COMPOUND_EXPR in the chain will contain the tree for the simplified
11586 form of the builtin function call. */
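/* E.g. (sketch): strpbrk (s, "") folds to a null pointer (S1 is
   still evaluated for side effects), and strpbrk (s, "c") becomes
   strchr (s, 'c').  */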
11587
11588 static tree
11589 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11590 {
11591 if (!validate_arg (s1, POINTER_TYPE)
11592 || !validate_arg (s2, POINTER_TYPE))
11593 return NULL_TREE;
11594 else
11595 {
11596 tree fn;
11597 const char *p1, *p2;
11598
11599 p2 = c_getstr (s2);
11600 if (p2 == NULL)
11601 return NULL_TREE;
11602
11603 p1 = c_getstr (s1);
11604 if (p1 != NULL)
11605 {
11606 const char *r = strpbrk (p1, p2);
11607 tree tem;
11608
11609 if (r == NULL)
11610 return build_int_cst (TREE_TYPE (s1), 0);
11611
11612 /* Return an offset into the constant string argument. */
11613 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11614 return fold_convert_loc (loc, type, tem);
11615 }
11616
11617 if (p2[0] == '\0')
11618 /* strpbrk(x, "") == NULL.
11619 Evaluate and ignore s1 in case it had side-effects. */
11620 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11621
11622 if (p2[1] != '\0')
11623 return NULL_TREE; /* Really call strpbrk. */
11624
11625 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11626 if (!fn)
11627 return NULL_TREE;
11628
11629 /* New argument list transforming strpbrk(s1, s2) to
11630 strchr(s1, s2[0]). */
11631 return build_call_expr_loc (loc, fn, 2, s1,
11632 build_int_cst (integer_type_node, p2[0]));
11633 }
11634 }
11635
11636 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11637 to the call.
11638
11639 Return NULL_TREE if no simplification was possible, otherwise return the
11640 simplified form of the call as a tree.
11641
11642 The simplified form may be a constant or other expression which
11643 computes the same value, but in a more efficient manner (including
11644 calls to other builtin functions).
11645
11646 The call may contain arguments which need to be evaluated, but
11647 which are not useful to determine the result of the call. In
11648 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11649 COMPOUND_EXPR will be an argument which must be evaluated.
11650 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11651 COMPOUND_EXPR in the chain will contain the tree for the simplified
11652 form of the builtin function call. */
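/* When optimizing for speed, the transformation below amounts to
   (an editorial sketch; the SAVE_EXPRs and the COMPOUND_EXPR chain
   are elided):

     strcat (dst, src)  =>  strcpy (dst + strlen (dst), src), dst

   i.e. one strlen call plus one strcpy call, with DST as the value
   of the whole expression.  */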
11653
11654 static tree
11655 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11656 {
11657 if (!validate_arg (dst, POINTER_TYPE)
11658 || !validate_arg (src, POINTER_TYPE))
11659 return NULL_TREE;
11660 else
11661 {
11662 const char *p = c_getstr (src);
11663
11664 /* If the string length is zero, return the dst parameter. */
11665 if (p && *p == '\0')
11666 return dst;
11667
11668 if (optimize_insn_for_speed_p ())
11669 {
11670 /* See if we can store by pieces into (dst + strlen(dst)). */
11671 tree newdst, call;
11672 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11673 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11674
11675 if (!strlen_fn || !strcpy_fn)
11676 return NULL_TREE;
11677
11678 /* If we don't have a movstr pattern we don't want to emit a strcpy
11679 call unless the length of the source string is computable (in
11680 that case the strcpy can become a memcpy and probably later a
11681 sequence of mov instructions). If we have movstr instructions
11682 we can emit strcpy calls unconditionally. */
11683 if (!HAVE_movstr)
11684 {
11685 tree len = c_strlen (src, 1);
11686 if (! len || TREE_SIDE_EFFECTS (len))
11687 return NULL_TREE;
11688 }
11689
11690 /* Stabilize the argument list. */
11691 dst = builtin_save_expr (dst);
11692
11693 /* Create strlen (dst). */
11694 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11695 /* Create (dst p+ strlen (dst)). */
11696
11697 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11698 newdst = builtin_save_expr (newdst);
11699
11700 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11701 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11702 }
11703 return NULL_TREE;
11704 }
11705 }
11706
11707 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11708 arguments to the call.
11709
11710 Return NULL_TREE if no simplification was possible, otherwise return the
11711 simplified form of the call as a tree.
11712
11713 The simplified form may be a constant or other expression which
11714 computes the same value, but in a more efficient manner (including
11715 calls to other builtin functions).
11716
11717 The call may contain arguments which need to be evaluated, but
11718 which are not useful to determine the result of the call. In
11719 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11720 COMPOUND_EXPR will be an argument which must be evaluated.
11721 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11722 COMPOUND_EXPR in the chain will contain the tree for the simplified
11723 form of the builtin function call. */
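/* E.g. (sketch): strncat (dst, "abc", 8) becomes strcat (dst, "abc"),
   since the bound 8 is at least strlen ("abc"), while a bound of 0
   folds the whole call to DST.  */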
11724
11725 static tree
11726 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11727 {
11728 if (!validate_arg (dst, POINTER_TYPE)
11729 || !validate_arg (src, POINTER_TYPE)
11730 || !validate_arg (len, INTEGER_TYPE))
11731 return NULL_TREE;
11732 else
11733 {
11734 const char *p = c_getstr (src);
11735
11736 /* If the requested length is zero, or the src parameter string
11737 length is zero, return the dst parameter. */
11738 if (integer_zerop (len) || (p && *p == '\0'))
11739 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11740
11741 /* If the requested len is greater than or equal to the string
11742 length, call strcat. */
11743 if (TREE_CODE (len) == INTEGER_CST && p
11744 && compare_tree_int (len, strlen (p)) >= 0)
11745 {
11746 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11747
11748 /* If the replacement _DECL isn't initialized, don't do the
11749 transformation. */
11750 if (!fn)
11751 return NULL_TREE;
11752
11753 return build_call_expr_loc (loc, fn, 2, dst, src);
11754 }
11755 return NULL_TREE;
11756 }
11757 }
11758
11759 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11760 to the call.
11761
11762 Return NULL_TREE if no simplification was possible, otherwise return the
11763 simplified form of the call as a tree.
11764
11765 The simplified form may be a constant or other expression which
11766 computes the same value, but in a more efficient manner (including
11767 calls to other builtin functions).
11768
11769 The call may contain arguments which need to be evaluated, but
11770 which are not useful to determine the result of the call. In
11771 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11772 COMPOUND_EXPR will be an argument which must be evaluated.
11773 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11774 COMPOUND_EXPR in the chain will contain the tree for the simplified
11775 form of the builtin function call. */
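/* E.g. (sketch): strspn ("aab", "a") folds to the constant 2, and if
   either argument is known to be "" the result is 0, while still
   evaluating both arguments for side effects.  */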
11776
11777 static tree
11778 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11779 {
11780 if (!validate_arg (s1, POINTER_TYPE)
11781 || !validate_arg (s2, POINTER_TYPE))
11782 return NULL_TREE;
11783 else
11784 {
11785 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11786
11787 /* If both arguments are constants, evaluate at compile-time. */
11788 if (p1 && p2)
11789 {
11790 const size_t r = strspn (p1, p2);
11791 return build_int_cst (size_type_node, r);
11792 }
11793
11794 /* If either argument is "", the result is 0. */
11795 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11796 /* Evaluate and ignore both arguments in case either one has
11797 side-effects. */
11798 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11799 s1, s2);
11800 return NULL_TREE;
11801 }
11802 }
11803
11804 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11805 to the call.
11806
11807 Return NULL_TREE if no simplification was possible, otherwise return the
11808 simplified form of the call as a tree.
11809
11810 The simplified form may be a constant or other expression which
11811 computes the same value, but in a more efficient manner (including
11812 calls to other builtin functions).
11813
11814 The call may contain arguments which need to be evaluated, but
11815 which are not useful to determine the result of the call. In
11816 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11817 COMPOUND_EXPR will be an argument which must be evaluated.
11818 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11819 COMPOUND_EXPR in the chain will contain the tree for the simplified
11820 form of the builtin function call. */
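/* E.g. (sketch): strcspn ("abc", "c") folds to the constant 2,
   strcspn ("", s2) folds to 0, and strcspn (s1, "") becomes
   strlen (s1).  */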
11821
11822 static tree
11823 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11824 {
11825 if (!validate_arg (s1, POINTER_TYPE)
11826 || !validate_arg (s2, POINTER_TYPE))
11827 return NULL_TREE;
11828 else
11829 {
11830 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11831
11832 /* If both arguments are constants, evaluate at compile-time. */
11833 if (p1 && p2)
11834 {
11835 const size_t r = strcspn (p1, p2);
11836 return build_int_cst (size_type_node, r);
11837 }
11838
11839 /* If the first argument is "", the result is 0. */
11840 if (p1 && *p1 == '\0')
11841 {
11842 /* Evaluate and ignore argument s2 in case it has
11843 side-effects. */
11844 return omit_one_operand_loc (loc, size_type_node,
11845 size_zero_node, s2);
11846 }
11847
11848 /* If the second argument is "", return __builtin_strlen(s1). */
11849 if (p2 && *p2 == '\0')
11850 {
11851 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11852
11853 /* If the replacement _DECL isn't initialized, don't do the
11854 transformation. */
11855 if (!fn)
11856 return NULL_TREE;
11857
11858 return build_call_expr_loc (loc, fn, 1, s1);
11859 }
11860 return NULL_TREE;
11861 }
11862 }
11863
11864 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11865 to the call. IGNORE is true if the value returned
11866 by the builtin will be ignored. UNLOCKED is true if this is
11867 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11868 the known length of the string. Return NULL_TREE if no simplification
11869 was possible. */
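/* In effect (an editorial sketch): fputs ("", f) is deleted outright,
   fputs ("x", f) becomes fputc ('x', f), and fputs ("xyz", f) becomes
   fwrite ("xyz", 1, 3, f) unless we are optimizing for size.  */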
11870
11871 tree
11872 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11873 bool ignore, bool unlocked, tree len)
11874 {
11875 /* If we're using an unlocked function, assume the other unlocked
11876 functions exist explicitly. */
11877 tree const fn_fputc = (unlocked
11878 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11879 : builtin_decl_implicit (BUILT_IN_FPUTC));
11880 tree const fn_fwrite = (unlocked
11881 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11882 : builtin_decl_implicit (BUILT_IN_FWRITE));
11883
11884 /* If the return value is used, don't do the transformation. */
11885 if (!ignore)
11886 return NULL_TREE;
11887
11888 /* Verify the arguments in the original call. */
11889 if (!validate_arg (arg0, POINTER_TYPE)
11890 || !validate_arg (arg1, POINTER_TYPE))
11891 return NULL_TREE;
11892
11893 if (! len)
11894 len = c_strlen (arg0, 0);
11895
11896 /* Get the length of the string passed to fputs. If the length
11897 can't be determined, punt. */
11898 if (!len
11899 || TREE_CODE (len) != INTEGER_CST)
11900 return NULL_TREE;
11901
11902 switch (compare_tree_int (len, 1))
11903 {
11904 case -1: /* length is 0, delete the call entirely. */
11905 return omit_one_operand_loc (loc, integer_type_node,
11906 integer_zero_node, arg1);
11907
11908 case 0: /* length is 1, call fputc. */
11909 {
11910 const char *p = c_getstr (arg0);
11911
11912 if (p != NULL)
11913 {
11914 if (fn_fputc)
11915 return build_call_expr_loc (loc, fn_fputc, 2,
11916 build_int_cst
11917 (integer_type_node, p[0]), arg1);
11918 else
11919 return NULL_TREE;
11920 }
11921 }
11922 /* FALLTHROUGH */
11923 case 1: /* length is greater than 1, call fwrite. */
11924 {
11925 /* If optimizing for size keep fputs. */
11926 if (optimize_function_for_size_p (cfun))
11927 return NULL_TREE;
11928 /* New argument list transforming fputs(string, stream) to
11929 fwrite(string, 1, len, stream). */
11930 if (fn_fwrite)
11931 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11932 size_one_node, len, arg1);
11933 else
11934 return NULL_TREE;
11935 }
11936 default:
11937 gcc_unreachable ();
11938 }
11939 return NULL_TREE;
11940 }
11941
11942 /* Fold the next_arg or va_start call EXP. Returns true if an error
11943 was produced, false otherwise. This is done so that we don't output
11944 the error or warning twice or three times. */
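/* E.g. (sketch): given  void f (int a, int last, ...),  the call
   va_start (ap, last) passes the checks below, whereas
   va_start (ap, a) draws the -Wvarargs warning about the second
   parameter not being the last named argument.  */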
11945
11946 bool
11947 fold_builtin_next_arg (tree exp, bool va_start_p)
11948 {
11949 tree fntype = TREE_TYPE (current_function_decl);
11950 int nargs = call_expr_nargs (exp);
11951 tree arg;
11952 /* There is a good chance the current input_location points inside the
11953 definition of the va_start macro (perhaps on the token for the
11954 builtin) in a system header, so warnings will not be emitted.
11955 Use the location in real source code. */
11956 source_location current_location =
11957 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11958 NULL);
11959
11960 if (!stdarg_p (fntype))
11961 {
11962 error ("%<va_start%> used in function with fixed args");
11963 return true;
11964 }
11965
11966 if (va_start_p)
11967 {
11968 if (va_start_p && (nargs != 2))
11969 {
11970 error ("wrong number of arguments to function %<va_start%>");
11971 return true;
11972 }
11973 arg = CALL_EXPR_ARG (exp, 1);
11974 }
11975 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11976 when we checked the arguments and if needed issued a warning. */
11977 else
11978 {
11979 if (nargs == 0)
11980 {
11981 /* Evidently an out of date version of <stdarg.h>; can't validate
11982 va_start's second argument, but can still work as intended. */
11983 warning_at (current_location,
11984 OPT_Wvarargs,
11985 "%<__builtin_next_arg%> called without an argument");
11986 return true;
11987 }
11988 else if (nargs > 1)
11989 {
11990 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11991 return true;
11992 }
11993 arg = CALL_EXPR_ARG (exp, 0);
11994 }
11995
11996 if (TREE_CODE (arg) == SSA_NAME)
11997 arg = SSA_NAME_VAR (arg);
11998
11999 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12000 or __builtin_next_arg (0) the first time we see it, after checking
12001 the arguments and if needed issuing a warning. */
12002 if (!integer_zerop (arg))
12003 {
12004 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12005
12006 /* Strip off all nops for the sake of the comparison. This
12007 is not quite the same as STRIP_NOPS. It does more.
12008 We must also strip off INDIRECT_EXPR for C++ reference
12009 parameters. */
12010 while (CONVERT_EXPR_P (arg)
12011 || TREE_CODE (arg) == INDIRECT_REF)
12012 arg = TREE_OPERAND (arg, 0);
12013 if (arg != last_parm)
12014 {
12015 /* FIXME: Sometimes with the tree optimizers we can get
12016 something other than the last argument even though the user
12017 used the last argument. We just warn and continue, so wrong
12018 code may be generated because of it. */
12020 warning_at (current_location,
12021 OPT_Wvarargs,
12022 "second parameter of %<va_start%> not last named argument");
12023 }
12024
12025 /* Undefined by C99 7.15.1.4p4 (va_start):
12026 "If the parameter parmN is declared with the register storage
12027 class, with a function or array type, or with a type that is
12028 not compatible with the type that results after application of
12029 the default argument promotions, the behavior is undefined."
12030 */
12031 else if (DECL_REGISTER (arg))
12032 {
12033 warning_at (current_location,
12034 OPT_Wvarargs,
12035 "undefined behaviour when second parameter of "
12036 "%<va_start%> is declared with %<register%> storage");
12037 }
12038
12039 /* We want to verify the second parameter just once before the tree
12040 optimizers are run and then avoid keeping it in the tree,
12041 as otherwise we could warn even for correct code like:
12042 void foo (int i, ...)
12043 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12044 if (va_start_p)
12045 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12046 else
12047 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12048 }
12049 return false;
12050 }
12051
12052
12053 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12054 ORIG may be null if this is a 2-argument call. We don't attempt to
12055 simplify calls with more than 3 arguments.
12056
12057 Return NULL_TREE if no simplification was possible, otherwise return the
12058 simplified form of the call as a tree. If IGNORED is true, it means that
12059 the caller does not use the returned value of the function. */
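/* In effect (an editorial sketch): sprintf (buf, "abc") becomes
   strcpy (buf, "abc") with known result 3, and sprintf (buf, "%s", s)
   becomes strcpy (buf, s); any other format string is left for the
   library call.  */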
12060
12061 static tree
12062 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12063 tree orig, int ignored)
12064 {
12065 tree call, retval;
12066 const char *fmt_str = NULL;
12067
12068 /* Verify the required arguments in the original call. We deal with two
12069 types of sprintf() calls: 'sprintf (str, fmt)' and
12070 'sprintf (dest, "%s", orig)'. */
12071 if (!validate_arg (dest, POINTER_TYPE)
12072 || !validate_arg (fmt, POINTER_TYPE))
12073 return NULL_TREE;
12074 if (orig && !validate_arg (orig, POINTER_TYPE))
12075 return NULL_TREE;
12076
12077 /* Check whether the format is a literal string constant. */
12078 fmt_str = c_getstr (fmt);
12079 if (fmt_str == NULL)
12080 return NULL_TREE;
12081
12082 call = NULL_TREE;
12083 retval = NULL_TREE;
12084
12085 if (!init_target_chars ())
12086 return NULL_TREE;
12087
12088 /* If the format doesn't contain % args or %%, use strcpy. */
12089 if (strchr (fmt_str, target_percent) == NULL)
12090 {
12091 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12092
12093 if (!fn)
12094 return NULL_TREE;
12095
12096 /* Don't optimize sprintf (buf, "abc", ptr++). */
12097 if (orig)
12098 return NULL_TREE;
12099
12100 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12101 'format' is known to contain no % formats. */
12102 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12103 if (!ignored)
12104 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12105 }
12106
12107 /* If the format is "%s", use strcpy if the result isn't used. */
12108 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12109 {
12110 tree fn;
12111 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12112
12113 if (!fn)
12114 return NULL_TREE;
12115
12116 /* Don't crash on sprintf (str1, "%s"). */
12117 if (!orig)
12118 return NULL_TREE;
12119
12120 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12121 if (!ignored)
12122 {
12123 retval = c_strlen (orig, 1);
12124 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12125 return NULL_TREE;
12126 }
12127 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12128 }
12129
12130 if (call && retval)
12131 {
12132 retval = fold_convert_loc
12133 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12134 retval);
12135 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12136 }
12137 else
12138 return call;
12139 }
12140
12141 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12142 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12143 attempt to simplify calls with more than 4 arguments.
12144
12145 Return NULL_TREE if no simplification was possible, otherwise return the
12146 simplified form of the call as a tree. If IGNORED is true, it means that
12147 the caller does not use the returned value of the function. */
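/* Illustrative sketch (editor's example, not in the original source;
   "buf" and "str" are hypothetical):

     snprintf (buf, 16, "abc");      =>  strcpy (buf, "abc"), value 3
     snprintf (buf, 16, "%s", str);  =>  strcpy (buf, str)
                                         when strlen (str) < 16

   and punt whenever the copied length might reach DESTSIZE.  */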
12148
12149 static tree
12150 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12151 tree orig, int ignored)
12152 {
12153 tree call, retval;
12154 const char *fmt_str = NULL;
12155 unsigned HOST_WIDE_INT destlen;
12156
12157 /* Verify the required arguments in the original call. We deal with two
12158 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12159 'snprintf (dest, cst, "%s", orig)'. */
12160 if (!validate_arg (dest, POINTER_TYPE)
12161 || !validate_arg (destsize, INTEGER_TYPE)
12162 || !validate_arg (fmt, POINTER_TYPE))
12163 return NULL_TREE;
12164 if (orig && !validate_arg (orig, POINTER_TYPE))
12165 return NULL_TREE;
12166
12167 if (!host_integerp (destsize, 1))
12168 return NULL_TREE;
12169
12170 /* Check whether the format is a literal string constant. */
12171 fmt_str = c_getstr (fmt);
12172 if (fmt_str == NULL)
12173 return NULL_TREE;
12174
12175 call = NULL_TREE;
12176 retval = NULL_TREE;
12177
12178 if (!init_target_chars ())
12179 return NULL_TREE;
12180
12181 destlen = tree_low_cst (destsize, 1);
12182
12183 /* If the format doesn't contain % args or %%, use strcpy. */
12184 if (strchr (fmt_str, target_percent) == NULL)
12185 {
12186 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12187 size_t len = strlen (fmt_str);
12188
12189 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12190 if (orig)
12191 return NULL_TREE;
12192
12193 /* We could expand this as
12194 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12195 or to
12196 memcpy (str, fmt_with_nul_at_cstm1, cst);
12197 but in the former case that might increase code size
12198 and in the latter case grow .rodata section too much.
12199 So punt for now. */
12200 if (len >= destlen)
12201 return NULL_TREE;
12202
12203 if (!fn)
12204 return NULL_TREE;
12205
12206 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12207 'format' is known to contain no % formats and
12208 strlen (fmt) < cst. */
12209 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12210
12211 if (!ignored)
12212 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12213 }
12214
12215 /* If the format is "%s" and the length of ORIG is a known constant
      smaller than DESTSIZE, use strcpy.  */
12216 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12217 {
12218 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12219 unsigned HOST_WIDE_INT origlen;
12220
12221 /* Don't crash on snprintf (str1, cst, "%s"). */
12222 if (!orig)
12223 return NULL_TREE;
12224
12225 retval = c_strlen (orig, 1);
12226 if (!retval || !host_integerp (retval, 1))
12227 return NULL_TREE;
12228
12229 origlen = tree_low_cst (retval, 1);
12230 /* We could expand this as
12231 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12232 or to
12233 memcpy (str1, str2_with_nul_at_cstm1, cst);
12234 but in the former case that might increase code size
12235 and in the latter case grow .rodata section too much.
12236 So punt for now. */
12237 if (origlen >= destlen)
12238 return NULL_TREE;
12239
12240 /* Convert snprintf (str1, cst, "%s", str2) into
12241 strcpy (str1, str2) if strlen (str2) < cst. */
12242 if (!fn)
12243 return NULL_TREE;
12244
12245 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12246
12247 if (ignored)
12248 retval = NULL_TREE;
12249 }
12250
12251 if (call && retval)
12252 {
12253 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12254 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12255 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12256 }
12257 else
12258 return call;
12259 }
12260
12261 /* Expand a call EXP to __builtin_object_size. */
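/* Illustrative example (editor's addition): when the object size is
   still unknown at expansion time, the fallback below yields

     __builtin_object_size (p, 0)  =>  (size_t) -1
     __builtin_object_size (p, 2)  =>  (size_t) 0

   i.e. the "maximum" answer for types 0/1 and the "minimum" for 2/3.  */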
12262
12263 rtx
12264 expand_builtin_object_size (tree exp)
12265 {
12266 tree ost;
12267 int object_size_type;
12268 tree fndecl = get_callee_fndecl (exp);
12269
12270 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12271 {
12272 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12273 exp, fndecl);
12274 expand_builtin_trap ();
12275 return const0_rtx;
12276 }
12277
12278 ost = CALL_EXPR_ARG (exp, 1);
12279 STRIP_NOPS (ost);
12280
12281 if (TREE_CODE (ost) != INTEGER_CST
12282 || tree_int_cst_sgn (ost) < 0
12283 || compare_tree_int (ost, 3) > 0)
12284 {
12285 error ("%Klast argument of %D is not integer constant between 0 and 3",
12286 exp, fndecl);
12287 expand_builtin_trap ();
12288 return const0_rtx;
12289 }
12290
12291 object_size_type = tree_low_cst (ost, 0);
12292
12293 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12294 }
12295
12296 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12297 FCODE is the BUILT_IN_* to use.
12298 Return NULL_RTX if we failed; the caller should emit a normal call,
12299 otherwise try to get the result in TARGET, if convenient (and in
12300 mode MODE if that's convenient). */
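/* Hypothetical example (editor's illustration): a call such as
   __memcpy_chk (d, s, n, 8) with constant n <= 8 expands as plain
   memcpy (d, s, n); with constant n > 8 a "will always overflow"
   warning is emitted and NULL_RTX makes the caller emit the normal
   checking call.  */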
12301
12302 static rtx
12303 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12304 enum built_in_function fcode)
12305 {
12306 tree dest, src, len, size;
12307
12308 if (!validate_arglist (exp,
12309 POINTER_TYPE,
12310 fcode == BUILT_IN_MEMSET_CHK
12311 ? INTEGER_TYPE : POINTER_TYPE,
12312 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12313 return NULL_RTX;
12314
12315 dest = CALL_EXPR_ARG (exp, 0);
12316 src = CALL_EXPR_ARG (exp, 1);
12317 len = CALL_EXPR_ARG (exp, 2);
12318 size = CALL_EXPR_ARG (exp, 3);
12319
12320 if (! host_integerp (size, 1))
12321 return NULL_RTX;
12322
12323 if (host_integerp (len, 1) || integer_all_onesp (size))
12324 {
12325 tree fn;
12326
12327 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12328 {
12329 warning_at (tree_nonartificial_location (exp),
12330 0, "%Kcall to %D will always overflow destination buffer",
12331 exp, get_callee_fndecl (exp));
12332 return NULL_RTX;
12333 }
12334
12335 fn = NULL_TREE;
12336 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12337 mem{cpy,pcpy,move,set} is available. */
12338 switch (fcode)
12339 {
12340 case BUILT_IN_MEMCPY_CHK:
12341 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12342 break;
12343 case BUILT_IN_MEMPCPY_CHK:
12344 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12345 break;
12346 case BUILT_IN_MEMMOVE_CHK:
12347 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12348 break;
12349 case BUILT_IN_MEMSET_CHK:
12350 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12351 break;
12352 default:
12353 break;
12354 }
12355
12356 if (! fn)
12357 return NULL_RTX;
12358
12359 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12360 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12361 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12362 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12363 }
12364 else if (fcode == BUILT_IN_MEMSET_CHK)
12365 return NULL_RTX;
12366 else
12367 {
12368 unsigned int dest_align = get_pointer_alignment (dest);
12369
12370 /* If DEST is not a pointer type, call the normal function. */
12371 if (dest_align == 0)
12372 return NULL_RTX;
12373
12374 /* If SRC and DEST are the same (and not volatile), do nothing. */
12375 if (operand_equal_p (src, dest, 0))
12376 {
12377 tree expr;
12378
12379 if (fcode != BUILT_IN_MEMPCPY_CHK)
12380 {
12381 /* Evaluate and ignore LEN in case it has side-effects. */
12382 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12383 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12384 }
12385
12386 expr = fold_build_pointer_plus (dest, len);
12387 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12388 }
12389
12390 /* __memmove_chk special case. */
12391 if (fcode == BUILT_IN_MEMMOVE_CHK)
12392 {
12393 unsigned int src_align = get_pointer_alignment (src);
12394
12395 if (src_align == 0)
12396 return NULL_RTX;
12397
12398 /* If src is categorized for a readonly section we can use
12399 normal __memcpy_chk. */
12400 if (readonly_data_expr (src))
12401 {
12402 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12403 if (!fn)
12404 return NULL_RTX;
12405 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12406 dest, src, len, size);
12407 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12408 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12409 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12410 }
12411 }
12412 return NULL_RTX;
12413 }
12414 }
12415
12416 /* Emit warning if a buffer overflow is detected at compile time. */
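/* E.g. (editor's illustration, not in the original):
     char buf[4]; __strcpy_chk (buf, "too long", 4);
   copies strlen ("too long") + 1 == 9 bytes into a 4-byte object, so
   the warning below fires at compile time instead of relying on the
   runtime __chk_fail.  */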
12417
12418 static void
12419 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12420 {
12421 int is_strlen = 0;
12422 tree len, size;
12423 location_t loc = tree_nonartificial_location (exp);
12424
12425 switch (fcode)
12426 {
12427 case BUILT_IN_STRCPY_CHK:
12428 case BUILT_IN_STPCPY_CHK:
12429 /* For __strcat_chk the warning will be emitted only if overflowing
12430 by at least strlen (dest) + 1 bytes. */
12431 case BUILT_IN_STRCAT_CHK:
12432 len = CALL_EXPR_ARG (exp, 1);
12433 size = CALL_EXPR_ARG (exp, 2);
12434 is_strlen = 1;
12435 break;
12436 case BUILT_IN_STRNCAT_CHK:
12437 case BUILT_IN_STRNCPY_CHK:
12438 case BUILT_IN_STPNCPY_CHK:
12439 len = CALL_EXPR_ARG (exp, 2);
12440 size = CALL_EXPR_ARG (exp, 3);
12441 break;
12442 case BUILT_IN_SNPRINTF_CHK:
12443 case BUILT_IN_VSNPRINTF_CHK:
12444 len = CALL_EXPR_ARG (exp, 1);
12445 size = CALL_EXPR_ARG (exp, 3);
12446 break;
12447 default:
12448 gcc_unreachable ();
12449 }
12450
12451 if (!len || !size)
12452 return;
12453
12454 if (! host_integerp (size, 1) || integer_all_onesp (size))
12455 return;
12456
12457 if (is_strlen)
12458 {
12459 len = c_strlen (len, 1);
12460 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12461 return;
12462 }
12463 else if (fcode == BUILT_IN_STRNCAT_CHK)
12464 {
12465 tree src = CALL_EXPR_ARG (exp, 1);
12466 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12467 return;
12468 src = c_strlen (src, 1);
12469 if (! src || ! host_integerp (src, 1))
12470 {
12471 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12472 exp, get_callee_fndecl (exp));
12473 return;
12474 }
12475 else if (tree_int_cst_lt (src, size))
12476 return;
12477 }
12478 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12479 return;
12480
12481 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12482 exp, get_callee_fndecl (exp));
12483 }
12484
12485 /* Emit warning if a buffer overflow is detected at compile time
12486 in __sprintf_chk/__vsprintf_chk calls. */
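/* E.g. (editor's illustration):
     char buf[4]; __sprintf_chk (buf, 0, 4, "%s", "abcdef");
   has a known output length of 6 >= 4, so the overflow warning below
   triggers.  */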
12487
12488 static void
12489 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12490 {
12491 tree size, len, fmt;
12492 const char *fmt_str;
12493 int nargs = call_expr_nargs (exp);
12494
12495 /* Verify the required arguments in the original call. */
12496
12497 if (nargs < 4)
12498 return;
12499 size = CALL_EXPR_ARG (exp, 2);
12500 fmt = CALL_EXPR_ARG (exp, 3);
12501
12502 if (! host_integerp (size, 1) || integer_all_onesp (size))
12503 return;
12504
12505 /* Check whether the format is a literal string constant. */
12506 fmt_str = c_getstr (fmt);
12507 if (fmt_str == NULL)
12508 return;
12509
12510 if (!init_target_chars ())
12511 return;
12512
12513 /* If the format doesn't contain % args or %%, we know its size. */
12514 if (strchr (fmt_str, target_percent) == 0)
12515 len = build_int_cstu (size_type_node, strlen (fmt_str));
12516 /* If the format is "%s" and first ... argument is a string literal,
12517 we know it too. */
12518 else if (fcode == BUILT_IN_SPRINTF_CHK
12519 && strcmp (fmt_str, target_percent_s) == 0)
12520 {
12521 tree arg;
12522
12523 if (nargs < 5)
12524 return;
12525 arg = CALL_EXPR_ARG (exp, 4);
12526 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12527 return;
12528
12529 len = c_strlen (arg, 1);
12530 if (!len || ! host_integerp (len, 1))
12531 return;
12532 }
12533 else
12534 return;
12535
12536 if (! tree_int_cst_lt (len, size))
12537 warning_at (tree_nonartificial_location (exp),
12538 0, "%Kcall to %D will always overflow destination buffer",
12539 exp, get_callee_fndecl (exp));
12540 }
12541
12542 /* Emit warning if a free is called with address of a variable. */
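/* E.g. (editor's illustration): for
     int x; free (&x);
   the argument's base is a local variable rather than heap storage,
   so -Wfree-nonheap-object warns below.  */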
12543
12544 static void
12545 maybe_emit_free_warning (tree exp)
12546 {
12547 tree arg = CALL_EXPR_ARG (exp, 0);
12548
12549 STRIP_NOPS (arg);
12550 if (TREE_CODE (arg) != ADDR_EXPR)
12551 return;
12552
12553 arg = get_base_address (TREE_OPERAND (arg, 0));
12554 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12555 return;
12556
12557 if (SSA_VAR_P (arg))
12558 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12559 "%Kattempt to free a non-heap object %qD", exp, arg);
12560 else
12561 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12562 "%Kattempt to free a non-heap object", exp);
12563 }
12564
12565 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12566 if possible. */
12567
12568 tree
12569 fold_builtin_object_size (tree ptr, tree ost)
12570 {
12571 unsigned HOST_WIDE_INT bytes;
12572 int object_size_type;
12573
12574 if (!validate_arg (ptr, POINTER_TYPE)
12575 || !validate_arg (ost, INTEGER_TYPE))
12576 return NULL_TREE;
12577
12578 STRIP_NOPS (ost);
12579
12580 if (TREE_CODE (ost) != INTEGER_CST
12581 || tree_int_cst_sgn (ost) < 0
12582 || compare_tree_int (ost, 3) > 0)
12583 return NULL_TREE;
12584
12585 object_size_type = tree_low_cst (ost, 0);
12586
12587 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12588 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12589 and (size_t) 0 for types 2 and 3. */
12590 if (TREE_SIDE_EFFECTS (ptr))
12591 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12592
12593 if (TREE_CODE (ptr) == ADDR_EXPR)
12594 {
12595 bytes = compute_builtin_object_size (ptr, object_size_type);
12596 if (double_int_fits_to_tree_p (size_type_node,
12597 double_int::from_uhwi (bytes)))
12598 return build_int_cstu (size_type_node, bytes);
12599 }
12600 else if (TREE_CODE (ptr) == SSA_NAME)
12601 {
12602 /* If object size is not known yet, delay folding until
12603 later. Maybe subsequent passes will help determining
12604 it. */
12605 bytes = compute_builtin_object_size (ptr, object_size_type);
12606 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12607 && double_int_fits_to_tree_p (size_type_node,
12608 double_int::from_uhwi (bytes)))
12609 return build_int_cstu (size_type_node, bytes);
12610 }
12611
12612 return NULL_TREE;
12613 }
12614
12615 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12616 DEST, SRC, LEN, and SIZE are the arguments to the call.
12617    IGNORE is true if the return value can be ignored.  FCODE is the BUILT_IN_*
12618 code of the builtin. If MAXLEN is not NULL, it is maximum length
12619 passed as third argument. */
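/* Sketch of the folds below (editor's example; "d", "s", "p", "n" and
   "sz" are hypothetical, and the plain builtins are assumed to be
   available):

     __memcpy_chk (d, s, 5, 16)          =>  memcpy (d, s, 5)
     (void) __mempcpy_chk (d, s, n, sz)  =>  (void) __memcpy_chk (d, s, n, sz)
     __memcpy_chk (p, p, n, sz)          =>  p  */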
12620
12621 tree
12622 fold_builtin_memory_chk (location_t loc, tree fndecl,
12623 tree dest, tree src, tree len, tree size,
12624 tree maxlen, bool ignore,
12625 enum built_in_function fcode)
12626 {
12627 tree fn;
12628
12629 if (!validate_arg (dest, POINTER_TYPE)
12630 || !validate_arg (src,
12631 (fcode == BUILT_IN_MEMSET_CHK
12632 ? INTEGER_TYPE : POINTER_TYPE))
12633 || !validate_arg (len, INTEGER_TYPE)
12634 || !validate_arg (size, INTEGER_TYPE))
12635 return NULL_TREE;
12636
12637 /* If SRC and DEST are the same (and not volatile), return DEST
12638 (resp. DEST+LEN for __mempcpy_chk). */
12639 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12640 {
12641 if (fcode != BUILT_IN_MEMPCPY_CHK)
12642 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12643 dest, len);
12644 else
12645 {
12646 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12647 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12648 }
12649 }
12650
12651 if (! host_integerp (size, 1))
12652 return NULL_TREE;
12653
12654 if (! integer_all_onesp (size))
12655 {
12656 if (! host_integerp (len, 1))
12657 {
12658 /* If LEN is not constant, try MAXLEN too.
12659 For MAXLEN only allow optimizing into non-_ocs function
12660 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12661 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12662 {
12663 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12664 {
12665 /* (void) __mempcpy_chk () can be optimized into
12666 (void) __memcpy_chk (). */
12667 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12668 if (!fn)
12669 return NULL_TREE;
12670
12671 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12672 }
12673 return NULL_TREE;
12674 }
12675 }
12676 else
12677 maxlen = len;
12678
12679 if (tree_int_cst_lt (size, maxlen))
12680 return NULL_TREE;
12681 }
12682
12683 fn = NULL_TREE;
12684 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12685 mem{cpy,pcpy,move,set} is available. */
12686 switch (fcode)
12687 {
12688 case BUILT_IN_MEMCPY_CHK:
12689 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12690 break;
12691 case BUILT_IN_MEMPCPY_CHK:
12692 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12693 break;
12694 case BUILT_IN_MEMMOVE_CHK:
12695 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12696 break;
12697 case BUILT_IN_MEMSET_CHK:
12698 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12699 break;
12700 default:
12701 break;
12702 }
12703
12704 if (!fn)
12705 return NULL_TREE;
12706
12707 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12708 }
12709
12710 /* Fold a call to the __st[rp]cpy_chk builtin.
12711 DEST, SRC, and SIZE are the arguments to the call.
12712 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12713 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12714 strings passed as second argument. */
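/* Illustration (editor's sketch): with SIZE a constant,

     __strcpy_chk (d, "abc", 8)  =>  strcpy (d, "abc")

   and when c_strlen (SRC) is known but not constant the call becomes
   __memcpy_chk (d, src, strlen_expr + 1, 8) instead.  */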
12715
12716 tree
12717 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12718 tree src, tree size,
12719 tree maxlen, bool ignore,
12720 enum built_in_function fcode)
12721 {
12722 tree len, fn;
12723
12724 if (!validate_arg (dest, POINTER_TYPE)
12725 || !validate_arg (src, POINTER_TYPE)
12726 || !validate_arg (size, INTEGER_TYPE))
12727 return NULL_TREE;
12728
12729 /* If SRC and DEST are the same (and not volatile), return DEST. */
12730 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12731 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12732
12733 if (! host_integerp (size, 1))
12734 return NULL_TREE;
12735
12736 if (! integer_all_onesp (size))
12737 {
12738 len = c_strlen (src, 1);
12739 if (! len || ! host_integerp (len, 1))
12740 {
12741 /* If LEN is not constant, try MAXLEN too.
12742 For MAXLEN only allow optimizing into non-_ocs function
12743 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12744 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12745 {
12746 if (fcode == BUILT_IN_STPCPY_CHK)
12747 {
12748 if (! ignore)
12749 return NULL_TREE;
12750
12751 /* If return value of __stpcpy_chk is ignored,
12752 optimize into __strcpy_chk. */
12753 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12754 if (!fn)
12755 return NULL_TREE;
12756
12757 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12758 }
12759
12760 if (! len || TREE_SIDE_EFFECTS (len))
12761 return NULL_TREE;
12762
12763 /* If c_strlen returned something, but not a constant,
12764 transform __strcpy_chk into __memcpy_chk. */
12765 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12766 if (!fn)
12767 return NULL_TREE;
12768
12769 len = fold_convert_loc (loc, size_type_node, len);
12770 len = size_binop_loc (loc, PLUS_EXPR, len,
12771 build_int_cst (size_type_node, 1));
12772 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12773 build_call_expr_loc (loc, fn, 4,
12774 dest, src, len, size));
12775 }
12776 }
12777 else
12778 maxlen = len;
12779
12780 if (! tree_int_cst_lt (maxlen, size))
12781 return NULL_TREE;
12782 }
12783
12784 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12785 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12786 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12787 if (!fn)
12788 return NULL_TREE;
12789
12790 return build_call_expr_loc (loc, fn, 2, dest, src);
12791 }
12792
12793 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12794 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12795 length passed as third argument. IGNORE is true if return value can be
12796 ignored. FCODE is the BUILT_IN_* code of the builtin. */
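/* Illustration (editor's sketch):

     (void) __stpncpy_chk (d, s, n, sz)  =>  (void) __strncpy_chk (d, s, n, sz)
     __strncpy_chk (d, s, 4, 16)         =>  strncpy (d, s, 4)  */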
12797
12798 tree
12799 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12800 tree len, tree size, tree maxlen, bool ignore,
12801 enum built_in_function fcode)
12802 {
12803 tree fn;
12804
12805 if (!validate_arg (dest, POINTER_TYPE)
12806 || !validate_arg (src, POINTER_TYPE)
12807 || !validate_arg (len, INTEGER_TYPE)
12808 || !validate_arg (size, INTEGER_TYPE))
12809 return NULL_TREE;
12810
12811 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12812 {
12813 /* If return value of __stpncpy_chk is ignored,
12814 optimize into __strncpy_chk. */
12815 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12816 if (fn)
12817 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12818 }
12819
12820 if (! host_integerp (size, 1))
12821 return NULL_TREE;
12822
12823 if (! integer_all_onesp (size))
12824 {
12825 if (! host_integerp (len, 1))
12826 {
12827 /* If LEN is not constant, try MAXLEN too.
12828 For MAXLEN only allow optimizing into non-_ocs function
12829 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12830 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12831 return NULL_TREE;
12832 }
12833 else
12834 maxlen = len;
12835
12836 if (tree_int_cst_lt (size, maxlen))
12837 return NULL_TREE;
12838 }
12839
12840 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12841 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12842 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12843 if (!fn)
12844 return NULL_TREE;
12845
12846 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12847 }
12848
12849 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12850 are the arguments to the call. */
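/* Illustration (editor's sketch): __strcat_chk (d, "", sz) folds to d,
   and with sz == (size_t) -1 (unknown object size) the call folds to
   plain strcat (d, s).  */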
12851
12852 static tree
12853 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12854 tree src, tree size)
12855 {
12856 tree fn;
12857 const char *p;
12858
12859 if (!validate_arg (dest, POINTER_TYPE)
12860 || !validate_arg (src, POINTER_TYPE)
12861 || !validate_arg (size, INTEGER_TYPE))
12862 return NULL_TREE;
12863
12864 p = c_getstr (src);
12865 /* If the SRC parameter is "", return DEST. */
12866 if (p && *p == '\0')
12867 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12868
12869 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12870 return NULL_TREE;
12871
12872 /* If __builtin_strcat_chk is used, assume strcat is available. */
12873 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12874 if (!fn)
12875 return NULL_TREE;
12876
12877 return build_call_expr_loc (loc, fn, 2, dest, src);
12878 }
12879
12880 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12881 LEN, and SIZE. */
12882
12883 static tree
12884 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12885 tree dest, tree src, tree len, tree size)
12886 {
12887 tree fn;
12888 const char *p;
12889
12890 if (!validate_arg (dest, POINTER_TYPE)
12891 || !validate_arg (src, POINTER_TYPE)
12892       || !validate_arg (len, INTEGER_TYPE)
12893       || !validate_arg (size, INTEGER_TYPE))
12894 return NULL_TREE;
12895
12896 p = c_getstr (src);
12897 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12898 if (p && *p == '\0')
12899 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12900 else if (integer_zerop (len))
12901 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12902
12903 if (! host_integerp (size, 1))
12904 return NULL_TREE;
12905
12906 if (! integer_all_onesp (size))
12907 {
12908 tree src_len = c_strlen (src, 1);
12909 if (src_len
12910 && host_integerp (src_len, 1)
12911 && host_integerp (len, 1)
12912 && ! tree_int_cst_lt (len, src_len))
12913 {
12914 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12915 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
12916 if (!fn)
12917 return NULL_TREE;
12918
12919 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12920 }
12921 return NULL_TREE;
12922 }
12923
12924 /* If __builtin_strncat_chk is used, assume strncat is available. */
12925 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
12926 if (!fn)
12927 return NULL_TREE;
12928
12929 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12930 }
12931
12932 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12933 Return NULL_TREE if a normal call should be emitted rather than
12934 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12935 or BUILT_IN_VSPRINTF_CHK. */
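/* Illustration (editor's sketch): __sprintf_chk (buf, 0, 16, "hi")
   folds to sprintf (buf, "hi") because the known output length 2 is
   below the object size 16 and the flag is 0.  */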
12936
12937 static tree
12938 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12939 enum built_in_function fcode)
12940 {
12941 tree dest, size, len, fn, fmt, flag;
12942 const char *fmt_str;
12943
12944 /* Verify the required arguments in the original call. */
12945 if (nargs < 4)
12946 return NULL_TREE;
12947 dest = args[0];
12948 if (!validate_arg (dest, POINTER_TYPE))
12949 return NULL_TREE;
12950 flag = args[1];
12951 if (!validate_arg (flag, INTEGER_TYPE))
12952 return NULL_TREE;
12953 size = args[2];
12954 if (!validate_arg (size, INTEGER_TYPE))
12955 return NULL_TREE;
12956 fmt = args[3];
12957 if (!validate_arg (fmt, POINTER_TYPE))
12958 return NULL_TREE;
12959
12960 if (! host_integerp (size, 1))
12961 return NULL_TREE;
12962
12963 len = NULL_TREE;
12964
12965 if (!init_target_chars ())
12966 return NULL_TREE;
12967
12968 /* Check whether the format is a literal string constant. */
12969 fmt_str = c_getstr (fmt);
12970 if (fmt_str != NULL)
12971 {
12972 /* If the format doesn't contain % args or %%, we know the size. */
12973 if (strchr (fmt_str, target_percent) == 0)
12974 {
12975 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12976 len = build_int_cstu (size_type_node, strlen (fmt_str));
12977 }
12978 /* If the format is "%s" and first ... argument is a string literal,
12979 we know the size too. */
12980 else if (fcode == BUILT_IN_SPRINTF_CHK
12981 && strcmp (fmt_str, target_percent_s) == 0)
12982 {
12983 tree arg;
12984
12985 if (nargs == 5)
12986 {
12987 arg = args[4];
12988 if (validate_arg (arg, POINTER_TYPE))
12989 {
12990 len = c_strlen (arg, 1);
12991 if (! len || ! host_integerp (len, 1))
12992 len = NULL_TREE;
12993 }
12994 }
12995 }
12996 }
12997
12998 if (! integer_all_onesp (size))
12999 {
13000 if (! len || ! tree_int_cst_lt (len, size))
13001 return NULL_TREE;
13002 }
13003
13004 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13005 or if format doesn't contain % chars or is "%s". */
13006 if (! integer_zerop (flag))
13007 {
13008 if (fmt_str == NULL)
13009 return NULL_TREE;
13010 if (strchr (fmt_str, target_percent) != NULL
13011 && strcmp (fmt_str, target_percent_s))
13012 return NULL_TREE;
13013 }
13014
13015 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13016 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13017 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13018 if (!fn)
13019 return NULL_TREE;
13020
13021 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13022 }
13023
13024 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13025 a normal call should be emitted rather than expanding the function
13026 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13027
13028 static tree
13029 fold_builtin_sprintf_chk (location_t loc, tree exp,
13030 enum built_in_function fcode)
13031 {
13032 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13033 CALL_EXPR_ARGP (exp), fcode);
13034 }
13035
13036 /* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS.  Return
13037 NULL_TREE if a normal call should be emitted rather than expanding
13038 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13039 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13040 passed as second argument. */
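/* Illustration (editor's sketch): __snprintf_chk (buf, 8, 0, 16, "%s", s)
   folds to snprintf (buf, 8, "%s", s), since the constant LEN 8 does
   not exceed the object size 16 and the flag is 0.  */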
13041
13042 static tree
13043 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13044 tree maxlen, enum built_in_function fcode)
13045 {
13046 tree dest, size, len, fn, fmt, flag;
13047 const char *fmt_str;
13048
13049 /* Verify the required arguments in the original call. */
13050 if (nargs < 5)
13051 return NULL_TREE;
13052 dest = args[0];
13053 if (!validate_arg (dest, POINTER_TYPE))
13054 return NULL_TREE;
13055 len = args[1];
13056 if (!validate_arg (len, INTEGER_TYPE))
13057 return NULL_TREE;
13058 flag = args[2];
13059 if (!validate_arg (flag, INTEGER_TYPE))
13060 return NULL_TREE;
13061 size = args[3];
13062 if (!validate_arg (size, INTEGER_TYPE))
13063 return NULL_TREE;
13064 fmt = args[4];
13065 if (!validate_arg (fmt, POINTER_TYPE))
13066 return NULL_TREE;
13067
13068 if (! host_integerp (size, 1))
13069 return NULL_TREE;
13070
13071 if (! integer_all_onesp (size))
13072 {
13073 if (! host_integerp (len, 1))
13074 {
13075 /* If LEN is not constant, try MAXLEN too.
13076 For MAXLEN only allow optimizing into non-_ocs function
13077 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13078 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13079 return NULL_TREE;
13080 }
13081 else
13082 maxlen = len;
13083
13084 if (tree_int_cst_lt (size, maxlen))
13085 return NULL_TREE;
13086 }
13087
13088 if (!init_target_chars ())
13089 return NULL_TREE;
13090
13091 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13092 or if format doesn't contain % chars or is "%s". */
13093 if (! integer_zerop (flag))
13094 {
13095 fmt_str = c_getstr (fmt);
13096 if (fmt_str == NULL)
13097 return NULL_TREE;
13098 if (strchr (fmt_str, target_percent) != NULL
13099 && strcmp (fmt_str, target_percent_s))
13100 return NULL_TREE;
13101 }
13102
13103 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13104 available. */
13105 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13106 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13107 if (!fn)
13108 return NULL_TREE;
13109
13110 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13111 }
13112
13113 /* Fold a call EXP to __{,v}snprintf_chk.  Return NULL_TREE if
13114 a normal call should be emitted rather than expanding the function
13115 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13116 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13117 passed as second argument. */
13118
13119 tree
13120 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13121 enum built_in_function fcode)
13122 {
13123 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13124 CALL_EXPR_ARGP (exp), maxlen, fcode);
13125 }
13126
13127 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13128 FMT and ARG are the arguments to the call; we don't fold cases with
13129 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13130
13131 Return NULL_TREE if no simplification was possible, otherwise return the
13132 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13133 code of the function to be simplified. */
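/* Summary of the rewrites below (editor's illustration, all with the
   return value unused):

     printf ("")         =>  0, no call emitted
     printf ("x")        =>  putchar ('x')
     printf ("foo\n")    =>  puts ("foo")
     printf ("%s\n", s)  =>  puts (s)
     printf ("%c", c)    =>  putchar (c)  */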
13134
13135 static tree
13136 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13137 tree arg, bool ignore,
13138 enum built_in_function fcode)
13139 {
13140 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13141 const char *fmt_str = NULL;
13142
13143 /* If the return value is used, don't do the transformation. */
13144 if (! ignore)
13145 return NULL_TREE;
13146
13147 /* Verify the required arguments in the original call. */
13148 if (!validate_arg (fmt, POINTER_TYPE))
13149 return NULL_TREE;
13150
13151 /* Check whether the format is a literal string constant. */
13152 fmt_str = c_getstr (fmt);
13153 if (fmt_str == NULL)
13154 return NULL_TREE;
13155
13156 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13157 {
13158 /* If we're using an unlocked function, assume the other
13159 unlocked functions exist explicitly. */
13160 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13161 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13162 }
13163 else
13164 {
13165 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13166 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13167 }
13168
13169 if (!init_target_chars ())
13170 return NULL_TREE;
13171
13172 if (strcmp (fmt_str, target_percent_s) == 0
13173 || strchr (fmt_str, target_percent) == NULL)
13174 {
13175 const char *str;
13176
13177 if (strcmp (fmt_str, target_percent_s) == 0)
13178 {
13179 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13180 return NULL_TREE;
13181
13182 if (!arg || !validate_arg (arg, POINTER_TYPE))
13183 return NULL_TREE;
13184
13185 str = c_getstr (arg);
13186 if (str == NULL)
13187 return NULL_TREE;
13188 }
13189 else
13190 {
13191 /* The format specifier doesn't contain any '%' characters. */
13192 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13193 && arg)
13194 return NULL_TREE;
13195 str = fmt_str;
13196 }
13197
13198 /* If the string was "", printf does nothing. */
13199 if (str[0] == '\0')
13200 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13201
13202 /* If the string has length of 1, call putchar. */
13203 if (str[1] == '\0')
13204 {
13205 	      /* Given printf ("c"), where c is any single character,
13206 		 convert "c"[0] to an int and pass that to the replacement
13207 		 function.  */
13208 newarg = build_int_cst (integer_type_node, str[0]);
13209 if (fn_putchar)
13210 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13211 }
13212 else
13213 {
13214 /* If the string was "string\n", call puts("string"). */
13215 size_t len = strlen (str);
13216 if ((unsigned char)str[len - 1] == target_newline
13217 && (size_t) (int) len == len
13218 && (int) len > 0)
13219 {
13220 char *newstr;
13221 tree offset_node, string_cst;
13222
13223 /* Create a NUL-terminated string that's one char shorter
13224 than the original, stripping off the trailing '\n'. */
13225 newarg = build_string_literal (len, str);
13226 string_cst = string_constant (newarg, &offset_node);
13227 gcc_checking_assert (string_cst
13228 && (TREE_STRING_LENGTH (string_cst)
13229 == (int) len)
13230 && integer_zerop (offset_node)
13231 && (unsigned char)
13232 TREE_STRING_POINTER (string_cst)[len - 1]
13233 == target_newline);
13234 /* build_string_literal creates a new STRING_CST,
13235 modify it in place to avoid double copying. */
13236 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13237 newstr[len - 1] = '\0';
13238 if (fn_puts)
13239 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13240 }
13241 else
13242 /* We'd like to arrange to call fputs(string,stdout) here,
13243 but we need stdout and don't have a way to get it yet. */
13244 return NULL_TREE;
13245 }
13246 }
13247
13248 /* The other optimizations can be done only on the non-va_list variants. */
13249 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13250 return NULL_TREE;
13251
13252 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13253 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13254 {
13255 if (!arg || !validate_arg (arg, POINTER_TYPE))
13256 return NULL_TREE;
13257 if (fn_puts)
13258 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13259 }
13260
13261 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13262 else if (strcmp (fmt_str, target_percent_c) == 0)
13263 {
13264 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13265 return NULL_TREE;
13266 if (fn_putchar)
13267 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13268 }
13269
13270 if (!call)
13271 return NULL_TREE;
13272
13273 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13274 }
13275
13276 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13277 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13278 more than 3 arguments, and ARG may be null in the 2-argument case.
13279
13280 Return NULL_TREE if no simplification was possible, otherwise return the
13281 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13282 code of the function to be simplified. */
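/* Summary of the rewrites below (editor's illustration, return value
   unused):

     fprintf (fp, "foo")    =>  fputs ("foo", fp)
     fprintf (fp, "%s", s)  =>  fputs (s, fp)
     fprintf (fp, "%c", c)  =>  fputc (c, fp)  */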
13283
13284 static tree
13285 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13286 tree fmt, tree arg, bool ignore,
13287 enum built_in_function fcode)
13288 {
13289 tree fn_fputc, fn_fputs, call = NULL_TREE;
13290 const char *fmt_str = NULL;
13291
13292 /* If the return value is used, don't do the transformation. */
13293 if (! ignore)
13294 return NULL_TREE;
13295
13296 /* Verify the required arguments in the original call. */
13297 if (!validate_arg (fp, POINTER_TYPE))
13298 return NULL_TREE;
13299 if (!validate_arg (fmt, POINTER_TYPE))
13300 return NULL_TREE;
13301
13302 /* Check whether the format is a literal string constant. */
13303 fmt_str = c_getstr (fmt);
13304 if (fmt_str == NULL)
13305 return NULL_TREE;
13306
13307 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13308 {
13309 /* If we're using an unlocked function, assume the other
13310 unlocked functions exist explicitly. */
13311 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13312 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13313 }
13314 else
13315 {
13316 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13317 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13318 }
13319
13320 if (!init_target_chars ())
13321 return NULL_TREE;
13322
13323   /* If the format doesn't contain % args or %%, use fputs.  */
13324 if (strchr (fmt_str, target_percent) == NULL)
13325 {
13326 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13327 && arg)
13328 return NULL_TREE;
13329
13330 /* If the format specifier was "", fprintf does nothing. */
13331 if (fmt_str[0] == '\0')
13332 {
13333 /* If FP has side-effects, just wait until gimplification is
13334 done. */
13335 if (TREE_SIDE_EFFECTS (fp))
13336 return NULL_TREE;
13337
13338 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13339 }
13340
13341 /* When "string" doesn't contain %, replace all cases of
13342 fprintf (fp, string) with fputs (string, fp). The fputs
13343 builtin will take care of special cases like length == 1. */
13344 if (fn_fputs)
13345 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13346 }
13347
13348 /* The other optimizations can be done only on the non-va_list variants. */
13349 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13350 return NULL_TREE;
13351
13352 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13353 else if (strcmp (fmt_str, target_percent_s) == 0)
13354 {
13355 if (!arg || !validate_arg (arg, POINTER_TYPE))
13356 return NULL_TREE;
13357 if (fn_fputs)
13358 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13359 }
13360
13361 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13362 else if (strcmp (fmt_str, target_percent_c) == 0)
13363 {
13364 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13365 return NULL_TREE;
13366 if (fn_fputc)
13367 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13368 }
13369
13370 if (!call)
13371 return NULL_TREE;
13372 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13373 }
13374
13375 /* Initialize format string characters in the target charset. */
13376
13377 static bool
13378 init_target_chars (void)
13379 {
13380 static bool init;
13381 if (!init)
13382 {
13383 target_newline = lang_hooks.to_target_charset ('\n');
13384 target_percent = lang_hooks.to_target_charset ('%');
13385 target_c = lang_hooks.to_target_charset ('c');
13386 target_s = lang_hooks.to_target_charset ('s');
13387 if (target_newline == 0 || target_percent == 0 || target_c == 0
13388 || target_s == 0)
13389 return false;
13390
13391 target_percent_c[0] = target_percent;
13392 target_percent_c[1] = target_c;
13393 target_percent_c[2] = '\0';
13394
13395 target_percent_s[0] = target_percent;
13396 target_percent_s[1] = target_s;
13397 target_percent_s[2] = '\0';
13398
13399 target_percent_s_newline[0] = target_percent;
13400 target_percent_s_newline[1] = target_s;
13401 target_percent_s_newline[2] = target_newline;
13402 target_percent_s_newline[3] = '\0';
13403
13404 init = true;
13405 }
13406 return true;
13407 }
13408
13409 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13410 and no overflow/underflow occurred. INEXACT is true if M was not
13411 exactly calculated. TYPE is the tree type for the result. This
13412 function assumes that you cleared the MPFR flags and then
13413 calculated M to see if anything subsequently set a flag prior to
13414 entering this function. Return NULL_TREE if any checks fail. */
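/* Typical call sequence (editor's sketch of the protocol described
   above, mirroring do_mpfr_arg1 below):

     mpfr_clear_flags ();
     inexact = mpfr_sin (m, m, rnd);
     result = do_mpfr_ckconv (m, type, inexact);  */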
13415
13416 static tree
13417 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13418 {
13419 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13420 overflow/underflow occurred. If -frounding-math, proceed iff the
13421 result of calling FUNC was exact. */
13422 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13423 && (!flag_rounding_math || !inexact))
13424 {
13425 REAL_VALUE_TYPE rr;
13426
13427 real_from_mpfr (&rr, m, type, GMP_RNDN);
13428 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13429 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13430 	 but the mpfr_t is not, then we underflowed in the
13431 conversion. */
13432 if (real_isfinite (&rr)
13433 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13434 {
13435 REAL_VALUE_TYPE rmode;
13436
13437 real_convert (&rmode, TYPE_MODE (type), &rr);
13438 /* Proceed iff the specified mode can hold the value. */
13439 if (real_identical (&rmode, &rr))
13440 return build_real (type, rmode);
13441 }
13442 }
13443 return NULL_TREE;
13444 }
13445
13446 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13447 number and no overflow/underflow occurred. INEXACT is true if M
13448 was not exactly calculated. TYPE is the tree type for the result.
13449 This function assumes that you cleared the MPFR flags and then
13450 calculated M to see if anything subsequently set a flag prior to
13451 entering this function. Return NULL_TREE if any checks fail, if
13452 FORCE_CONVERT is true, then bypass the checks. */
13453
13454 static tree
13455 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13456 {
13457 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13458 overflow/underflow occurred. If -frounding-math, proceed iff the
13459 result of calling FUNC was exact. */
13460 if (force_convert
13461 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13462 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13463 && (!flag_rounding_math || !inexact)))
13464 {
13465 REAL_VALUE_TYPE re, im;
13466
13467 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13468 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13469 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13470 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13471 	 but the mpfr_t is not, then we underflowed in the
13472 conversion. */
13473 if (force_convert
13474 || (real_isfinite (&re) && real_isfinite (&im)
13475 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13476 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13477 {
13478 REAL_VALUE_TYPE re_mode, im_mode;
13479
13480 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13481 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13482 /* Proceed iff the specified mode can hold the value. */
13483 if (force_convert
13484 || (real_identical (&re_mode, &re)
13485 && real_identical (&im_mode, &im)))
13486 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13487 build_real (TREE_TYPE (type), im_mode));
13488 }
13489 }
13490 return NULL_TREE;
13491 }
13492
13493 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13494 FUNC on it and return the resulting value as a tree with type TYPE.
13495 If MIN and/or MAX are not NULL, then the supplied ARG must be
13496 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13497 acceptable values, otherwise they are not. The mpfr precision is
13498 set to the precision of TYPE. We assume that function FUNC returns
13499 zero if the result could be calculated exactly within the requested
13500 precision. */
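/* E.g. (editor's sketch; the call sites are assumed, with dconstm1 and
   dconst1 the usual REAL_VALUE_TYPE constants): folding sin (x) for
   constant X uses
     do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false);
   while asin restricts the argument to [-1, 1] inclusively:
     do_mpfr_arg1 (arg, type, mpfr_asin, &dconstm1, &dconst1, true);  */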
13501
13502 static tree
13503 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13504 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13505 bool inclusive)
13506 {
13507 tree result = NULL_TREE;
13508
13509 STRIP_NOPS (arg);
13510
13511 /* To proceed, MPFR must exactly represent the target floating point
13512 format, which only happens when the target base equals two. */
13513 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13514 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13515 {
13516 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13517
13518 if (real_isfinite (ra)
13519 	  && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
13520 	  && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
13521 {
13522 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13523 const int prec = fmt->p;
13524 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13525 int inexact;
13526 mpfr_t m;
13527
13528 mpfr_init2 (m, prec);
13529 mpfr_from_real (m, ra, GMP_RNDN);
13530 mpfr_clear_flags ();
13531 inexact = func (m, m, rnd);
13532 result = do_mpfr_ckconv (m, type, inexact);
13533 mpfr_clear (m);
13534 }
13535 }
13536
13537 return result;
13538 }
13539
13540 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13541 FUNC on it and return the resulting value as a tree with type TYPE.
13542 The mpfr precision is set to the precision of TYPE. We assume that
13543 function FUNC returns zero if the result could be calculated
13544 exactly within the requested precision. */
13545
13546 static tree
13547 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13548 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13549 {
13550 tree result = NULL_TREE;
13551
13552 STRIP_NOPS (arg1);
13553 STRIP_NOPS (arg2);
13554
13555 /* To proceed, MPFR must exactly represent the target floating point
13556 format, which only happens when the target base equals two. */
13557 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13558 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13559 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13560 {
13561 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13562 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13563
13564 if (real_isfinite (ra1) && real_isfinite (ra2))
13565 {
13566 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13567 const int prec = fmt->p;
13568 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13569 int inexact;
13570 mpfr_t m1, m2;
13571
13572 mpfr_inits2 (prec, m1, m2, NULL);
13573 mpfr_from_real (m1, ra1, GMP_RNDN);
13574 mpfr_from_real (m2, ra2, GMP_RNDN);
13575 mpfr_clear_flags ();
13576 inexact = func (m1, m1, m2, rnd);
13577 result = do_mpfr_ckconv (m1, type, inexact);
13578 mpfr_clears (m1, m2, NULL);
13579 }
13580 }
13581
13582 return result;
13583 }
13584
13585 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13586 FUNC on it and return the resulting value as a tree with type TYPE.
13587 The mpfr precision is set to the precision of TYPE. We assume that
13588 function FUNC returns zero if the result could be calculated
13589 exactly within the requested precision. */
13590
13591 static tree
13592 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13593 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13594 {
13595 tree result = NULL_TREE;
13596
13597 STRIP_NOPS (arg1);
13598 STRIP_NOPS (arg2);
13599 STRIP_NOPS (arg3);
13600
13601 /* To proceed, MPFR must exactly represent the target floating point
13602 format, which only happens when the target base equals two. */
13603 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13604 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13605 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13606 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13607 {
13608 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13609 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13610 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13611
13612 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13613 {
13614 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13615 const int prec = fmt->p;
13616 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13617 int inexact;
13618 mpfr_t m1, m2, m3;
13619
13620 mpfr_inits2 (prec, m1, m2, m3, NULL);
13621 mpfr_from_real (m1, ra1, GMP_RNDN);
13622 mpfr_from_real (m2, ra2, GMP_RNDN);
13623 mpfr_from_real (m3, ra3, GMP_RNDN);
13624 mpfr_clear_flags ();
13625 inexact = func (m1, m1, m2, m3, rnd);
13626 result = do_mpfr_ckconv (m1, type, inexact);
13627 mpfr_clears (m1, m2, m3, NULL);
13628 }
13629 }
13630
13631 return result;
13632 }
13633
13634 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13635 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13636 If ARG_SINP and ARG_COSP are NULL then the result is returned
13637 as a complex value.
13638 The type is taken from the type of ARG and is used for setting the
13639 precision of the calculation and results. */
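/* E.g. (editor's illustration, assuming the usual fold sites):
   sincos (x, &s, &c) with constant X folds to the pair of assignments
   built below, while cexpi (x) passes NULL pointers and takes the
   complex-value return path.  */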
13640
13641 static tree
13642 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13643 {
13644 tree const type = TREE_TYPE (arg);
13645 tree result = NULL_TREE;
13646
13647 STRIP_NOPS (arg);
13648
13649 /* To proceed, MPFR must exactly represent the target floating point
13650 format, which only happens when the target base equals two. */
13651 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13652 && TREE_CODE (arg) == REAL_CST
13653 && !TREE_OVERFLOW (arg))
13654 {
13655 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13656
13657 if (real_isfinite (ra))
13658 {
13659 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13660 const int prec = fmt->p;
13661 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13662 tree result_s, result_c;
13663 int inexact;
13664 mpfr_t m, ms, mc;
13665
13666 mpfr_inits2 (prec, m, ms, mc, NULL);
13667 mpfr_from_real (m, ra, GMP_RNDN);
13668 mpfr_clear_flags ();
13669 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13670 result_s = do_mpfr_ckconv (ms, type, inexact);
13671 result_c = do_mpfr_ckconv (mc, type, inexact);
13672 mpfr_clears (m, ms, mc, NULL);
13673 if (result_s && result_c)
13674 {
13675 /* If we are to return in a complex value do so. */
13676 if (!arg_sinp && !arg_cosp)
13677 return build_complex (build_complex_type (type),
13678 result_c, result_s);
13679
13680 /* Dereference the sin/cos pointer arguments. */
13681 arg_sinp = build_fold_indirect_ref (arg_sinp);
13682 arg_cosp = build_fold_indirect_ref (arg_cosp);
13683 	      /* Proceed only if valid pointer types were passed in.  */
13684 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13685 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13686 {
13687 /* Set the values. */
13688 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13689 result_s);
13690 TREE_SIDE_EFFECTS (result_s) = 1;
13691 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13692 result_c);
13693 TREE_SIDE_EFFECTS (result_c) = 1;
13694 /* Combine the assignments into a compound expr. */
13695 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13696 result_s, result_c));
13697 }
13698 }
13699 }
13700 }
13701 return result;
13702 }
13703
13704 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13705 two-argument mpfr order N Bessel function FUNC on them and return
13706 the resulting value as a tree with type TYPE. The mpfr precision
13707 is set to the precision of TYPE. We assume that function FUNC
13708 returns zero if the result could be calculated exactly within the
13709 requested precision. */
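/* E.g. (editor's sketch, assuming the usual fold sites): folding
   jn (2, x) for constant X calls
     do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, false);
   yn additionally requires a strictly positive argument.  */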
13710 static tree
13711 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13712 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13713 const REAL_VALUE_TYPE *min, bool inclusive)
13714 {
13715 tree result = NULL_TREE;
13716
13717 STRIP_NOPS (arg1);
13718 STRIP_NOPS (arg2);
13719
13720 /* To proceed, MPFR must exactly represent the target floating point
13721 format, which only happens when the target base equals two. */
13722 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13723 && host_integerp (arg1, 0)
13724 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13725 {
13726 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
13727 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13728
13729 if (n == (long)n
13730 && real_isfinite (ra)
13731 	  && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
13732 {
13733 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13734 const int prec = fmt->p;
13735 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13736 int inexact;
13737 mpfr_t m;
13738
13739 mpfr_init2 (m, prec);
13740 mpfr_from_real (m, ra, GMP_RNDN);
13741 mpfr_clear_flags ();
13742 inexact = func (m, n, m, rnd);
13743 result = do_mpfr_ckconv (m, type, inexact);
13744 mpfr_clear (m);
13745 }
13746 }
13747
13748 return result;
13749 }
13750
13751 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13752 the pointer *(ARG_QUO) and return the result. The type is taken
13753 from the type of ARG0 and is used for setting the precision of the
13754 calculation and results. */
13755
13756 static tree
13757 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13758 {
13759 tree const type = TREE_TYPE (arg0);
13760 tree result = NULL_TREE;
13761
13762 STRIP_NOPS (arg0);
13763 STRIP_NOPS (arg1);
13764
13765 /* To proceed, MPFR must exactly represent the target floating point
13766 format, which only happens when the target base equals two. */
13767 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13768 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13769 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13770 {
13771 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13772 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13773
13774 if (real_isfinite (ra0) && real_isfinite (ra1))
13775 {
13776 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13777 const int prec = fmt->p;
13778 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13779 tree result_rem;
13780 long integer_quo;
13781 mpfr_t m0, m1;
13782
13783 mpfr_inits2 (prec, m0, m1, NULL);
13784 mpfr_from_real (m0, ra0, GMP_RNDN);
13785 mpfr_from_real (m1, ra1, GMP_RNDN);
13786 mpfr_clear_flags ();
13787 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13788 /* Remquo is independent of the rounding mode, so pass
13789 inexact=0 to do_mpfr_ckconv(). */
13790 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13791 mpfr_clears (m0, m1, NULL);
13792 if (result_rem)
13793 {
13794 /* MPFR calculates quo in the host's long so it may
13795 return more bits in quo than the target int can hold
13796 if sizeof(host long) > sizeof(target int). This can
13797 happen even for native compilers in LP64 mode. In
13798 these cases, modulo the quo value with the largest
13799 number that the target int can hold while leaving one
13800 bit for the sign. */
13801 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13802 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13803
13804 /* Dereference the quo pointer argument. */
13805 arg_quo = build_fold_indirect_ref (arg_quo);
13806 /* Proceed iff a valid pointer type was passed in. */
13807 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13808 {
13809 /* Set the value. */
13810 tree result_quo
13811 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13812 build_int_cst (TREE_TYPE (arg_quo),
13813 integer_quo));
13814 TREE_SIDE_EFFECTS (result_quo) = 1;
13815 /* Combine the quo assignment with the rem. */
13816 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13817 result_quo, result_rem));
13818 }
13819 }
13820 }
13821 }
13822 return result;
13823 }
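
/* Illustrative example (editor's sketch, not from the original
   source): given constant arguments, e.g.

     int q;
     double r = __builtin_remquo (10.0, 3.0, &q);

   the folder above can replace the call with a COMPOUND_EXPR that
   stores 3 into q (the quotient rounded to nearest) and yields the
   remainder 1.0, since 10 = 3*3 + 1.  */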

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointed to by ARG_SG
   is set to the appropriate signgam value, -1 or 1.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
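
/* Illustrative example (editor's sketch, not from the original
   source): for a constant argument, e.g.

     int sg;
     double d = __builtin_lgamma_r (3.0, &sg);

   gamma(3) = 2, so the call can fold to an assignment of 1 into sg
   combined with a REAL_CST approximating ln(2); the result is
   inexact, which do_mpfr_ckconv accepts unless -frounding-math is in
   effect.  */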

/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}
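
/* Illustrative example (editor's sketch, not from the original
   source): this helper backs one-argument complex builtins such as
   ccos, so a constant call like

     _Complex double z = __builtin_ccos (0.0 + 0.0i);

   can be folded through mpc_cos to the COMPLEX_CST 1.0 + 0.0i.  */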

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
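
/* Illustrative example (editor's sketch, not from the original
   source): this helper backs two-argument complex builtins such as
   cpow, so

     _Complex double z = __builtin_cpow (2.0 + 0.0i, 3.0 + 0.0i);

   folds through mpc_pow to 8.0 + 0.0i.  Callers pass DO_NONFINITE
   nonzero when folding through Inf/NaN operands is acceptable, e.g.
   under -funsafe-math-optimizations.  */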

/* Fold a call STMT to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
                                     (nargs > 0
                                      ? gimple_call_arg_ptr (stmt, 0)
                                      : &error_mark_node), fcode);
}
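
/* Illustrative example (editor's sketch, not from the original
   source): with a constant format containing no '%' directives, a
   checked call such as

     __builtin___sprintf_chk (buf, 0, bos, "hello");

   (where bos is the object size from __builtin_object_size) has a
   compile-time-known output length; when that length is known to fit
   in bos, the shared fold_builtin_sprintf_chk_1 logic can fold the
   call to the unchecked sprintf form, which later folding may in
   turn reduce to a plain string copy.  */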

/* Fold a call STMT to __{,v}snprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is the maximum
   length passed as the second argument.  */

tree
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
                                  enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
                                      (nargs > 0
                                       ? gimple_call_arg_ptr (stmt, 0)
                                       : &error_mark_node), maxlen, fcode);
}

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, STMT is the GIMPLE call statement, and IGNORE is true if
   the result of the function call is ignored.  */

static tree
gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
                             bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (!ret)
            ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
          if (ret)
            {
              /* Propagate location information from the original call to
                 the expansion of the builtin.  Otherwise things like
                 maybe_emit_chk_warning, which operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
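
/* Illustrative example (editor's sketch, not from the original
   source): when a pass folds a statement such as

     n_1 = __builtin_strlen ("abc");

   through fold_call_stmt, the generic folders replace the call with
   the constant 3 wrapped in a NOP_EXPR carrying TREE_NO_WARNING, and
   the code above copies the call's location onto the replacement so
   later diagnostics point at the right line.  */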

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}
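
/* Illustrative example (editor's sketch, not from the original
   source): a freestanding program can redirect a builtin with an asm
   rename, e.g.

     void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   after which the BUILT_IN_MEMCPY case above makes both block-move
   expansion and the memcpy libfunc emit calls to my_memcpy.  */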

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one
   that is most probably expanded inline into reasonably simple code.
   This is a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
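
/* Illustrative example (editor's sketch, not from the original
   source): size/speed heuristics can use this predicate to treat a
   call such as

     __builtin_popcount (x)

   as cheap when estimating a candidate for inlining or unrolling,
   since it is listed above, while an arbitrary external call would
   count as expensive.  */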