/* Expand builtin functions.
   Copyright (C) 1988-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "realmpfr.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "ubsan.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees; make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strcat (location_t, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
                                  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
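
/* For example (illustrative only, not compiled):

     is_builtin_name ("__builtin_memcpy")  => true
     is_builtin_name ("__atomic_load_n")   => true
     is_builtin_name ("__memcpy")          => false

   Note that the matching is purely textual, so a user-declared
   function whose name happens to begin with one of these prefixes is
   treated the same way.  */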


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* By default we assume that C99 functions are present at run time,
   but sincos is not.  */
bool
default_libc_has_function (enum function_class fn_class)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}

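/* These predicates are wired up through the libc_has_function target
   hook.  A minimal sketch of how a target might select one of them
   (details of any real target will differ) is, in that target's
   tm.c file:

     #undef TARGET_LIBC_HAS_FUNCTION
     #define TARGET_LIBC_HAS_FUNCTION no_c99_libc_has_function

   after which code here can ask targetm.libc_has_function
   (function_c99_misc) before expanding a call that assumes C99
   library support.  */
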
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT in *ALIGNP and any bit-offset in *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int inner, align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  inner = ~0U;
  while (offset)
    {
      tree next_offset;

      if (TREE_CODE (offset) == PLUS_EXPR)
        {
          next_offset = TREE_OPERAND (offset, 0);
          offset = TREE_OPERAND (offset, 1);
        }
      else
        next_offset = NULL;
      if (host_integerp (offset, 1))
        {
          /* Any overflow in calculating offset_bits won't change
             the alignment.  */
          unsigned offset_bits
            = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

          if (offset_bits)
            inner = MIN (inner, (offset_bits & -offset_bits));
        }
      else if (TREE_CODE (offset) == MULT_EXPR
               && host_integerp (TREE_OPERAND (offset, 1), 1))
        {
          /* Any overflow in calculating offset_factor won't change
             the alignment.  */
          unsigned offset_factor
            = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
               * BITS_PER_UNIT);

          if (offset_factor)
            inner = MIN (inner, (offset_factor & -offset_factor));
        }
      else
        {
          inner = MIN (inner, BITS_PER_UNIT);
          break;
        }
      offset = next_offset;
    }
  /* Alignment is innermost object alignment adjusted by the constant
     and non-constant offset parts.  */
  align = MIN (align, inner);

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT in *ALIGNP and any bit-offset in *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}

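/* A worked example of the contract above (illustrative numbers only):
   if get_object_alignment_1 sets *ALIGNP = 256 and *BITPOSP = 64, the
   object's address is known to be 8 bytes past a 32-byte boundary,
   i.e. addr % 32 == 8.  get_object_alignment then folds the
   misaligned case down to the largest power of two that divides the
   address: bitpos & -bitpos = 64 bits, so it returns an alignment of
   64 bits (8 bytes).  */
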
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, BITS_PER_UNIT is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

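/* An illustrative sketch of the SSA_NAME path (the names are
   hypothetical): if pointer analysis has recorded for an SSA name P
   that P = Q + 4 where Q is 16-byte aligned, SSA_NAME_PTR_INFO gives
   ptr_align = 16 and ptr_misalign = 4, so get_pointer_alignment_1
   reports *ALIGNP = 128 bits and *BITPOSP = 32 bits, and
   get_pointer_alignment folds that to 32 & -32 = 32 bits, i.e. P
   itself is only known to be 4-byte aligned.  */
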
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}

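/* For example (illustrative only): given the source expression
   &"foo\0bar"[1], string_constant yields the STRING_CST "foo\0bar"
   with offset 1, and c_strlen returns ssize_int (2), the distance to
   the first embedded null.  With a variable offset into the same
   string, the internal zero byte makes the length unknowable here,
   so NULL_TREE is returned instead.  */
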
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}

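/* E.g. (illustrative only): for the STRING_CST "hello" with constant
   offset 2, c_getstr returns a host pointer to "llo"; an offset that
   is not a host integer, or one past TREE_STRING_LENGTH - 1, yields
   0.  */
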
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);

      if (ch)
        ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}

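/* A worked example (illustrative only): on a little-endian target,
   c_readstr ("ab", HImode) packs 'a' (0x61) into bits 0-7 and 'b'
   (0x62) into bits 8-15, producing the constant 0x6261; on a
   big-endian target the bytes land in the opposite order, giving
   0x6162.  Once the terminating null has been read, CH stays 0 and
   any remaining bytes of the mode are zero-filled.  */
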
/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = TREE_INT_CST_LOW (cst);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

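/* For example (illustrative only), a call such as

     void *ra = __builtin_return_address (1);

   reaches this code with FNDECL_CODE == BUILT_IN_RETURN_ADDRESS and
   COUNT == 1: the loop above follows the dynamic chain one frame up
   and the return address is then loaded relative to that frame.
   Nonzero counts rely on every intervening frame having a frame
   pointer, so the result is only as reliable as the target's frame
   layout makes it.  */
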
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

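/* To summarize the layout built above (a sketch; the save-area size
   is machine-dependent), the __builtin_setjmp buffer holds

     word 0:   frame pointer (targetm.builtin_setjmp_frame_value)
     word 1:   address of the receiver label
     words 2+: stack save area (sa_mode)

   expand_builtin_longjmp below reads the words back at the same
   offsets, so the two functions must agree on this layout.  */
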
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be decrementing fp by
       STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  Similarly, we must block
     (frame-related) register values to be used across this code.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require the user to pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}

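/* As a usage sketch (illustrative only; these builtins are intended
   for internal exception-handling support, not user code):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       do_work ();                  /* direct return path       */
     else
       recover ();                  /* reached via the longjmp  */
     ...
     __builtin_longjmp (buf, 1);    /* second argument must be 1 */

   do_work and recover are placeholders for whatever the caller does
   on each path.  */
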
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

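/* A typical source-level use (illustrative only), prefetching a block
   that will be written soon with moderate temporal locality:

     for (i = 0; i < n; i++)
       {
         __builtin_prefetch (&a[i + 8], 1, 2);
         a[i] = process (a[i]);
       }

   Here 1 selects the write variant and 2 the locality hint; both must
   be literal constants, as enforced above.  a and process are
   placeholders.  */
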
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is a SAVE_EXPR that has not yet been resolved, MEM_ATTRS can
     still be derived from its expression; for expr->a.b only <variable>.a.b
     is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}

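/* A worked sketch of the layout this computes (register numbers and
   modes are hypothetical): with a Pmode of 8 bytes, no struct-value
   slot, and two argument registers of DImode (8 bytes) and XFmode
   (16 bytes, 16-byte aligned), the block is

     bytes  0- 7:  incoming arg-pointer
     bytes  8-15:  DImode argument register
     bytes 16-31:  XFmode argument register

   for a total size of 32; the rounding of SIZE up to each mode's
   alignment is the CEIL computation above.  */
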
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed it to us, not
     as we might have pretended it was passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

1651 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1652 needs to be done, and we don't want to load it into a register as an
1653 optimization, because prepare_call_address already did that if needed. */
1654 if (GET_CODE (function) != SYMBOL_REF)
1655 function = memory_address (FUNCTION_MODE, function);
1656
1657 /* Generate the actual call instruction and save the return value. */
1658 #ifdef HAVE_untyped_call
1659 if (HAVE_untyped_call)
1660 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1661 result, result_vector (1, result)));
1662 else
1663 #endif
1664 #ifdef HAVE_call_value
1665 if (HAVE_call_value)
1666 {
1667 rtx valreg = 0;
1668
1669 /* Locate the unique return register. It is not possible to
1670 express a call that sets more than one return register using
1671 call_value; use untyped_call for that. In fact, untyped_call
1672 only needs to save the return registers in the given block. */
1673 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1674 if ((mode = apply_result_mode[regno]) != VOIDmode)
1675 {
1676 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1677
1678 valreg = gen_rtx_REG (mode, regno);
1679 }
1680
1681 emit_call_insn (GEN_CALL_VALUE (valreg,
1682 gen_rtx_MEM (FUNCTION_MODE, function),
1683 const0_rtx, NULL_RTX, const0_rtx));
1684
1685 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1686 }
1687 else
1688 #endif
1689 gcc_unreachable ();
1690
1691 /* Find the CALL insn we just emitted, and attach the register usage
1692 information. */
1693 call_insn = last_call_insn ();
1694 add_function_usage_to (call_insn, call_fusage);
1695
1696 /* Restore the stack. */
1697 #ifdef HAVE_save_stack_nonlocal
1698 if (HAVE_save_stack_nonlocal)
1699 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1700 else
1701 #endif
1702 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1703 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1704
1705 OK_DEFER_POP;
1706
1707 /* Return the address of the result block. */
1708 result = copy_addr_to_reg (XEXP (result, 0));
1709 return convert_memory_address (ptr_mode, result);
1710 }
1711
1712 /* Perform an untyped return. */
1713
1714 static void
1715 expand_builtin_return (rtx result)
1716 {
1717 int size, align, regno;
1718 enum machine_mode mode;
1719 rtx reg;
1720 rtx call_fusage = 0;
1721
1722 result = convert_memory_address (Pmode, result);
1723
1724 apply_result_size ();
1725 result = gen_rtx_MEM (BLKmode, result);
1726
1727 #ifdef HAVE_untyped_return
1728 if (HAVE_untyped_return)
1729 {
1730 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1731 emit_barrier ();
1732 return;
1733 }
1734 #endif
1735
1736 /* Restore the return value and note that each value is used. */
1737 size = 0;
1738 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1739 if ((mode = apply_result_mode[regno]) != VOIDmode)
1740 {
1741 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1742 if (size % align != 0)
1743 size = CEIL (size, align) * align;
1744 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1745 emit_move_insn (reg, adjust_address (result, mode, size));
1746
1747 push_to_sequence (call_fusage);
1748 emit_use (reg);
1749 call_fusage = get_insns ();
1750 end_sequence ();
1751 size += GET_MODE_SIZE (mode);
1752 }
1753
1754 /* Put the USE insns before the return. */
1755 emit_insn (call_fusage);
1756
1757 /* Return whatever value was restored by jumping directly to the end
1758 of the function. */
1759 expand_naked_return ();
1760 }
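
/* Illustrative sketch (editor's addition, not part of the GCC sources):
   the three builtins expanded above are meant to be combined in user
   code to forward a call transparently. The argument-block size 64 is
   a hypothetical upper bound chosen for the example.

        int target_fn (int, int);

        int wrapper (int a, int b)
        {
          void *args = __builtin_apply_args ();
          void *ret = __builtin_apply ((void (*) ()) &target_fn, args, 64);
          __builtin_return (ret);
        }

   Here target_fn is any function whose result fits in the saved return
   registers; __builtin_return does not return normally. */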
1761
1762 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1763
1764 static enum type_class
1765 type_to_class (tree type)
1766 {
1767 switch (TREE_CODE (type))
1768 {
1769 case VOID_TYPE: return void_type_class;
1770 case INTEGER_TYPE: return integer_type_class;
1771 case ENUMERAL_TYPE: return enumeral_type_class;
1772 case BOOLEAN_TYPE: return boolean_type_class;
1773 case POINTER_TYPE: return pointer_type_class;
1774 case REFERENCE_TYPE: return reference_type_class;
1775 case OFFSET_TYPE: return offset_type_class;
1776 case REAL_TYPE: return real_type_class;
1777 case COMPLEX_TYPE: return complex_type_class;
1778 case FUNCTION_TYPE: return function_type_class;
1779 case METHOD_TYPE: return method_type_class;
1780 case RECORD_TYPE: return record_type_class;
1781 case UNION_TYPE:
1782 case QUAL_UNION_TYPE: return union_type_class;
1783 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1784 ? string_type_class : array_type_class);
1785 case LANG_TYPE: return lang_type_class;
1786 default: return no_type_class;
1787 }
1788 }
1789
1790 /* Expand a call EXP to __builtin_classify_type. */
1791
1792 static rtx
1793 expand_builtin_classify_type (tree exp)
1794 {
1795 if (call_expr_nargs (exp))
1796 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1797 return GEN_INT (no_type_class);
1798 }
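
/* Illustrative note (editor's addition): with the type_class values from
   typeclass.h, __builtin_classify_type (1.5) folds to real_type_class,
   and __builtin_classify_type ("") to pointer_type_class, since the
   array argument decays to a pointer before the builtin sees it. */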
1799
1800 /* This helper macro, meant to be used in mathfn_built_in below,
1801 determines which among a set of three builtin math functions is
1802 appropriate for a given type mode. The `F' and `L' cases are
1803 automatically generated from the `double' case. */
1804 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1805 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1806 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1807 fcodel = BUILT_IN_MATHFN##L ; break;
1808 /* Similar to above, but appends _R after any F/L suffix. */
1809 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1810 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1811 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1812 fcodel = BUILT_IN_MATHFN##L_R ; break;
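
/* Illustrative note (editor's addition): CASE_MATHFN (BUILT_IN_SIN)
   expands to

        case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
          fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
          fcodel = BUILT_IN_SINL; break;

   so a single invocation covers the double, float and long double
   variants of one math builtin. */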
1813
1814 /* Return the mathematical function equivalent to FN, operating directly on TYPE,
1815 if available. If IMPLICIT is true use the implicit builtin declaration,
1816 otherwise use the explicit declaration. If we can't do the conversion,
1817 return zero. */
1818
1819 static tree
1820 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1821 {
1822 enum built_in_function fcode, fcodef, fcodel, fcode2;
1823
1824 switch (fn)
1825 {
1826 CASE_MATHFN (BUILT_IN_ACOS)
1827 CASE_MATHFN (BUILT_IN_ACOSH)
1828 CASE_MATHFN (BUILT_IN_ASIN)
1829 CASE_MATHFN (BUILT_IN_ASINH)
1830 CASE_MATHFN (BUILT_IN_ATAN)
1831 CASE_MATHFN (BUILT_IN_ATAN2)
1832 CASE_MATHFN (BUILT_IN_ATANH)
1833 CASE_MATHFN (BUILT_IN_CBRT)
1834 CASE_MATHFN (BUILT_IN_CEIL)
1835 CASE_MATHFN (BUILT_IN_CEXPI)
1836 CASE_MATHFN (BUILT_IN_COPYSIGN)
1837 CASE_MATHFN (BUILT_IN_COS)
1838 CASE_MATHFN (BUILT_IN_COSH)
1839 CASE_MATHFN (BUILT_IN_DREM)
1840 CASE_MATHFN (BUILT_IN_ERF)
1841 CASE_MATHFN (BUILT_IN_ERFC)
1842 CASE_MATHFN (BUILT_IN_EXP)
1843 CASE_MATHFN (BUILT_IN_EXP10)
1844 CASE_MATHFN (BUILT_IN_EXP2)
1845 CASE_MATHFN (BUILT_IN_EXPM1)
1846 CASE_MATHFN (BUILT_IN_FABS)
1847 CASE_MATHFN (BUILT_IN_FDIM)
1848 CASE_MATHFN (BUILT_IN_FLOOR)
1849 CASE_MATHFN (BUILT_IN_FMA)
1850 CASE_MATHFN (BUILT_IN_FMAX)
1851 CASE_MATHFN (BUILT_IN_FMIN)
1852 CASE_MATHFN (BUILT_IN_FMOD)
1853 CASE_MATHFN (BUILT_IN_FREXP)
1854 CASE_MATHFN (BUILT_IN_GAMMA)
1855 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1856 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1857 CASE_MATHFN (BUILT_IN_HYPOT)
1858 CASE_MATHFN (BUILT_IN_ILOGB)
1859 CASE_MATHFN (BUILT_IN_ICEIL)
1860 CASE_MATHFN (BUILT_IN_IFLOOR)
1861 CASE_MATHFN (BUILT_IN_INF)
1862 CASE_MATHFN (BUILT_IN_IRINT)
1863 CASE_MATHFN (BUILT_IN_IROUND)
1864 CASE_MATHFN (BUILT_IN_ISINF)
1865 CASE_MATHFN (BUILT_IN_J0)
1866 CASE_MATHFN (BUILT_IN_J1)
1867 CASE_MATHFN (BUILT_IN_JN)
1868 CASE_MATHFN (BUILT_IN_LCEIL)
1869 CASE_MATHFN (BUILT_IN_LDEXP)
1870 CASE_MATHFN (BUILT_IN_LFLOOR)
1871 CASE_MATHFN (BUILT_IN_LGAMMA)
1872 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1873 CASE_MATHFN (BUILT_IN_LLCEIL)
1874 CASE_MATHFN (BUILT_IN_LLFLOOR)
1875 CASE_MATHFN (BUILT_IN_LLRINT)
1876 CASE_MATHFN (BUILT_IN_LLROUND)
1877 CASE_MATHFN (BUILT_IN_LOG)
1878 CASE_MATHFN (BUILT_IN_LOG10)
1879 CASE_MATHFN (BUILT_IN_LOG1P)
1880 CASE_MATHFN (BUILT_IN_LOG2)
1881 CASE_MATHFN (BUILT_IN_LOGB)
1882 CASE_MATHFN (BUILT_IN_LRINT)
1883 CASE_MATHFN (BUILT_IN_LROUND)
1884 CASE_MATHFN (BUILT_IN_MODF)
1885 CASE_MATHFN (BUILT_IN_NAN)
1886 CASE_MATHFN (BUILT_IN_NANS)
1887 CASE_MATHFN (BUILT_IN_NEARBYINT)
1888 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1889 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1890 CASE_MATHFN (BUILT_IN_POW)
1891 CASE_MATHFN (BUILT_IN_POWI)
1892 CASE_MATHFN (BUILT_IN_POW10)
1893 CASE_MATHFN (BUILT_IN_REMAINDER)
1894 CASE_MATHFN (BUILT_IN_REMQUO)
1895 CASE_MATHFN (BUILT_IN_RINT)
1896 CASE_MATHFN (BUILT_IN_ROUND)
1897 CASE_MATHFN (BUILT_IN_SCALB)
1898 CASE_MATHFN (BUILT_IN_SCALBLN)
1899 CASE_MATHFN (BUILT_IN_SCALBN)
1900 CASE_MATHFN (BUILT_IN_SIGNBIT)
1901 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1902 CASE_MATHFN (BUILT_IN_SIN)
1903 CASE_MATHFN (BUILT_IN_SINCOS)
1904 CASE_MATHFN (BUILT_IN_SINH)
1905 CASE_MATHFN (BUILT_IN_SQRT)
1906 CASE_MATHFN (BUILT_IN_TAN)
1907 CASE_MATHFN (BUILT_IN_TANH)
1908 CASE_MATHFN (BUILT_IN_TGAMMA)
1909 CASE_MATHFN (BUILT_IN_TRUNC)
1910 CASE_MATHFN (BUILT_IN_Y0)
1911 CASE_MATHFN (BUILT_IN_Y1)
1912 CASE_MATHFN (BUILT_IN_YN)
1913
1914 default:
1915 return NULL_TREE;
1916 }
1917
1918 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1919 fcode2 = fcode;
1920 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1921 fcode2 = fcodef;
1922 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1923 fcode2 = fcodel;
1924 else
1925 return NULL_TREE;
1926
1927 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1928 return NULL_TREE;
1929
1930 return builtin_decl_explicit (fcode2);
1931 }
1932
1933 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1934
1935 tree
1936 mathfn_built_in (tree type, enum built_in_function fn)
1937 {
1938 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1939 }
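
/* Illustrative note (editor's addition): for example,

        mathfn_built_in (float_type_node, BUILT_IN_SIN)

   returns the decl for BUILT_IN_SINF (assuming it is implicitly
   available), and NULL_TREE if TYPE is not double, float or
   long double. */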
1940
1941 /* If errno must be maintained, expand the RTL to check if the result,
1942 TARGET, of a built-in function call, EXP, is NaN, and if so set
1943 errno to EDOM. */
1944
1945 static void
1946 expand_errno_check (tree exp, rtx target)
1947 {
1948 rtx lab = gen_label_rtx ();
1949
1950 /* Test the result; if it is NaN, set errno=EDOM because
1951 the argument was not in the domain. */
1952 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1953 NULL_RTX, NULL_RTX, lab,
1954 /* The jump is very likely. */
1955 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1956
1957 #ifdef TARGET_EDOM
1958 /* If this built-in doesn't throw an exception, set errno directly. */
1959 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1960 {
1961 #ifdef GEN_ERRNO_RTX
1962 rtx errno_rtx = GEN_ERRNO_RTX;
1963 #else
1964 rtx errno_rtx
1965 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1966 #endif
1967 emit_move_insn (errno_rtx,
1968 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1969 emit_label (lab);
1970 return;
1971 }
1972 #endif
1973
1974 /* Make sure the library call isn't expanded as a tail call. */
1975 CALL_EXPR_TAILCALL (exp) = 0;
1976
1977 /* We can't set errno=EDOM directly; let the library call do it.
1978 Pop the arguments right away in case the call gets deleted. */
1979 NO_DEFER_POP;
1980 expand_call (exp, target, 0);
1981 OK_DEFER_POP;
1982 emit_label (lab);
1983 }
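
/* Illustrative note (editor's addition): the self-comparison above works
   because a NaN is the only value that compares unequal to itself, so
   the emitted code behaves like

        if (! (result == result))
          errno = EDOM;

   where the EQ branch around the store is predicted as very likely. */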
1984
1985 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1986 Return NULL_RTX if a normal call should be emitted rather than expanding
1987 the function in-line. EXP is the expression that is a call to the builtin
1988 function; if convenient, the result should be placed in TARGET.
1989 SUBTARGET may be used as the target for computing one of EXP's operands. */
1990
1991 static rtx
1992 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1993 {
1994 optab builtin_optab;
1995 rtx op0, insns;
1996 tree fndecl = get_callee_fndecl (exp);
1997 enum machine_mode mode;
1998 bool errno_set = false;
1999 bool try_widening = false;
2000 tree arg;
2001
2002 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2003 return NULL_RTX;
2004
2005 arg = CALL_EXPR_ARG (exp, 0);
2006
2007 switch (DECL_FUNCTION_CODE (fndecl))
2008 {
2009 CASE_FLT_FN (BUILT_IN_SQRT):
2010 errno_set = ! tree_expr_nonnegative_p (arg);
2011 try_widening = true;
2012 builtin_optab = sqrt_optab;
2013 break;
2014 CASE_FLT_FN (BUILT_IN_EXP):
2015 errno_set = true; builtin_optab = exp_optab; break;
2016 CASE_FLT_FN (BUILT_IN_EXP10):
2017 CASE_FLT_FN (BUILT_IN_POW10):
2018 errno_set = true; builtin_optab = exp10_optab; break;
2019 CASE_FLT_FN (BUILT_IN_EXP2):
2020 errno_set = true; builtin_optab = exp2_optab; break;
2021 CASE_FLT_FN (BUILT_IN_EXPM1):
2022 errno_set = true; builtin_optab = expm1_optab; break;
2023 CASE_FLT_FN (BUILT_IN_LOGB):
2024 errno_set = true; builtin_optab = logb_optab; break;
2025 CASE_FLT_FN (BUILT_IN_LOG):
2026 errno_set = true; builtin_optab = log_optab; break;
2027 CASE_FLT_FN (BUILT_IN_LOG10):
2028 errno_set = true; builtin_optab = log10_optab; break;
2029 CASE_FLT_FN (BUILT_IN_LOG2):
2030 errno_set = true; builtin_optab = log2_optab; break;
2031 CASE_FLT_FN (BUILT_IN_LOG1P):
2032 errno_set = true; builtin_optab = log1p_optab; break;
2033 CASE_FLT_FN (BUILT_IN_ASIN):
2034 builtin_optab = asin_optab; break;
2035 CASE_FLT_FN (BUILT_IN_ACOS):
2036 builtin_optab = acos_optab; break;
2037 CASE_FLT_FN (BUILT_IN_TAN):
2038 builtin_optab = tan_optab; break;
2039 CASE_FLT_FN (BUILT_IN_ATAN):
2040 builtin_optab = atan_optab; break;
2041 CASE_FLT_FN (BUILT_IN_FLOOR):
2042 builtin_optab = floor_optab; break;
2043 CASE_FLT_FN (BUILT_IN_CEIL):
2044 builtin_optab = ceil_optab; break;
2045 CASE_FLT_FN (BUILT_IN_TRUNC):
2046 builtin_optab = btrunc_optab; break;
2047 CASE_FLT_FN (BUILT_IN_ROUND):
2048 builtin_optab = round_optab; break;
2049 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2050 builtin_optab = nearbyint_optab;
2051 if (flag_trapping_math)
2052 break;
2053 /* Else fall through and expand as rint. */
2054 CASE_FLT_FN (BUILT_IN_RINT):
2055 builtin_optab = rint_optab; break;
2056 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2057 builtin_optab = significand_optab; break;
2058 default:
2059 gcc_unreachable ();
2060 }
2061
2062 /* Make a suitable register to place result in. */
2063 mode = TYPE_MODE (TREE_TYPE (exp));
2064
2065 if (! flag_errno_math || ! HONOR_NANS (mode))
2066 errno_set = false;
2067
2068 /* Before working hard, check whether the instruction is available, but try
2069 to widen the mode for specific operations. */
2070 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2071 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2072 && (!errno_set || !optimize_insn_for_size_p ()))
2073 {
2074 rtx result = gen_reg_rtx (mode);
2075
2076 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2077 need to expand the argument again. This way, we will not perform
2078 side-effects more than once. */
2079 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2080
2081 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2082
2083 start_sequence ();
2084
2085 /* Compute into RESULT.
2086 Set RESULT to wherever the result comes back. */
2087 result = expand_unop (mode, builtin_optab, op0, result, 0);
2088
2089 if (result != 0)
2090 {
2091 if (errno_set)
2092 expand_errno_check (exp, result);
2093
2094 /* Output the entire sequence. */
2095 insns = get_insns ();
2096 end_sequence ();
2097 emit_insn (insns);
2098 return result;
2099 }
2100
2101 /* If we were unable to expand via the builtin, stop the sequence
2102 (without outputting the insns) and call to the library function
2103 with the stabilized argument list. */
2104 end_sequence ();
2105 }
2106
2107 return expand_call (exp, target, target == const0_rtx);
2108 }
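
/* Illustrative note (editor's addition): for a call such as

        y = __builtin_sqrt (__builtin_fabs (x));

   tree_expr_nonnegative_p proves the argument non-negative, errno_set
   stays false, and the sqrt insn can be used without the errno check
   even under -fmath-errno. */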
2109
2110 /* Expand a call to the builtin binary math functions (pow and atan2).
2111 Return NULL_RTX if a normal call should be emitted rather than expanding the
2112 function in-line. EXP is the expression that is a call to the builtin
2113 function; if convenient, the result should be placed in TARGET.
2114 SUBTARGET may be used as the target for computing one of EXP's
2115 operands. */
2116
2117 static rtx
2118 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2119 {
2120 optab builtin_optab;
2121 rtx op0, op1, insns, result;
2122 int op1_type = REAL_TYPE;
2123 tree fndecl = get_callee_fndecl (exp);
2124 tree arg0, arg1;
2125 enum machine_mode mode;
2126 bool errno_set = true;
2127
2128 switch (DECL_FUNCTION_CODE (fndecl))
2129 {
2130 CASE_FLT_FN (BUILT_IN_SCALBN):
2131 CASE_FLT_FN (BUILT_IN_SCALBLN):
2132 CASE_FLT_FN (BUILT_IN_LDEXP):
2133 op1_type = INTEGER_TYPE;
2134 default:
2135 break;
2136 }
2137
2138 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2139 return NULL_RTX;
2140
2141 arg0 = CALL_EXPR_ARG (exp, 0);
2142 arg1 = CALL_EXPR_ARG (exp, 1);
2143
2144 switch (DECL_FUNCTION_CODE (fndecl))
2145 {
2146 CASE_FLT_FN (BUILT_IN_POW):
2147 builtin_optab = pow_optab; break;
2148 CASE_FLT_FN (BUILT_IN_ATAN2):
2149 builtin_optab = atan2_optab; break;
2150 CASE_FLT_FN (BUILT_IN_SCALB):
2151 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2152 return 0;
2153 builtin_optab = scalb_optab; break;
2154 CASE_FLT_FN (BUILT_IN_SCALBN):
2155 CASE_FLT_FN (BUILT_IN_SCALBLN):
2156 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2157 return 0;
2158 /* Fall through... */
2159 CASE_FLT_FN (BUILT_IN_LDEXP):
2160 builtin_optab = ldexp_optab; break;
2161 CASE_FLT_FN (BUILT_IN_FMOD):
2162 builtin_optab = fmod_optab; break;
2163 CASE_FLT_FN (BUILT_IN_REMAINDER):
2164 CASE_FLT_FN (BUILT_IN_DREM):
2165 builtin_optab = remainder_optab; break;
2166 default:
2167 gcc_unreachable ();
2168 }
2169
2170 /* Make a suitable register to place result in. */
2171 mode = TYPE_MODE (TREE_TYPE (exp));
2172
2173 /* Before working hard, check whether the instruction is available. */
2174 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2175 return NULL_RTX;
2176
2177 result = gen_reg_rtx (mode);
2178
2179 if (! flag_errno_math || ! HONOR_NANS (mode))
2180 errno_set = false;
2181
2182 if (errno_set && optimize_insn_for_size_p ())
2183 return 0;
2184
2185 /* Always stabilize the argument list. */
2186 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2187 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2188
2189 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2190 op1 = expand_normal (arg1);
2191
2192 start_sequence ();
2193
2194 /* Compute into RESULT.
2195 Set RESULT to wherever the result comes back. */
2196 result = expand_binop (mode, builtin_optab, op0, op1,
2197 result, 0, OPTAB_DIRECT);
2198
2199 /* If we were unable to expand via the builtin, stop the sequence
2200 (without outputting the insns) and call to the library function
2201 with the stabilized argument list. */
2202 if (result == 0)
2203 {
2204 end_sequence ();
2205 return expand_call (exp, target, target == const0_rtx);
2206 }
2207
2208 if (errno_set)
2209 expand_errno_check (exp, result);
2210
2211 /* Output the entire sequence. */
2212 insns = get_insns ();
2213 end_sequence ();
2214 emit_insn (insns);
2215
2216 return result;
2217 }
2218
2219 /* Expand a call to the builtin ternary math functions (fma).
2220 Return NULL_RTX if a normal call should be emitted rather than expanding the
2221 function in-line. EXP is the expression that is a call to the builtin
2222 function; if convenient, the result should be placed in TARGET.
2223 SUBTARGET may be used as the target for computing one of EXP's
2224 operands. */
2225
2226 static rtx
2227 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2228 {
2229 optab builtin_optab;
2230 rtx op0, op1, op2, insns, result;
2231 tree fndecl = get_callee_fndecl (exp);
2232 tree arg0, arg1, arg2;
2233 enum machine_mode mode;
2234
2235 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2236 return NULL_RTX;
2237
2238 arg0 = CALL_EXPR_ARG (exp, 0);
2239 arg1 = CALL_EXPR_ARG (exp, 1);
2240 arg2 = CALL_EXPR_ARG (exp, 2);
2241
2242 switch (DECL_FUNCTION_CODE (fndecl))
2243 {
2244 CASE_FLT_FN (BUILT_IN_FMA):
2245 builtin_optab = fma_optab; break;
2246 default:
2247 gcc_unreachable ();
2248 }
2249
2250 /* Make a suitable register to place result in. */
2251 mode = TYPE_MODE (TREE_TYPE (exp));
2252
2253 /* Before working hard, check whether the instruction is available. */
2254 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2255 return NULL_RTX;
2256
2257 result = gen_reg_rtx (mode);
2258
2259 /* Always stabilize the argument list. */
2260 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2261 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2262 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2263
2264 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2265 op1 = expand_normal (arg1);
2266 op2 = expand_normal (arg2);
2267
2268 start_sequence ();
2269
2270 /* Compute into RESULT.
2271 Set RESULT to wherever the result comes back. */
2272 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2273 result, 0);
2274
2275 /* If we were unable to expand via the builtin, stop the sequence
2276 (without outputting the insns) and call to the library function
2277 with the stabilized argument list. */
2278 if (result == 0)
2279 {
2280 end_sequence ();
2281 return expand_call (exp, target, target == const0_rtx);
2282 }
2283
2284 /* Output the entire sequence. */
2285 insns = get_insns ();
2286 end_sequence ();
2287 emit_insn (insns);
2288
2289 return result;
2290 }
2291
2292 /* Expand a call to the builtin sin and cos math functions.
2293 Return NULL_RTX if a normal call should be emitted rather than expanding the
2294 function in-line. EXP is the expression that is a call to the builtin
2295 function; if convenient, the result should be placed in TARGET.
2296 SUBTARGET may be used as the target for computing one of EXP's
2297 operands. */
2298
2299 static rtx
2300 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2301 {
2302 optab builtin_optab;
2303 rtx op0, insns;
2304 tree fndecl = get_callee_fndecl (exp);
2305 enum machine_mode mode;
2306 tree arg;
2307
2308 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2309 return NULL_RTX;
2310
2311 arg = CALL_EXPR_ARG (exp, 0);
2312
2313 switch (DECL_FUNCTION_CODE (fndecl))
2314 {
2315 CASE_FLT_FN (BUILT_IN_SIN):
2316 CASE_FLT_FN (BUILT_IN_COS):
2317 builtin_optab = sincos_optab; break;
2318 default:
2319 gcc_unreachable ();
2320 }
2321
2322 /* Make a suitable register to place result in. */
2323 mode = TYPE_MODE (TREE_TYPE (exp));
2324
2325 /* Check if the sincos insn is available, otherwise fall back
2326 to the sin or cos insn. */
2327 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2328 switch (DECL_FUNCTION_CODE (fndecl))
2329 {
2330 CASE_FLT_FN (BUILT_IN_SIN):
2331 builtin_optab = sin_optab; break;
2332 CASE_FLT_FN (BUILT_IN_COS):
2333 builtin_optab = cos_optab; break;
2334 default:
2335 gcc_unreachable ();
2336 }
2337
2338 /* Before working hard, check whether the instruction is available. */
2339 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2340 {
2341 rtx result = gen_reg_rtx (mode);
2342
2343 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2344 need to expand the argument again. This way, we will not perform
2345 side-effects more than once. */
2346 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2347
2348 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2349
2350 start_sequence ();
2351
2352 /* Compute into RESULT.
2353 Set RESULT to wherever the result comes back. */
2354 if (builtin_optab == sincos_optab)
2355 {
2356 int ok;
2357
2358 switch (DECL_FUNCTION_CODE (fndecl))
2359 {
2360 CASE_FLT_FN (BUILT_IN_SIN):
2361 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2362 break;
2363 CASE_FLT_FN (BUILT_IN_COS):
2364 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2365 break;
2366 default:
2367 gcc_unreachable ();
2368 }
2369 gcc_assert (ok);
2370 }
2371 else
2372 result = expand_unop (mode, builtin_optab, op0, result, 0);
2373
2374 if (result != 0)
2375 {
2376 /* Output the entire sequence. */
2377 insns = get_insns ();
2378 end_sequence ();
2379 emit_insn (insns);
2380 return result;
2381 }
2382
2383 /* If we were unable to expand via the builtin, stop the sequence
2384 (without outputting the insns) and call to the library function
2385 with the stabilized argument list. */
2386 end_sequence ();
2387 }
2388
2389 return expand_call (exp, target, target == const0_rtx);
2390 }
2391
2392 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2393 return an RTL instruction code that implements the functionality.
2394 If that isn't possible or available return CODE_FOR_nothing. */
2395
2396 static enum insn_code
2397 interclass_mathfn_icode (tree arg, tree fndecl)
2398 {
2399 bool errno_set = false;
2400 optab builtin_optab = unknown_optab;
2401 enum machine_mode mode;
2402
2403 switch (DECL_FUNCTION_CODE (fndecl))
2404 {
2405 CASE_FLT_FN (BUILT_IN_ILOGB):
2406 errno_set = true; builtin_optab = ilogb_optab; break;
2407 CASE_FLT_FN (BUILT_IN_ISINF):
2408 builtin_optab = isinf_optab; break;
2409 case BUILT_IN_ISNORMAL:
2410 case BUILT_IN_ISFINITE:
2411 CASE_FLT_FN (BUILT_IN_FINITE):
2412 case BUILT_IN_FINITED32:
2413 case BUILT_IN_FINITED64:
2414 case BUILT_IN_FINITED128:
2415 case BUILT_IN_ISINFD32:
2416 case BUILT_IN_ISINFD64:
2417 case BUILT_IN_ISINFD128:
2418 /* These builtins have no optabs (yet). */
2419 break;
2420 default:
2421 gcc_unreachable ();
2422 }
2423
2424 /* There's no easy way to detect the case we need to set EDOM. */
2425 if (flag_errno_math && errno_set)
2426 return CODE_FOR_nothing;
2427
2428 /* Optab mode depends on the mode of the input argument. */
2429 mode = TYPE_MODE (TREE_TYPE (arg));
2430
2431 if (builtin_optab)
2432 return optab_handler (builtin_optab, mode);
2433 return CODE_FOR_nothing;
2434 }
2435
2436 /* Expand a call to one of the builtin math functions that operate on
2437 a floating point argument and output an integer result (ilogb, isinf,
2438 isnan, etc).
2439 Return 0 if a normal call should be emitted rather than expanding the
2440 function in-line. EXP is the expression that is a call to the builtin
2441 function; if convenient, the result should be placed in TARGET. */
2442
2443 static rtx
2444 expand_builtin_interclass_mathfn (tree exp, rtx target)
2445 {
2446 enum insn_code icode = CODE_FOR_nothing;
2447 rtx op0;
2448 tree fndecl = get_callee_fndecl (exp);
2449 enum machine_mode mode;
2450 tree arg;
2451
2452 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2453 return NULL_RTX;
2454
2455 arg = CALL_EXPR_ARG (exp, 0);
2456 icode = interclass_mathfn_icode (arg, fndecl);
2457 mode = TYPE_MODE (TREE_TYPE (arg));
2458
2459 if (icode != CODE_FOR_nothing)
2460 {
2461 struct expand_operand ops[1];
2462 rtx last = get_last_insn ();
2463 tree orig_arg = arg;
2464
2465 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2466 need to expand the argument again. This way, we will not perform
2467 side-effects more than once. */
2468 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2469
2470 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2471
2472 if (mode != GET_MODE (op0))
2473 op0 = convert_to_mode (mode, op0, 0);
2474
2475 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2476 if (maybe_legitimize_operands (icode, 0, 1, ops)
2477 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2478 return ops[0].value;
2479
2480 delete_insns_since (last);
2481 CALL_EXPR_ARG (exp, 0) = orig_arg;
2482 }
2483
2484 return NULL_RTX;
2485 }
2486
2487 /* Expand a call to the builtin sincos math function.
2488 Return NULL_RTX if a normal call should be emitted rather than expanding the
2489 function in-line. EXP is the expression that is a call to the builtin
2490 function. */
2491
2492 static rtx
2493 expand_builtin_sincos (tree exp)
2494 {
2495 rtx op0, op1, op2, target1, target2;
2496 enum machine_mode mode;
2497 tree arg, sinp, cosp;
2498 int result;
2499 location_t loc = EXPR_LOCATION (exp);
2500 tree alias_type, alias_off;
2501
2502 if (!validate_arglist (exp, REAL_TYPE,
2503 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2504 return NULL_RTX;
2505
2506 arg = CALL_EXPR_ARG (exp, 0);
2507 sinp = CALL_EXPR_ARG (exp, 1);
2508 cosp = CALL_EXPR_ARG (exp, 2);
2509
2510 /* Make a suitable register to place result in. */
2511 mode = TYPE_MODE (TREE_TYPE (arg));
2512
2513 /* Check if sincos insn is available, otherwise emit the call. */
2514 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2515 return NULL_RTX;
2516
2517 target1 = gen_reg_rtx (mode);
2518 target2 = gen_reg_rtx (mode);
2519
2520 op0 = expand_normal (arg);
2521 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2522 alias_off = build_int_cst (alias_type, 0);
2523 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2524 sinp, alias_off));
2525 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2526 cosp, alias_off));
2527
2528 /* Compute into target1 and target2.
2529 Set TARGET to wherever the result comes back. */
2530 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2531 gcc_assert (result);
2532
2533 /* Move target1 and target2 to the memory locations indicated
2534 by op1 and op2. */
2535 emit_move_insn (op1, target1);
2536 emit_move_insn (op2, target2);
2537
2538 return const0_rtx;
2539 }
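
/* Illustrative note (editor's addition): on a target providing a sincos
   insn, a call such as

        sincos (x, &s, &c);

   expands to one instruction that produces both values; the two register
   results are then stored through the SINP and COSP pointers. */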
2540
2541 /* Expand a call to the internal cexpi builtin to the sincos math function.
2542 EXP is the expression that is a call to the builtin function; if convenient,
2543 the result should be placed in TARGET. */
2544
2545 static rtx
2546 expand_builtin_cexpi (tree exp, rtx target)
2547 {
2548 tree fndecl = get_callee_fndecl (exp);
2549 tree arg, type;
2550 enum machine_mode mode;
2551 rtx op0, op1, op2;
2552 location_t loc = EXPR_LOCATION (exp);
2553
2554 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2555 return NULL_RTX;
2556
2557 arg = CALL_EXPR_ARG (exp, 0);
2558 type = TREE_TYPE (arg);
2559 mode = TYPE_MODE (TREE_TYPE (arg));
2560
2561 /* Try expanding via a sincos optab, fall back to emitting a libcall
2562 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2563 is only generated when folding sincos or cexp, or when one of them is available. */
2564 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2565 {
2566 op1 = gen_reg_rtx (mode);
2567 op2 = gen_reg_rtx (mode);
2568
2569 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2570
2571 /* Compute into op1 and op2. */
2572 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2573 }
2574 else if (targetm.libc_has_function (function_sincos))
2575 {
2576 tree call, fn = NULL_TREE;
2577 tree top1, top2;
2578 rtx op1a, op2a;
2579
2580 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2581 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2582 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2583 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2584 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2585 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2586 else
2587 gcc_unreachable ();
2588
2589 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2590 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2591 op1a = copy_addr_to_reg (XEXP (op1, 0));
2592 op2a = copy_addr_to_reg (XEXP (op2, 0));
2593 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2594 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2595
2596 /* Make sure not to fold the sincos call again. */
2597 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2598 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2599 call, 3, arg, top1, top2));
2600 }
2601 else
2602 {
2603 tree call, fn = NULL_TREE, narg;
2604 tree ctype = build_complex_type (type);
2605
2606 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2607 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2608 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2609 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2610 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2611 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2612 else
2613 gcc_unreachable ();
2614
2615 /* If we don't have a decl for cexp, create one. This is the
2616 friendliest fallback if the user calls __builtin_cexpi on a
2617 target without full C99 function support. */
2618 if (fn == NULL_TREE)
2619 {
2620 tree fntype;
2621 const char *name = NULL;
2622
2623 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2624 name = "cexpf";
2625 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2626 name = "cexp";
2627 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2628 name = "cexpl";
2629
2630 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2631 fn = build_fn_decl (name, fntype);
2632 }
2633
2634 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2635 build_real (type, dconst0), arg);
2636
2637 /* Make sure not to fold the cexp call again. */
2638 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2639 return expand_expr (build_call_nary (ctype, call, 1, narg),
2640 target, VOIDmode, EXPAND_NORMAL);
2641 }
2642
2643 /* Now build the proper return type. */
2644 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2645 make_tree (TREE_TYPE (arg), op2),
2646 make_tree (TREE_TYPE (arg), op1)),
2647 target, VOIDmode, EXPAND_NORMAL);
2648 }
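
/* Illustrative note (editor's addition): the final fallback above relies
   on the identity cexpi (x) == cexp (x*I), i.e.

        __builtin_cexpi (x)

   is lowered to a call equivalent to cexp (0.0 + x*I), while the sincos
   paths build the complex result directly from sin (x) and cos (x). */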
2649
2650 /* Conveniently construct a function call expression. FNDECL names the
2651 function to be called, N is the number of arguments, and the "..."
2652 parameters are the argument expressions. Unlike build_call_expr
2653 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2654
2655 static tree
2656 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2657 {
2658 va_list ap;
2659 tree fntype = TREE_TYPE (fndecl);
2660 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2661
2662 va_start (ap, n);
2663 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2664 va_end (ap);
2665 SET_EXPR_LOCATION (fn, loc);
2666 return fn;
2667 }
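
/* Illustrative note (editor's addition): a typical use, seen later in
   this file, is

        tree result = build_call_nofold_loc (loc, fn, 2, dst, src);

   which yields an unfolded CALL_EXPR calling FN with two arguments; the
   caller then hands RESULT to expand_expr. */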
2668
2669 /* Expand a call to one of the builtin rounding functions gcc defines
2670 as an extension (lfloor and lceil). As these are gcc extensions we
2671 do not need to worry about setting errno to EDOM.
2672 If expanding via optab fails, lower expression to (int)(floor(x)).
2673 EXP is the expression that is a call to the builtin function;
2674 if convenient, the result should be placed in TARGET. */
2675
2676 static rtx
2677 expand_builtin_int_roundingfn (tree exp, rtx target)
2678 {
2679 convert_optab builtin_optab;
2680 rtx op0, insns, tmp;
2681 tree fndecl = get_callee_fndecl (exp);
2682 enum built_in_function fallback_fn;
2683 tree fallback_fndecl;
2684 enum machine_mode mode;
2685 tree arg;
2686
2687 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2688 gcc_unreachable ();
2689
2690 arg = CALL_EXPR_ARG (exp, 0);
2691
2692 switch (DECL_FUNCTION_CODE (fndecl))
2693 {
2694 CASE_FLT_FN (BUILT_IN_ICEIL):
2695 CASE_FLT_FN (BUILT_IN_LCEIL):
2696 CASE_FLT_FN (BUILT_IN_LLCEIL):
2697 builtin_optab = lceil_optab;
2698 fallback_fn = BUILT_IN_CEIL;
2699 break;
2700
2701 CASE_FLT_FN (BUILT_IN_IFLOOR):
2702 CASE_FLT_FN (BUILT_IN_LFLOOR):
2703 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2704 builtin_optab = lfloor_optab;
2705 fallback_fn = BUILT_IN_FLOOR;
2706 break;
2707
2708 default:
2709 gcc_unreachable ();
2710 }
2711
2712 /* Make a suitable register to place result in. */
2713 mode = TYPE_MODE (TREE_TYPE (exp));
2714
2715 target = gen_reg_rtx (mode);
2716
2717 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2718 need to expand the argument again. This way, we will not perform
2719 side-effects more than once. */
2720 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2721
2722 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2723
2724 start_sequence ();
2725
2726 /* Compute into TARGET. */
2727 if (expand_sfix_optab (target, op0, builtin_optab))
2728 {
2729 /* Output the entire sequence. */
2730 insns = get_insns ();
2731 end_sequence ();
2732 emit_insn (insns);
2733 return target;
2734 }
2735
2736 /* If we were unable to expand via the builtin, stop the sequence
2737 (without outputting the insns). */
2738 end_sequence ();
2739
2740 /* Fall back to floating point rounding optab. */
2741 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2742
2743 /* For non-C99 targets we may end up without a fallback fndecl here
2744 if the user called __builtin_lfloor directly. In this case emit
2745 a call to the floor/ceil variants nevertheless. This should result
2746 in the best user experience for targets without full C99 support. */
2747 if (fallback_fndecl == NULL_TREE)
2748 {
2749 tree fntype;
2750 const char *name = NULL;
2751
2752 switch (DECL_FUNCTION_CODE (fndecl))
2753 {
2754 case BUILT_IN_ICEIL:
2755 case BUILT_IN_LCEIL:
2756 case BUILT_IN_LLCEIL:
2757 name = "ceil";
2758 break;
2759 case BUILT_IN_ICEILF:
2760 case BUILT_IN_LCEILF:
2761 case BUILT_IN_LLCEILF:
2762 name = "ceilf";
2763 break;
2764 case BUILT_IN_ICEILL:
2765 case BUILT_IN_LCEILL:
2766 case BUILT_IN_LLCEILL:
2767 name = "ceill";
2768 break;
2769 case BUILT_IN_IFLOOR:
2770 case BUILT_IN_LFLOOR:
2771 case BUILT_IN_LLFLOOR:
2772 name = "floor";
2773 break;
2774 case BUILT_IN_IFLOORF:
2775 case BUILT_IN_LFLOORF:
2776 case BUILT_IN_LLFLOORF:
2777 name = "floorf";
2778 break;
2779 case BUILT_IN_IFLOORL:
2780 case BUILT_IN_LFLOORL:
2781 case BUILT_IN_LLFLOORL:
2782 name = "floorl";
2783 break;
2784 default:
2785 gcc_unreachable ();
2786 }
2787
2788 fntype = build_function_type_list (TREE_TYPE (arg),
2789 TREE_TYPE (arg), NULL_TREE);
2790 fallback_fndecl = build_fn_decl (name, fntype);
2791 }
2792
2793 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2794
2795 tmp = expand_normal (exp);
2796 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2797
2798 /* Truncate the result of floating point optab to integer
2799 via expand_fix (). */
2800 target = gen_reg_rtx (mode);
2801 expand_fix (target, tmp, 0);
2802
2803 return target;
2804 }
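
/* Illustrative note (editor's addition): when the lceil/lfloor optab is
   unavailable, the fallback path above effectively lowers

        long l = __builtin_lfloor (x);

   to the equivalent of

        long l = (long) floor (x);

   with the final truncation emitted by expand_fix. */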
2805
2806 /* Expand a call to one of the builtin math functions doing integer
2807 conversion (lrint).
2808 Return 0 if a normal call should be emitted rather than expanding the
2809 function in-line. EXP is the expression that is a call to the builtin
2810 function; if convenient, the result should be placed in TARGET. */
2811
2812 static rtx
2813 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2814 {
2815 convert_optab builtin_optab;
2816 rtx op0, insns;
2817 tree fndecl = get_callee_fndecl (exp);
2818 tree arg;
2819 enum machine_mode mode;
2820 enum built_in_function fallback_fn = BUILT_IN_NONE;
2821
2822 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2823 gcc_unreachable ();
2824
2825 arg = CALL_EXPR_ARG (exp, 0);
2826
2827 switch (DECL_FUNCTION_CODE (fndecl))
2828 {
2829 CASE_FLT_FN (BUILT_IN_IRINT):
2830 fallback_fn = BUILT_IN_LRINT;
2831 /* FALLTHRU */
2832 CASE_FLT_FN (BUILT_IN_LRINT):
2833 CASE_FLT_FN (BUILT_IN_LLRINT):
2834 builtin_optab = lrint_optab;
2835 break;
2836
2837 CASE_FLT_FN (BUILT_IN_IROUND):
2838 fallback_fn = BUILT_IN_LROUND;
2839 /* FALLTHRU */
2840 CASE_FLT_FN (BUILT_IN_LROUND):
2841 CASE_FLT_FN (BUILT_IN_LLROUND):
2842 builtin_optab = lround_optab;
2843 break;
2844
2845 default:
2846 gcc_unreachable ();
2847 }
2848
2849 /* There's no easy way to detect the case we need to set EDOM. */
2850 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2851 return NULL_RTX;
2852
2853 /* Make a suitable register to place result in. */
2854 mode = TYPE_MODE (TREE_TYPE (exp));
2855
2856 /* If errno is not being maintained, try to expand inline. */
2857 if (!flag_errno_math)
2858 {
2859 rtx result = gen_reg_rtx (mode);
2860
2861 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2862 need to expand the argument again. This way, we will not perform
2863 side-effects more than once. */
2864 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2865
2866 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2867
2868 start_sequence ();
2869
2870 if (expand_sfix_optab (result, op0, builtin_optab))
2871 {
2872 /* Output the entire sequence. */
2873 insns = get_insns ();
2874 end_sequence ();
2875 emit_insn (insns);
2876 return result;
2877 }
2878
2879 /* If we were unable to expand via the builtin, stop the sequence
2880 (without outputting the insns) and call to the library function
2881 with the stabilized argument list. */
2882 end_sequence ();
2883 }
2884
2885 if (fallback_fn != BUILT_IN_NONE)
2886 {
2887 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2888 targets, (int) round (x) should never be transformed into
2889 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2890 a call to lround in the hope that the target provides at least some
2891 C99 functions. This should result in the best user experience for
2892 targets without full C99 support. */
2893 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2894 fallback_fn, 0);
2895
2896 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2897 fallback_fndecl, 1, arg);
2898
2899 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2900 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2901 return convert_to_mode (mode, target, 0);
2902 }
2903
2904 return expand_call (exp, target, target == const0_rtx);
2905 }
2906
2907 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2908 a normal call should be emitted rather than expanding the function
2909 in-line. EXP is the expression that is a call to the builtin
2910 function; if convenient, the result should be placed in TARGET. */
2911
2912 static rtx
2913 expand_builtin_powi (tree exp, rtx target)
2914 {
2915 tree arg0, arg1;
2916 rtx op0, op1;
2917 enum machine_mode mode;
2918 enum machine_mode mode2;
2919
2920 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2921 return NULL_RTX;
2922
2923 arg0 = CALL_EXPR_ARG (exp, 0);
2924 arg1 = CALL_EXPR_ARG (exp, 1);
2925 mode = TYPE_MODE (TREE_TYPE (exp));
2926
2927 /* Emit a libcall to libgcc. */
2928
2929 /* Mode of the 2nd argument must match that of an int. */
2930 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2931
2932 if (target == NULL_RTX)
2933 target = gen_reg_rtx (mode);
2934
2935 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2936 if (GET_MODE (op0) != mode)
2937 op0 = convert_to_mode (mode, op0, 0);
2938 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2939 if (GET_MODE (op1) != mode2)
2940 op1 = convert_to_mode (mode2, op1, 0);
2941
2942 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2943 target, LCT_CONST, mode, 2,
2944 op0, mode, op1, mode2);
2945
2946 return target;
2947 }
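
/* Illustrative note (editor's addition): __builtin_powi always becomes a
   libcall here; for a double argument the generated code is equivalent to

        extern double __powidf2 (double, int);
        y = __powidf2 (x, n);

   using the powi_optab libfunc that libgcc provides for DFmode. */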
2948
2949 /* Expand expression EXP which is a call to the strlen builtin. Return
2950 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2951 try to get the result in TARGET, if convenient. */
2952
2953 static rtx
2954 expand_builtin_strlen (tree exp, rtx target,
2955 enum machine_mode target_mode)
2956 {
2957 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2958 return NULL_RTX;
2959 else
2960 {
2961 struct expand_operand ops[4];
2962 rtx pat;
2963 tree len;
2964 tree src = CALL_EXPR_ARG (exp, 0);
2965 rtx src_reg, before_strlen;
2966 enum machine_mode insn_mode = target_mode;
2967 enum insn_code icode = CODE_FOR_nothing;
2968 unsigned int align;
2969
2970 /* If the length can be computed at compile-time, return it. */
2971 len = c_strlen (src, 0);
2972 if (len)
2973 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2974
2975 /* If the length can be computed at compile-time and is a constant
2976 integer, but there are side-effects in src, evaluate
2977 src for side-effects, then return len.
2978 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2979 can be optimized into: i++; x = 3; */
2980 len = c_strlen (src, 1);
2981 if (len && TREE_CODE (len) == INTEGER_CST)
2982 {
2983 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2984 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2985 }
2986
2987 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2988
2989 /* If SRC is not a pointer type, don't do this operation inline. */
2990 if (align == 0)
2991 return NULL_RTX;
2992
2993 /* Bail out if we can't compute strlen in the right mode. */
2994 while (insn_mode != VOIDmode)
2995 {
2996 icode = optab_handler (strlen_optab, insn_mode);
2997 if (icode != CODE_FOR_nothing)
2998 break;
2999
3000 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3001 }
3002 if (insn_mode == VOIDmode)
3003 return NULL_RTX;
3004
3005 /* Make a place to hold the source address. We will not expand
3006 the actual source until we are sure that the expansion will
3007 not fail -- there are trees that cannot be expanded twice. */
3008 src_reg = gen_reg_rtx (Pmode);
3009
3010 /* Mark the beginning of the strlen sequence so we can emit the
3011 source operand later. */
3012 before_strlen = get_last_insn ();
3013
3014 create_output_operand (&ops[0], target, insn_mode);
3015 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3016 create_integer_operand (&ops[2], 0);
3017 create_integer_operand (&ops[3], align);
3018 if (!maybe_expand_insn (icode, 4, ops))
3019 return NULL_RTX;
3020
3021 /* Now that we are assured of success, expand the source. */
3022 start_sequence ();
3023 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3024 if (pat != src_reg)
3025 {
3026 #ifdef POINTERS_EXTEND_UNSIGNED
3027 if (GET_MODE (pat) != Pmode)
3028 pat = convert_to_mode (Pmode, pat,
3029 POINTERS_EXTEND_UNSIGNED);
3030 #endif
3031 emit_move_insn (src_reg, pat);
3032 }
3033 pat = get_insns ();
3034 end_sequence ();
3035
3036 if (before_strlen)
3037 emit_insn_after (pat, before_strlen);
3038 else
3039 emit_insn_before (pat, get_insns ());
3040
3041 /* Return the value in the proper mode for this function. */
3042 if (GET_MODE (ops[0].value) == target_mode)
3043 target = ops[0].value;
3044 else if (target != 0)
3045 convert_move (target, ops[0].value, 0);
3046 else
3047 target = convert_to_mode (target_mode, ops[0].value, 0);
3048
3049 return target;
3050 }
3051 }
3052
3053 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3054 bytes from constant string DATA + OFFSET and return it as target
3055 constant. */
3056
3057 static rtx
3058 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3059 enum machine_mode mode)
3060 {
3061 const char *str = (const char *) data;
3062
3063 gcc_assert (offset >= 0
3064 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3065 <= strlen (str) + 1));
3066
3067 return c_readstr (str + offset, mode);
3068 }
3069
3070 /* Expand a call EXP to the memcpy builtin.
3071 Return NULL_RTX if we failed; the caller should emit a normal call,
3072 otherwise try to get the result in TARGET, if convenient (and in
3073 mode MODE if that's convenient). */
3074
3075 static rtx
3076 expand_builtin_memcpy (tree exp, rtx target)
3077 {
3078 if (!validate_arglist (exp,
3079 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3080 return NULL_RTX;
3081 else
3082 {
3083 tree dest = CALL_EXPR_ARG (exp, 0);
3084 tree src = CALL_EXPR_ARG (exp, 1);
3085 tree len = CALL_EXPR_ARG (exp, 2);
3086 const char *src_str;
3087 unsigned int src_align = get_pointer_alignment (src);
3088 unsigned int dest_align = get_pointer_alignment (dest);
3089 rtx dest_mem, src_mem, dest_addr, len_rtx;
3090 HOST_WIDE_INT expected_size = -1;
3091 unsigned int expected_align = 0;
3092
3093 /* If DEST is not a pointer type, call the normal function. */
3094 if (dest_align == 0)
3095 return NULL_RTX;
3096
3097 /* If SRC is not a pointer type, don't do this
3098 operation in-line. */
3099 if (src_align == 0)
3100 return NULL_RTX;
3101
3102 if (currently_expanding_gimple_stmt)
3103 stringop_block_profile (currently_expanding_gimple_stmt,
3104 &expected_align, &expected_size);
3105
3106 if (expected_align < dest_align)
3107 expected_align = dest_align;
3108 dest_mem = get_memory_rtx (dest, len);
3109 set_mem_align (dest_mem, dest_align);
3110 len_rtx = expand_normal (len);
3111 src_str = c_getstr (src);
3112
3113 /* If SRC is a string constant and block move would be done
3114 by pieces, we can avoid loading the string from memory
3115 and only store the computed constants. */
3116 if (src_str
3117 && CONST_INT_P (len_rtx)
3118 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3119 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3120 CONST_CAST (char *, src_str),
3121 dest_align, false))
3122 {
3123 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3124 builtin_memcpy_read_str,
3125 CONST_CAST (char *, src_str),
3126 dest_align, false, 0);
3127 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3128 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3129 return dest_mem;
3130 }
3131
3132 src_mem = get_memory_rtx (src, len);
3133 set_mem_align (src_mem, src_align);
3134
3135 /* Copy word part most expediently. */
3136 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3137 CALL_EXPR_TAILCALL (exp)
3138 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3139 expected_align, expected_size);
3140
3141 if (dest_addr == 0)
3142 {
3143 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3144 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3145 }
3146 return dest_addr;
3147 }
3148 }
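
/* Illustrative note (editor's addition): the store-by-pieces path above
   means that a call such as

        memcpy (buf, "hi!", 4);

   need not load the literal from memory; when can_store_by_pieces
   agrees, the four bytes are materialized as immediates and stored
   directly into the destination. */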
3149
3150 /* Expand a call EXP to the mempcpy builtin.
3151 Return NULL_RTX if we failed; the caller should emit a normal call,
3152 otherwise try to get the result in TARGET, if convenient (and in
3153 mode MODE if that's convenient). If ENDP is 0 return the
3154 destination pointer, if ENDP is 1 return the end pointer ala
3155 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3156 stpcpy. */
3157
3158 static rtx
3159 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3160 {
3161 if (!validate_arglist (exp,
3162 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3163 return NULL_RTX;
3164 else
3165 {
3166 tree dest = CALL_EXPR_ARG (exp, 0);
3167 tree src = CALL_EXPR_ARG (exp, 1);
3168 tree len = CALL_EXPR_ARG (exp, 2);
3169 return expand_builtin_mempcpy_args (dest, src, len,
3170 target, mode, /*endp=*/ 1);
3171 }
3172 }
3173
3174 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3175 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3176 so that this can also be called without constructing an actual CALL_EXPR.
3177 The other arguments and return value are the same as for
3178 expand_builtin_mempcpy. */
3179
3180 static rtx
3181 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3182 rtx target, enum machine_mode mode, int endp)
3183 {
3184 /* If return value is ignored, transform mempcpy into memcpy. */
3185 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3186 {
3187 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3188 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3189 dest, src, len);
3190 return expand_expr (result, target, mode, EXPAND_NORMAL);
3191 }
3192 else
3193 {
3194 const char *src_str;
3195 unsigned int src_align = get_pointer_alignment (src);
3196 unsigned int dest_align = get_pointer_alignment (dest);
3197 rtx dest_mem, src_mem, len_rtx;
3198
3199 /* If either SRC or DEST is not a pointer type, don't do this
3200 operation in-line. */
3201 if (dest_align == 0 || src_align == 0)
3202 return NULL_RTX;
3203
3204 /* If LEN is not constant, call the normal function. */
3205 if (! host_integerp (len, 1))
3206 return NULL_RTX;
3207
3208 len_rtx = expand_normal (len);
3209 src_str = c_getstr (src);
3210
3211 /* If SRC is a string constant and block move would be done
3212 by pieces, we can avoid loading the string from memory
3213 and only store the computed constants. */
3214 if (src_str
3215 && CONST_INT_P (len_rtx)
3216 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3217 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3218 CONST_CAST (char *, src_str),
3219 dest_align, false))
3220 {
3221 dest_mem = get_memory_rtx (dest, len);
3222 set_mem_align (dest_mem, dest_align);
3223 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3224 builtin_memcpy_read_str,
3225 CONST_CAST (char *, src_str),
3226 dest_align, false, endp);
3227 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3228 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3229 return dest_mem;
3230 }
3231
3232 if (CONST_INT_P (len_rtx)
3233 && can_move_by_pieces (INTVAL (len_rtx),
3234 MIN (dest_align, src_align)))
3235 {
3236 dest_mem = get_memory_rtx (dest, len);
3237 set_mem_align (dest_mem, dest_align);
3238 src_mem = get_memory_rtx (src, len);
3239 set_mem_align (src_mem, src_align);
3240 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3241 MIN (dest_align, src_align), endp);
3242 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3243 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3244 return dest_mem;
3245 }
3246
3247 return NULL_RTX;
3248 }
3249 }
3250
3251 #ifndef HAVE_movstr
3252 # define HAVE_movstr 0
3253 # define CODE_FOR_movstr CODE_FOR_nothing
3254 #endif
3255
3256 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3257 we failed; the caller should emit a normal call, otherwise try to
3258 get the result in TARGET, if convenient. If ENDP is 0 return the
3259 destination pointer, if ENDP is 1 return the end pointer ala
3260 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3261 stpcpy. */
3262
3263 static rtx
3264 expand_movstr (tree dest, tree src, rtx target, int endp)
3265 {
3266 struct expand_operand ops[3];
3267 rtx dest_mem;
3268 rtx src_mem;
3269
3270 if (!HAVE_movstr)
3271 return NULL_RTX;
3272
3273 dest_mem = get_memory_rtx (dest, NULL);
3274 src_mem = get_memory_rtx (src, NULL);
3275 if (!endp)
3276 {
3277 target = force_reg (Pmode, XEXP (dest_mem, 0));
3278 dest_mem = replace_equiv_address (dest_mem, target);
3279 }
3280
3281 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3282 create_fixed_operand (&ops[1], dest_mem);
3283 create_fixed_operand (&ops[2], src_mem);
3284 expand_insn (CODE_FOR_movstr, 3, ops);
3285
3286 if (endp && target != const0_rtx)
3287 {
3288 target = ops[0].value;
3289 /* movstr is supposed to set end to the address of the NUL
3290 terminator. If the caller requested a mempcpy-like return value,
3291 adjust it. */
3292 if (endp == 1)
3293 {
3294 rtx tem = plus_constant (GET_MODE (target),
3295 gen_lowpart (GET_MODE (target), target), 1);
3296 emit_move_insn (target, force_operand (tem, NULL_RTX));
3297 }
3298 }
3299 return target;
3300 }
3301
3302 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3303 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3304 try to get the result in TARGET, if convenient (and in mode MODE if that's
3305 convenient). */
3306
3307 static rtx
3308 expand_builtin_strcpy (tree exp, rtx target)
3309 {
3310 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3311 {
3312 tree dest = CALL_EXPR_ARG (exp, 0);
3313 tree src = CALL_EXPR_ARG (exp, 1);
3314 return expand_builtin_strcpy_args (dest, src, target);
3315 }
3316 return NULL_RTX;
3317 }
3318
3319 /* Helper function to do the actual work for expand_builtin_strcpy. The
3320 arguments to the builtin_strcpy call DEST and SRC are broken out
3321 so that this can also be called without constructing an actual CALL_EXPR.
3322 The other arguments and return value are the same as for
3323 expand_builtin_strcpy. */
3324
3325 static rtx
3326 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3327 {
3328 return expand_movstr (dest, src, target, /*endp=*/0);
3329 }
3330
3331 /* Expand a call EXP to the stpcpy builtin.
3332 Return NULL_RTX if we failed; the caller should emit a normal call,
3333 otherwise try to get the result in TARGET, if convenient (and in
3334 mode MODE if that's convenient). */
3335
3336 static rtx
3337 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3338 {
3339 tree dst, src;
3340 location_t loc = EXPR_LOCATION (exp);
3341
3342 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3343 return NULL_RTX;
3344
3345 dst = CALL_EXPR_ARG (exp, 0);
3346 src = CALL_EXPR_ARG (exp, 1);
3347
3348 /* If return value is ignored, transform stpcpy into strcpy. */
3349 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3350 {
3351 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3352 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3353 return expand_expr (result, target, mode, EXPAND_NORMAL);
3354 }
3355 else
3356 {
3357 tree len, lenp1;
3358 rtx ret;
3359
3360 /* Ensure we get an actual string whose length can be evaluated at
3361 compile-time, not an expression containing a string. This is
3362 because the latter will potentially produce pessimized code
3363 when used to produce the return value. */
3364 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3365 return expand_movstr (dst, src, target, /*endp=*/2);
3366
3367 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3368 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3369 target, mode, /*endp=*/2);
3370
3371 if (ret)
3372 return ret;
3373
3374 if (TREE_CODE (len) == INTEGER_CST)
3375 {
3376 rtx len_rtx = expand_normal (len);
3377
3378 if (CONST_INT_P (len_rtx))
3379 {
3380 ret = expand_builtin_strcpy_args (dst, src, target);
3381
3382 if (ret)
3383 {
3384 if (! target)
3385 {
3386 if (mode != VOIDmode)
3387 target = gen_reg_rtx (mode);
3388 else
3389 target = gen_reg_rtx (GET_MODE (ret));
3390 }
3391 if (GET_MODE (target) != GET_MODE (ret))
3392 ret = gen_lowpart (GET_MODE (target), ret);
3393
3394 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3395 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3396 gcc_assert (ret);
3397
3398 return target;
3399 }
3400 }
3401 }
3402
3403 return expand_movstr (dst, src, target, /*endp=*/2);
3404 }
3405 }
3406
3407 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3408 bytes from constant string DATA + OFFSET and return it as target
3409 constant. */
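/* Illustrative example (hypothetical arguments): with DATA = "ab",
   OFFSET = 0 and a 4-byte MODE, the bytes read are 'a', 'b', 0, 0;
   everything past the terminating NUL is zero-filled, which matches
   the trailing padding strncpy requires.  */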
3410
3411 rtx
3412 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3413 enum machine_mode mode)
3414 {
3415 const char *str = (const char *) data;
3416
3417 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3418 return const0_rtx;
3419
3420 return c_readstr (str + offset, mode);
3421 }
3422
3423 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3424 NULL_RTX if we failed; the caller should emit a normal call. */
3425
3426 static rtx
3427 expand_builtin_strncpy (tree exp, rtx target)
3428 {
3429 location_t loc = EXPR_LOCATION (exp);
3430
3431 if (validate_arglist (exp,
3432 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3433 {
3434 tree dest = CALL_EXPR_ARG (exp, 0);
3435 tree src = CALL_EXPR_ARG (exp, 1);
3436 tree len = CALL_EXPR_ARG (exp, 2);
3437 tree slen = c_strlen (src, 1);
3438
3439 /* We must be passed a constant LEN and a SRC with constant length. */
3440 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3441 return NULL_RTX;
3442
3443 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3444
3445 /* We're required to pad with trailing zeros if the requested
3446 len is greater than strlen(s2)+1. In that case try to
3447 use store_by_pieces; if that fails, punt. */
3448 if (tree_int_cst_lt (slen, len))
3449 {
3450 unsigned int dest_align = get_pointer_alignment (dest);
3451 const char *p = c_getstr (src);
3452 rtx dest_mem;
3453
3454 if (!p || dest_align == 0 || !host_integerp (len, 1)
3455 || !can_store_by_pieces (tree_low_cst (len, 1),
3456 builtin_strncpy_read_str,
3457 CONST_CAST (char *, p),
3458 dest_align, false))
3459 return NULL_RTX;
3460
3461 dest_mem = get_memory_rtx (dest, len);
3462 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3463 builtin_strncpy_read_str,
3464 CONST_CAST (char *, p), dest_align, false, 0);
3465 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3466 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3467 return dest_mem;
3468 }
3469 }
3470 return NULL_RTX;
3471 }
3472
3473 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3474 bytes from constant string DATA + OFFSET and return it as target
3475 constant. */
3476
3477 rtx
3478 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3479 enum machine_mode mode)
3480 {
3481 const char *c = (const char *) data;
3482 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3483
3484 memset (p, *c, GET_MODE_SIZE (mode));
3485
3486 return c_readstr (p, mode);
3487 }
3488
3489 /* Callback routine for store_by_pieces. Return the RTL of a register
3490 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3491 char value given in the RTL register data. For example, if mode is
3492 4 bytes wide, return the RTL for 0x01010101*data. */
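/* Worked example (hypothetical value): for a 4-byte mode and the byte
   value 0xAB, the coefficient built below is 0x01010101, and
   0xAB * 0x01010101 == 0xABABABAB, i.e. four copies of the byte.  */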
3493
3494 static rtx
3495 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3496 enum machine_mode mode)
3497 {
3498 rtx target, coeff;
3499 size_t size;
3500 char *p;
3501
3502 size = GET_MODE_SIZE (mode);
3503 if (size == 1)
3504 return (rtx) data;
3505
3506 p = XALLOCAVEC (char, size);
3507 memset (p, 1, size);
3508 coeff = c_readstr (p, mode);
3509
3510 target = convert_to_mode (mode, (rtx) data, 1);
3511 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3512 return force_reg (mode, target);
3513 }
3514
3515 /* Expand expression EXP, which is a call to the memset builtin. Return
3516 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3517 try to get the result in TARGET, if convenient (and in mode MODE if that's
3518 convenient). */
3519
3520 static rtx
3521 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3522 {
3523 if (!validate_arglist (exp,
3524 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3525 return NULL_RTX;
3526 else
3527 {
3528 tree dest = CALL_EXPR_ARG (exp, 0);
3529 tree val = CALL_EXPR_ARG (exp, 1);
3530 tree len = CALL_EXPR_ARG (exp, 2);
3531 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3532 }
3533 }
3534
3535 /* Helper function to do the actual work for expand_builtin_memset. The
3536 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3537 so that this can also be called without constructing an actual CALL_EXPR.
3538 The other arguments and return value are the same as for
3539 expand_builtin_memset. */
3540
3541 static rtx
3542 expand_builtin_memset_args (tree dest, tree val, tree len,
3543 rtx target, enum machine_mode mode, tree orig_exp)
3544 {
3545 tree fndecl, fn;
3546 enum built_in_function fcode;
3547 enum machine_mode val_mode;
3548 char c;
3549 unsigned int dest_align;
3550 rtx dest_mem, dest_addr, len_rtx;
3551 HOST_WIDE_INT expected_size = -1;
3552 unsigned int expected_align = 0;
3553
3554 dest_align = get_pointer_alignment (dest);
3555
3556 /* If DEST is not a pointer type, don't do this operation in-line. */
3557 if (dest_align == 0)
3558 return NULL_RTX;
3559
3560 if (currently_expanding_gimple_stmt)
3561 stringop_block_profile (currently_expanding_gimple_stmt,
3562 &expected_align, &expected_size);
3563
3564 if (expected_align < dest_align)
3565 expected_align = dest_align;
3566
3567 /* If the LEN parameter is zero, return DEST. */
3568 if (integer_zerop (len))
3569 {
3570 /* Evaluate and ignore VAL in case it has side-effects. */
3571 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3572 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3573 }
3574
3575 /* Stabilize the arguments in case we fail. */
3576 dest = builtin_save_expr (dest);
3577 val = builtin_save_expr (val);
3578 len = builtin_save_expr (len);
3579
3580 len_rtx = expand_normal (len);
3581 dest_mem = get_memory_rtx (dest, len);
3582 val_mode = TYPE_MODE (unsigned_char_type_node);
3583
3584 if (TREE_CODE (val) != INTEGER_CST)
3585 {
3586 rtx val_rtx;
3587
3588 val_rtx = expand_normal (val);
3589 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3590
3591 /* Assume that we can memset by pieces if we can store
3592 the coefficients by pieces (in the required modes).
3593 We can't pass builtin_memset_gen_str as that emits RTL. */
3594 c = 1;
3595 if (host_integerp (len, 1)
3596 && can_store_by_pieces (tree_low_cst (len, 1),
3597 builtin_memset_read_str, &c, dest_align,
3598 true))
3599 {
3600 val_rtx = force_reg (val_mode, val_rtx);
3601 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3602 builtin_memset_gen_str, val_rtx, dest_align,
3603 true, 0);
3604 }
3605 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3606 dest_align, expected_align,
3607 expected_size))
3608 goto do_libcall;
3609
3610 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3611 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3612 return dest_mem;
3613 }
3614
3615 if (target_char_cast (val, &c))
3616 goto do_libcall;
3617
3618 if (c)
3619 {
3620 if (host_integerp (len, 1)
3621 && can_store_by_pieces (tree_low_cst (len, 1),
3622 builtin_memset_read_str, &c, dest_align,
3623 true))
3624 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3625 builtin_memset_read_str, &c, dest_align, true, 0);
3626 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3627 gen_int_mode (c, val_mode),
3628 dest_align, expected_align,
3629 expected_size))
3630 goto do_libcall;
3631
3632 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3633 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3634 return dest_mem;
3635 }
3636
3637 set_mem_align (dest_mem, dest_align);
3638 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3639 CALL_EXPR_TAILCALL (orig_exp)
3640 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3641 expected_align, expected_size);
3642
3643 if (dest_addr == 0)
3644 {
3645 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3646 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3647 }
3648
3649 return dest_addr;
3650
3651 do_libcall:
3652 fndecl = get_callee_fndecl (orig_exp);
3653 fcode = DECL_FUNCTION_CODE (fndecl);
3654 if (fcode == BUILT_IN_MEMSET)
3655 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3656 dest, val, len);
3657 else if (fcode == BUILT_IN_BZERO)
3658 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3659 dest, len);
3660 else
3661 gcc_unreachable ();
3662 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3663 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3664 return expand_call (fn, target, target == const0_rtx);
3665 }
3666
3667 /* Expand expression EXP, which is a call to the bzero builtin. Return
3668 NULL_RTX if we failed; the caller should emit a normal call. */
3669
3670 static rtx
3671 expand_builtin_bzero (tree exp)
3672 {
3673 tree dest, size;
3674 location_t loc = EXPR_LOCATION (exp);
3675
3676 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3677 return NULL_RTX;
3678
3679 dest = CALL_EXPR_ARG (exp, 0);
3680 size = CALL_EXPR_ARG (exp, 1);
3681
3682 /* New argument list transforming bzero(ptr x, int y) to
3683 memset(ptr x, int 0, size_t y). This is done this way
3684 so that if it isn't expanded inline, we fall back to
3685 calling bzero instead of memset. */
3686
3687 return expand_builtin_memset_args (dest, integer_zero_node,
3688 fold_convert_loc (loc,
3689 size_type_node, size),
3690 const0_rtx, VOIDmode, exp);
3691 }
3692
3693 /* Expand expression EXP, which is a call to the memcmp built-in function.
3694 Return NULL_RTX if we failed and the caller should emit a normal call,
3695 otherwise try to get the result in TARGET, if convenient (and in mode
3696 MODE, if that's convenient). */
3697
3698 static rtx
3699 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3700 ATTRIBUTE_UNUSED enum machine_mode mode)
3701 {
3702 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3703
3704 if (!validate_arglist (exp,
3705 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3706 return NULL_RTX;
3707
3708 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3709 implementing memcmp because it will stop if it encounters two
3710 zero bytes. */
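/* E.g. memcmp ("a\0b", "a\0c", 3) must be nonzero, but a string
   compare would stop at the matching NULs and report equality.  */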
3711 #if defined HAVE_cmpmemsi
3712 {
3713 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3714 rtx result;
3715 rtx insn;
3716 tree arg1 = CALL_EXPR_ARG (exp, 0);
3717 tree arg2 = CALL_EXPR_ARG (exp, 1);
3718 tree len = CALL_EXPR_ARG (exp, 2);
3719
3720 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3721 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3722 enum machine_mode insn_mode;
3723
3724 if (HAVE_cmpmemsi)
3725 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3726 else
3727 return NULL_RTX;
3728
3729 /* If we don't know the alignment of either argument, call the function. */
3730 if (arg1_align == 0 || arg2_align == 0)
3731 return NULL_RTX;
3732
3733 /* Make a place to write the result of the instruction. */
3734 result = target;
3735 if (! (result != 0
3736 && REG_P (result) && GET_MODE (result) == insn_mode
3737 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3738 result = gen_reg_rtx (insn_mode);
3739
3740 arg1_rtx = get_memory_rtx (arg1, len);
3741 arg2_rtx = get_memory_rtx (arg2, len);
3742 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3743
3744 /* Set MEM_SIZE as appropriate. */
3745 if (CONST_INT_P (arg3_rtx))
3746 {
3747 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3748 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3749 }
3750
3751 if (HAVE_cmpmemsi)
3752 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3753 GEN_INT (MIN (arg1_align, arg2_align)));
3754 else
3755 gcc_unreachable ();
3756
3757 if (insn)
3758 emit_insn (insn);
3759 else
3760 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3761 TYPE_MODE (integer_type_node), 3,
3762 XEXP (arg1_rtx, 0), Pmode,
3763 XEXP (arg2_rtx, 0), Pmode,
3764 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3765 TYPE_UNSIGNED (sizetype)),
3766 TYPE_MODE (sizetype));
3767
3768 /* Return the value in the proper mode for this function. */
3769 mode = TYPE_MODE (TREE_TYPE (exp));
3770 if (GET_MODE (result) == mode)
3771 return result;
3772 else if (target != 0)
3773 {
3774 convert_move (target, result, 0);
3775 return target;
3776 }
3777 else
3778 return convert_to_mode (mode, result, 0);
3779 }
3780 #endif /* HAVE_cmpmemsi. */
3781
3782 return NULL_RTX;
3783 }
3784
3785 /* Expand expression EXP, which is a call to the strcmp builtin. Return
3786 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3787 try to get the result in TARGET, if convenient. */
3788
3789 static rtx
3790 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3791 {
3792 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3793 return NULL_RTX;
3794
3795 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3796 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3797 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3798 {
3799 rtx arg1_rtx, arg2_rtx;
3800 rtx result, insn = NULL_RTX;
3801 tree fndecl, fn;
3802 tree arg1 = CALL_EXPR_ARG (exp, 0);
3803 tree arg2 = CALL_EXPR_ARG (exp, 1);
3804
3805 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3806 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3807
3808 /* If we don't know the alignment of either argument, call the function. */
3809 if (arg1_align == 0 || arg2_align == 0)
3810 return NULL_RTX;
3811
3812 /* Stabilize the arguments in case gen_cmpstr(n)si fails. */
3813 arg1 = builtin_save_expr (arg1);
3814 arg2 = builtin_save_expr (arg2);
3815
3816 arg1_rtx = get_memory_rtx (arg1, NULL);
3817 arg2_rtx = get_memory_rtx (arg2, NULL);
3818
3819 #ifdef HAVE_cmpstrsi
3820 /* Try to call cmpstrsi. */
3821 if (HAVE_cmpstrsi)
3822 {
3823 enum machine_mode insn_mode
3824 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3825
3826 /* Make a place to write the result of the instruction. */
3827 result = target;
3828 if (! (result != 0
3829 && REG_P (result) && GET_MODE (result) == insn_mode
3830 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3831 result = gen_reg_rtx (insn_mode);
3832
3833 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3834 GEN_INT (MIN (arg1_align, arg2_align)));
3835 }
3836 #endif
3837 #ifdef HAVE_cmpstrnsi
3838 /* Try to determine at least one length and call cmpstrnsi. */
3839 if (!insn && HAVE_cmpstrnsi)
3840 {
3841 tree len;
3842 rtx arg3_rtx;
3843
3844 enum machine_mode insn_mode
3845 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3846 tree len1 = c_strlen (arg1, 1);
3847 tree len2 = c_strlen (arg2, 1);
3848
3849 if (len1)
3850 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3851 if (len2)
3852 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3853
3854 /* If we don't have a constant length for the first, use the length
3855 of the second, if we know it. We don't require a constant for
3856 this case; some cost analysis could be done if both are available
3857 but neither is constant. For now, assume they're equally cheap,
3858 unless one has side effects. If both strings have constant lengths,
3859 use the smaller. */
3860
3861 if (!len1)
3862 len = len2;
3863 else if (!len2)
3864 len = len1;
3865 else if (TREE_SIDE_EFFECTS (len1))
3866 len = len2;
3867 else if (TREE_SIDE_EFFECTS (len2))
3868 len = len1;
3869 else if (TREE_CODE (len1) != INTEGER_CST)
3870 len = len2;
3871 else if (TREE_CODE (len2) != INTEGER_CST)
3872 len = len1;
3873 else if (tree_int_cst_lt (len1, len2))
3874 len = len1;
3875 else
3876 len = len2;
3877
3878 /* If both arguments have side effects, we cannot optimize. */
3879 if (!len || TREE_SIDE_EFFECTS (len))
3880 goto do_libcall;
3881
3882 arg3_rtx = expand_normal (len);
3883
3884 /* Make a place to write the result of the instruction. */
3885 result = target;
3886 if (! (result != 0
3887 && REG_P (result) && GET_MODE (result) == insn_mode
3888 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3889 result = gen_reg_rtx (insn_mode);
3890
3891 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3892 GEN_INT (MIN (arg1_align, arg2_align)));
3893 }
3894 #endif
3895
3896 if (insn)
3897 {
3898 enum machine_mode mode;
3899 emit_insn (insn);
3900
3901 /* Return the value in the proper mode for this function. */
3902 mode = TYPE_MODE (TREE_TYPE (exp));
3903 if (GET_MODE (result) == mode)
3904 return result;
3905 if (target == 0)
3906 return convert_to_mode (mode, result, 0);
3907 convert_move (target, result, 0);
3908 return target;
3909 }
3910
3911 /* Expand the library call ourselves using a stabilized argument
3912 list to avoid re-evaluating the function's arguments twice. */
3913 #ifdef HAVE_cmpstrnsi
3914 do_libcall:
3915 #endif
3916 fndecl = get_callee_fndecl (exp);
3917 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3918 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3919 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3920 return expand_call (fn, target, target == const0_rtx);
3921 }
3922 #endif
3923 return NULL_RTX;
3924 }
3925
3926 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3927 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3928 try to get the result in TARGET, if convenient. */
3929
3930 static rtx
3931 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3932 ATTRIBUTE_UNUSED enum machine_mode mode)
3933 {
3934 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3935
3936 if (!validate_arglist (exp,
3937 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3938 return NULL_RTX;
3939
3940 /* If c_strlen can determine an expression for one of the string
3941 lengths, and it doesn't have side effects, then emit cmpstrnsi
3942 using length MIN(strlen(string)+1, arg3). */
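/* Worked example (hypothetical call): for strncmp (s, "hello", 100),
   c_strlen gives 5 for the constant operand, so the comparison length
   becomes MIN (5 + 1, 100) == 6; the terminating NUL makes comparing
   any further bytes unnecessary.  */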
3943 #ifdef HAVE_cmpstrnsi
3944 if (HAVE_cmpstrnsi)
3945 {
3946 tree len, len1, len2;
3947 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3948 rtx result, insn;
3949 tree fndecl, fn;
3950 tree arg1 = CALL_EXPR_ARG (exp, 0);
3951 tree arg2 = CALL_EXPR_ARG (exp, 1);
3952 tree arg3 = CALL_EXPR_ARG (exp, 2);
3953
3954 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3955 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3956 enum machine_mode insn_mode
3957 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3958
3959 len1 = c_strlen (arg1, 1);
3960 len2 = c_strlen (arg2, 1);
3961
3962 if (len1)
3963 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3964 if (len2)
3965 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3966
3967 /* If we don't have a constant length for the first, use the length
3968 of the second, if we know it. We don't require a constant for
3969 this case; some cost analysis could be done if both are available
3970 but neither is constant. For now, assume they're equally cheap,
3971 unless one has side effects. If both strings have constant lengths,
3972 use the smaller. */
3973
3974 if (!len1)
3975 len = len2;
3976 else if (!len2)
3977 len = len1;
3978 else if (TREE_SIDE_EFFECTS (len1))
3979 len = len2;
3980 else if (TREE_SIDE_EFFECTS (len2))
3981 len = len1;
3982 else if (TREE_CODE (len1) != INTEGER_CST)
3983 len = len2;
3984 else if (TREE_CODE (len2) != INTEGER_CST)
3985 len = len1;
3986 else if (tree_int_cst_lt (len1, len2))
3987 len = len1;
3988 else
3989 len = len2;
3990
3991 /* If both arguments have side effects, we cannot optimize. */
3992 if (!len || TREE_SIDE_EFFECTS (len))
3993 return NULL_RTX;
3994
3995 /* The actual new length parameter is MIN(len,arg3). */
3996 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3997 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3998
3999 /* If we don't know the alignment of either argument, call the function. */
4000 if (arg1_align == 0 || arg2_align == 0)
4001 return NULL_RTX;
4002
4003 /* Make a place to write the result of the instruction. */
4004 result = target;
4005 if (! (result != 0
4006 && REG_P (result) && GET_MODE (result) == insn_mode
4007 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4008 result = gen_reg_rtx (insn_mode);
4009
4010 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4011 arg1 = builtin_save_expr (arg1);
4012 arg2 = builtin_save_expr (arg2);
4013 len = builtin_save_expr (len);
4014
4015 arg1_rtx = get_memory_rtx (arg1, len);
4016 arg2_rtx = get_memory_rtx (arg2, len);
4017 arg3_rtx = expand_normal (len);
4018 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4019 GEN_INT (MIN (arg1_align, arg2_align)));
4020 if (insn)
4021 {
4022 emit_insn (insn);
4023
4024 /* Return the value in the proper mode for this function. */
4025 mode = TYPE_MODE (TREE_TYPE (exp));
4026 if (GET_MODE (result) == mode)
4027 return result;
4028 if (target == 0)
4029 return convert_to_mode (mode, result, 0);
4030 convert_move (target, result, 0);
4031 return target;
4032 }
4033
4034 /* Expand the library call ourselves using a stabilized argument
4035 list to avoid re-evaluating the function's arguments twice. */
4036 fndecl = get_callee_fndecl (exp);
4037 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4038 arg1, arg2, len);
4039 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4040 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4041 return expand_call (fn, target, target == const0_rtx);
4042 }
4043 #endif
4044 return NULL_RTX;
4045 }
4046
4047 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4048 if that's convenient. */
4049
4050 rtx
4051 expand_builtin_saveregs (void)
4052 {
4053 rtx val, seq;
4054
4055 /* Don't do __builtin_saveregs more than once in a function.
4056 Save the result of the first call and reuse it. */
4057 if (saveregs_value != 0)
4058 return saveregs_value;
4059
4060 /* When this function is called, it means that registers must be
4061 saved on entry to this function. So we migrate the call to the
4062 first insn of this function. */
4063
4064 start_sequence ();
4065
4066 /* Do whatever the machine needs done in this case. */
4067 val = targetm.calls.expand_builtin_saveregs ();
4068
4069 seq = get_insns ();
4070 end_sequence ();
4071
4072 saveregs_value = val;
4073
4074 /* Put the insns after the NOTE that starts the function. If this
4075 is inside a start_sequence, make the outer-level insn chain current, so
4076 the code is placed at the start of the function. */
4077 push_topmost_sequence ();
4078 emit_insn_after (seq, entry_of_function ());
4079 pop_topmost_sequence ();
4080
4081 return val;
4082 }
4083
4084 /* Expand a call to __builtin_next_arg. */
4085
4086 static rtx
4087 expand_builtin_next_arg (void)
4088 {
4089 /* Checking arguments is already done in fold_builtin_next_arg,
4090 which must be called before this function. */
4091 return expand_binop (ptr_mode, add_optab,
4092 crtl->args.internal_arg_pointer,
4093 crtl->args.arg_offset_rtx,
4094 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4095 }
4096
4097 /* Make it easier for the backends by protecting the valist argument
4098 from multiple evaluations. */
4099
4100 static tree
4101 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4102 {
4103 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4104
4105 /* The current way of determining the type of valist is completely
4106 bogus. We should have the information on the va builtin instead. */
4107 if (!vatype)
4108 vatype = targetm.fn_abi_va_list (cfun->decl);
4109
4110 if (TREE_CODE (vatype) == ARRAY_TYPE)
4111 {
4112 if (TREE_SIDE_EFFECTS (valist))
4113 valist = save_expr (valist);
4114
4115 /* For this case, the backends will be expecting a pointer to
4116 vatype, but it's possible we've actually been given an array
4117 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4118 So fix it. */
4119 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4120 {
4121 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4122 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4123 }
4124 }
4125 else
4126 {
4127 tree pt = build_pointer_type (vatype);
4128
4129 if (! needs_lvalue)
4130 {
4131 if (! TREE_SIDE_EFFECTS (valist))
4132 return valist;
4133
4134 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4135 TREE_SIDE_EFFECTS (valist) = 1;
4136 }
4137
4138 if (TREE_SIDE_EFFECTS (valist))
4139 valist = save_expr (valist);
4140 valist = fold_build2_loc (loc, MEM_REF,
4141 vatype, valist, build_int_cst (pt, 0));
4142 }
4143
4144 return valist;
4145 }
4146
4147 /* The "standard" definition of va_list is void*. */
4148
4149 tree
4150 std_build_builtin_va_list (void)
4151 {
4152 return ptr_type_node;
4153 }
4154
4155 /* The "standard" abi va_list is va_list_type_node. */
4156
4157 tree
4158 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4159 {
4160 return va_list_type_node;
4161 }
4162
4163 /* The "standard" type of va_list is va_list_type_node. */
4164
4165 tree
4166 std_canonical_va_list_type (tree type)
4167 {
4168 tree wtype, htype;
4169
4170 if (INDIRECT_REF_P (type))
4171 type = TREE_TYPE (type);
4172 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4173 type = TREE_TYPE (type);
4174 wtype = va_list_type_node;
4175 htype = type;
4176 /* Treat structure va_list types. */
4177 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4178 htype = TREE_TYPE (htype);
4179 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4180 {
4181 /* If va_list is an array type, the argument may have decayed
4182 to a pointer type, e.g. by being passed to another function.
4183 In that case, unwrap both types so that we can compare the
4184 underlying records. */
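/* For instance (a sketch, in the style of the x86-64 ABI, where
   va_list is struct __va_list_tag[1]): a va_list function parameter
   decays to struct __va_list_tag *, so both sides are unwrapped to
   the underlying RECORD_TYPE before the comparison below.  */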
4185 if (TREE_CODE (htype) == ARRAY_TYPE
4186 || POINTER_TYPE_P (htype))
4187 {
4188 wtype = TREE_TYPE (wtype);
4189 htype = TREE_TYPE (htype);
4190 }
4191 }
4192 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4193 return va_list_type_node;
4194
4195 return NULL_TREE;
4196 }
4197
4198 /* The "standard" implementation of va_start: just assign `nextarg' to
4199 the variable. */
4200
4201 void
4202 std_expand_builtin_va_start (tree valist, rtx nextarg)
4203 {
4204 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4205 convert_move (va_r, nextarg, 0);
4206 }
4207
4208 /* Expand EXP, a call to __builtin_va_start. */
4209
4210 static rtx
4211 expand_builtin_va_start (tree exp)
4212 {
4213 rtx nextarg;
4214 tree valist;
4215 location_t loc = EXPR_LOCATION (exp);
4216
4217 if (call_expr_nargs (exp) < 2)
4218 {
4219 error_at (loc, "too few arguments to function %<va_start%>");
4220 return const0_rtx;
4221 }
4222
4223 if (fold_builtin_next_arg (exp, true))
4224 return const0_rtx;
4225
4226 nextarg = expand_builtin_next_arg ();
4227 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4228
4229 if (targetm.expand_builtin_va_start)
4230 targetm.expand_builtin_va_start (valist, nextarg);
4231 else
4232 std_expand_builtin_va_start (valist, nextarg);
4233
4234 return const0_rtx;
4235 }
4236
4237
4238 /* Return a dummy expression of type TYPE in order to keep going after an
4239 error. */
4240
4241 static tree
4242 dummy_object (tree type)
4243 {
4244 tree t = build_int_cst (build_pointer_type (type), 0);
4245 return build2 (MEM_REF, type, t, t);
4246 }
4247
4248 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4249 builtin function, but a very special sort of operator. */
4250
4251 enum gimplify_status
4252 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4253 {
4254 tree promoted_type, have_va_type;
4255 tree valist = TREE_OPERAND (*expr_p, 0);
4256 tree type = TREE_TYPE (*expr_p);
4257 tree t;
4258 location_t loc = EXPR_LOCATION (*expr_p);
4259
4260 /* Verify that valist is of the proper type. */
4261 have_va_type = TREE_TYPE (valist);
4262 if (have_va_type == error_mark_node)
4263 return GS_ERROR;
4264 have_va_type = targetm.canonical_va_list_type (have_va_type);
4265
4266 if (have_va_type == NULL_TREE)
4267 {
4268 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4269 return GS_ERROR;
4270 }
4271
4272 /* Generate a diagnostic for requesting data of a type that cannot
4273 be passed through `...' due to type promotion at the call site. */
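/* The classic instance: a float argument is promoted to double when
   passed through "...", so va_arg (ap, float) is undefined and the
   caller must use va_arg (ap, double) instead; likewise char and
   short promote to int.  */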
4274 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4275 != type)
4276 {
4277 static bool gave_help;
4278 bool warned;
4279
4280 /* Unfortunately, this is merely undefined, rather than a constraint
4281 violation, so we cannot make this an error. If this call is never
4282 executed, the program is still strictly conforming. */
4283 warned = warning_at (loc, 0,
4284 "%qT is promoted to %qT when passed through %<...%>",
4285 type, promoted_type);
4286 if (!gave_help && warned)
4287 {
4288 gave_help = true;
4289 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4290 promoted_type, type);
4291 }
4292
4293 /* We can, however, treat "undefined" any way we please.
4294 Call abort to encourage the user to fix the program. */
4295 if (warned)
4296 inform (loc, "if this code is reached, the program will abort");
4297 /* Before the abort, allow the evaluation of the va_list
4298 expression to exit or longjmp. */
4299 gimplify_and_add (valist, pre_p);
4300 t = build_call_expr_loc (loc,
4301 builtin_decl_implicit (BUILT_IN_TRAP), 0);
4302 gimplify_and_add (t, pre_p);
4303
4304 /* This is dead code, but go ahead and finish so that the
4305 mode of the result comes out right. */
4306 *expr_p = dummy_object (type);
4307 return GS_ALL_DONE;
4308 }
4309 else
4310 {
4311 /* Make it easier for the backends by protecting the valist argument
4312 from multiple evaluations. */
4313 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4314 {
4315 /* For this case, the backends will be expecting a pointer to
4316 TREE_TYPE (abi), but it's possible we've
4317 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4318 So fix it. */
4319 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4320 {
4321 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4322 valist = fold_convert_loc (loc, p1,
4323 build_fold_addr_expr_loc (loc, valist));
4324 }
4325
4326 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4327 }
4328 else
4329 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4330
4331 if (!targetm.gimplify_va_arg_expr)
4332 /* FIXME: Once most targets are converted we should merely
4333 assert this is non-null. */
4334 return GS_ALL_DONE;
4335
4336 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4337 return GS_OK;
4338 }
4339 }
4340
4341 /* Expand EXP, a call to __builtin_va_end. */
4342
4343 static rtx
4344 expand_builtin_va_end (tree exp)
4345 {
4346 tree valist = CALL_EXPR_ARG (exp, 0);
4347
4348 /* Evaluate for side effects, if needed. I hate macros that don't
4349 do that. */
4350 if (TREE_SIDE_EFFECTS (valist))
4351 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4352
4353 return const0_rtx;
4354 }
4355
4356 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4357 builtin rather than just as an assignment in stdarg.h because of the
4358 nastiness of array-type va_list types. */
4359
4360 static rtx
4361 expand_builtin_va_copy (tree exp)
4362 {
4363 tree dst, src, t;
4364 location_t loc = EXPR_LOCATION (exp);
4365
4366 dst = CALL_EXPR_ARG (exp, 0);
4367 src = CALL_EXPR_ARG (exp, 1);
4368
4369 dst = stabilize_va_list_loc (loc, dst, 1);
4370 src = stabilize_va_list_loc (loc, src, 0);
4371
4372 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4373
4374 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4375 {
4376 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4377 TREE_SIDE_EFFECTS (t) = 1;
4378 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4379 }
4380 else
4381 {
4382 rtx dstb, srcb, size;
4383
4384 /* Evaluate to pointers. */
4385 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4386 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4387 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4388 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4389
4390 dstb = convert_memory_address (Pmode, dstb);
4391 srcb = convert_memory_address (Pmode, srcb);
4392
4393 /* "Dereference" to BLKmode memories. */
4394 dstb = gen_rtx_MEM (BLKmode, dstb);
4395 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4396 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4397 srcb = gen_rtx_MEM (BLKmode, srcb);
4398 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4399 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4400
4401 /* Copy. */
4402 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4403 }
4404
4405 return const0_rtx;
4406 }
4407
4408 /* Expand a call to one of the builtin functions __builtin_frame_address or
4409 __builtin_return_address. */
4410
4411 static rtx
4412 expand_builtin_frame_address (tree fndecl, tree exp)
4413 {
4414 /* The argument must be a nonnegative integer constant.
4415 It counts the number of frames to scan up the stack.
4416 The value is the return address or frame address saved in that frame. */
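/* For example, __builtin_return_address (0) yields the return address
   of the current function and __builtin_frame_address (0) its frame
   address; an argument of 1 refers to the caller's frame, and so on
   up the stack.  */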
4417 if (call_expr_nargs (exp) == 0)
4418 /* Warning about missing arg was already issued. */
4419 return const0_rtx;
4420 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4421 {
4422 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4423 error ("invalid argument to %<__builtin_frame_address%>");
4424 else
4425 error ("invalid argument to %<__builtin_return_address%>");
4426 return const0_rtx;
4427 }
4428 else
4429 {
4430 rtx tem
4431 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4432 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4433
4434 /* Some ports cannot access arbitrary stack frames. */
4435 if (tem == NULL)
4436 {
4437 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4438 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4439 else
4440 warning (0, "unsupported argument to %<__builtin_return_address%>");
4441 return const0_rtx;
4442 }
4443
4444 /* For __builtin_frame_address, return what we've got. */
4445 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4446 return tem;
4447
4448 if (!REG_P (tem)
4449 && ! CONSTANT_P (tem))
4450 tem = copy_addr_to_reg (tem);
4451 return tem;
4452 }
4453 }
4454
4455 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4456 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4457 is the same as for allocate_dynamic_stack_space. */
4458
4459 static rtx
4460 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4461 {
4462 rtx op0;
4463 rtx result;
4464 bool valid_arglist;
4465 unsigned int align;
4466 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4467 == BUILT_IN_ALLOCA_WITH_ALIGN);
4468
4469 /* Emit a normal call if we use mudflap. */
4470 if (flag_mudflap)
4471 return NULL_RTX;
4472
4473 valid_arglist
4474 = (alloca_with_align
4475 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4476 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4477
4478 if (!valid_arglist)
4479 return NULL_RTX;
4480
4481 /* Compute the argument. */
4482 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4483
4484 /* Compute the alignment. */
4485 align = (alloca_with_align
4486 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4487 : BIGGEST_ALIGNMENT);
4488
4489 /* Allocate the desired space. */
4490 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4491 result = convert_memory_address (ptr_mode, result);
4492
4493 return result;
4494 }
4495
4496 /* Expand a call to bswap builtin in EXP.
4497 Return NULL_RTX if a normal call should be emitted rather than expanding the
4498 function in-line. If convenient, the result should be placed in TARGET.
4499 SUBTARGET may be used as the target for computing one of EXP's operands. */
4500
4501 static rtx
4502 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4503 rtx subtarget)
4504 {
4505 tree arg;
4506 rtx op0;
4507
4508 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4509 return NULL_RTX;
4510
4511 arg = CALL_EXPR_ARG (exp, 0);
4512 op0 = expand_expr (arg,
4513 subtarget && GET_MODE (subtarget) == target_mode
4514 ? subtarget : NULL_RTX,
4515 target_mode, EXPAND_NORMAL);
4516 if (GET_MODE (op0) != target_mode)
4517 op0 = convert_to_mode (target_mode, op0, 1);
4518
4519 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4520
4521 gcc_assert (target);
4522
4523 return convert_to_mode (target_mode, target, 1);
4524 }
4525
4526 /* Expand a call to a unary builtin in EXP.
4527 Return NULL_RTX if a normal call should be emitted rather than expanding the
4528 function in-line. If convenient, the result should be placed in TARGET.
4529 SUBTARGET may be used as the target for computing one of EXP's operands. */
4530
4531 static rtx
4532 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4533 rtx subtarget, optab op_optab)
4534 {
4535 rtx op0;
4536
4537 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4538 return NULL_RTX;
4539
4540 /* Compute the argument. */
4541 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4542 (subtarget
4543 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4544 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4545 VOIDmode, EXPAND_NORMAL);
4546 /* Compute op, into TARGET if possible.
4547 Set TARGET to wherever the result comes back. */
4548 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4549 op_optab, op0, target, op_optab != clrsb_optab);
4550 gcc_assert (target);
4551
4552 return convert_to_mode (target_mode, target, 0);
4553 }
4554
4555 /* Expand a call to __builtin_expect. We just return our argument,
4556 as the builtin_expect semantics should already have been handled
4557 by the tree branch prediction pass. */
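/* Usage sketch (hypothetical condition):

     if (__builtin_expect (ptr == NULL, 0))
       handle_rare_case ();

   The expression simply evaluates to ptr == NULL here; the "expected
   value is 0" hint was consumed when branch probabilities were
   estimated earlier.  */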
4558
4559 static rtx
4560 expand_builtin_expect (tree exp, rtx target)
4561 {
4562 tree arg;
4563
4564 if (call_expr_nargs (exp) < 2)
4565 return const0_rtx;
4566 arg = CALL_EXPR_ARG (exp, 0);
4567
4568 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4569 /* When guessing was done, the hints should already have been stripped away. */
4570 gcc_assert (!flag_guess_branch_prob
4571 || optimize == 0 || seen_error ());
4572 return target;
4573 }
4574
4575 /* Expand a call to __builtin_assume_aligned. We just return our first
4576 argument, as the builtin_assume_aligned semantics should already
4577 have been handled by CCP. */
4578
4579 static rtx
4580 expand_builtin_assume_aligned (tree exp, rtx target)
4581 {
4582 if (call_expr_nargs (exp) < 2)
4583 return const0_rtx;
4584 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4585 EXPAND_NORMAL);
4586 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4587 && (call_expr_nargs (exp) < 3
4588 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4589 return target;
4590 }
4591
4592 void
4593 expand_builtin_trap (void)
4594 {
4595 #ifdef HAVE_trap
4596 if (HAVE_trap)
4597 {
4598 rtx insn = emit_insn (gen_trap ());
4599 /* For trap insns, when not accumulating outgoing args, force
4600 a REG_ARGS_SIZE note to prevent crossjumping of calls with
4601 different arg sizes. */
4602 if (!ACCUMULATE_OUTGOING_ARGS)
4603 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4604 }
4605 else
4606 #endif
4607 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4608 emit_barrier ();
4609 }
4610
4611 /* Expand a call to __builtin_unreachable. We do nothing except emit
4612 a barrier saying that control flow will not pass here.
4613
4614 It is the responsibility of the program being compiled to ensure
4615 that control flow never reaches __builtin_unreachable. */
4616 static void
4617 expand_builtin_unreachable (void)
4618 {
4619 emit_barrier ();
4620 }
4621
4622 /* Expand EXP, a call to fabs, fabsf or fabsl.
4623 Return NULL_RTX if a normal call should be emitted rather than expanding
4624 the function inline. If convenient, the result should be placed
4625 in TARGET. SUBTARGET may be used as the target for computing
4626 the operand. */
4627
4628 static rtx
4629 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4630 {
4631 enum machine_mode mode;
4632 tree arg;
4633 rtx op0;
4634
4635 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4636 return NULL_RTX;
4637
4638 arg = CALL_EXPR_ARG (exp, 0);
4639 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4640 mode = TYPE_MODE (TREE_TYPE (arg));
4641 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4642 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4643 }
4644
4645 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4646 Return NULL if a normal call should be emitted rather than expanding the
4647 function inline. If convenient, the result should be placed in TARGET.
4648 SUBTARGET may be used as the target for computing the operand. */
4649
4650 static rtx
4651 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4652 {
4653 rtx op0, op1;
4654 tree arg;
4655
4656 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4657 return NULL_RTX;
4658
4659 arg = CALL_EXPR_ARG (exp, 0);
4660 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4661
4662 arg = CALL_EXPR_ARG (exp, 1);
4663 op1 = expand_normal (arg);
4664
4665 return expand_copysign (op0, op1, target);
4666 }
4667
4668 /* Create a new constant string literal and return a char* pointer to it.
4669 The STRING_CST value is the LEN characters at STR. */
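/* Usage sketch (hypothetical call): build_string_literal (6, "hello")
   builds a STRING_CST of type const char[6] holding 'h' 'e' 'l' 'l'
   'o' '\0' and returns the address of its first character.  Note that
   LEN must count the terminating NUL if one is wanted.  */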
4670 tree
4671 build_string_literal (int len, const char *str)
4672 {
4673 tree t, elem, index, type;
4674
4675 t = build_string (len, str);
4676 elem = build_type_variant (char_type_node, 1, 0);
4677 index = build_index_type (size_int (len - 1));
4678 type = build_array_type (elem, index);
4679 TREE_TYPE (t) = type;
4680 TREE_CONSTANT (t) = 1;
4681 TREE_READONLY (t) = 1;
4682 TREE_STATIC (t) = 1;
4683
4684 type = build_pointer_type (elem);
4685 t = build1 (ADDR_EXPR, type,
4686 build4 (ARRAY_REF, elem,
4687 t, integer_zero_node, NULL_TREE, NULL_TREE));
4688 return t;
4689 }
4690
4691 /* Expand a call to __builtin___clear_cache. */
4692
4693 static rtx
4694 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4695 {
4696 #ifndef HAVE_clear_cache
4697 #ifdef CLEAR_INSN_CACHE
4698 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4699 does something. Just do the default expansion to a call to
4700 __clear_cache(). */
4701 return NULL_RTX;
4702 #else
4703 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4704 does nothing. There is no need to call it. Do nothing. */
4705 return const0_rtx;
4706 #endif /* CLEAR_INSN_CACHE */
4707 #else
4708 /* We have a "clear_cache" insn, and it will handle everything. */
4709 tree begin, end;
4710 rtx begin_rtx, end_rtx;
4711
4712 /* We must not expand to a library call. If we did, any
4713 fallback library function in libgcc that might contain a call to
4714 __builtin___clear_cache() would recurse infinitely. */
4715 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4716 {
4717 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4718 return const0_rtx;
4719 }
4720
4721 if (HAVE_clear_cache)
4722 {
4723 struct expand_operand ops[2];
4724
4725 begin = CALL_EXPR_ARG (exp, 0);
4726 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4727
4728 end = CALL_EXPR_ARG (exp, 1);
4729 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4730
4731 create_address_operand (&ops[0], begin_rtx);
4732 create_address_operand (&ops[1], end_rtx);
4733 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4734 return const0_rtx;
4735 }
4736 return const0_rtx;
4737 #endif /* HAVE_clear_cache */
4738 }
4739
4740 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
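/* The code below is the usual (addr + align - 1) & -align round-up
   idiom.  Worked example (hypothetical values): with a
   TRAMPOLINE_ALIGNMENT of 128 bits (16 bytes), ADDEND is 15 and MASK
   is -16, so an address of 0x1003 becomes (0x1003 + 15) & -16 == 0x1010.  */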
4741
4742 static rtx
4743 round_trampoline_addr (rtx tramp)
4744 {
4745 rtx temp, addend, mask;
4746
4747 /* If we don't need too much alignment, we'll have been guaranteed
4748 proper alignment by get_trampoline_type. */
4749 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4750 return tramp;
4751
4752 /* Round address up to desired boundary. */
4753 temp = gen_reg_rtx (Pmode);
4754 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4755 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4756
4757 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4758 temp, 0, OPTAB_LIB_WIDEN);
4759 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4760 temp, 0, OPTAB_LIB_WIDEN);
4761
4762 return tramp;
4763 }
4764
4765 static rtx
4766 expand_builtin_init_trampoline (tree exp, bool onstack)
4767 {
4768 tree t_tramp, t_func, t_chain;
4769 rtx m_tramp, r_tramp, r_chain, tmp;
4770
4771 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4772 POINTER_TYPE, VOID_TYPE))
4773 return NULL_RTX;
4774
4775 t_tramp = CALL_EXPR_ARG (exp, 0);
4776 t_func = CALL_EXPR_ARG (exp, 1);
4777 t_chain = CALL_EXPR_ARG (exp, 2);
4778
4779 r_tramp = expand_normal (t_tramp);
4780 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4781 MEM_NOTRAP_P (m_tramp) = 1;
4782
4783 /* If ONSTACK, the TRAMP argument should be the address of a field
4784 within the local function's FRAME decl. Either way, let's see if
4785 we can fill in the MEM_ATTRs for this memory. */
4786 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4787 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4788
4789 /* Creator of a heap trampoline is responsible for making sure the
4790 address is aligned to at least STACK_BOUNDARY. Normally malloc
4791 will ensure this anyhow. */
4792 tmp = round_trampoline_addr (r_tramp);
4793 if (tmp != r_tramp)
4794 {
4795 m_tramp = change_address (m_tramp, BLKmode, tmp);
4796 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4797 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4798 }
4799
4800 /* The FUNC argument should be the address of the nested function.
4801 Extract the actual function decl to pass to the hook. */
4802 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4803 t_func = TREE_OPERAND (t_func, 0);
4804 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4805
4806 r_chain = expand_normal (t_chain);
4807
4808 /* Generate insns to initialize the trampoline. */
4809 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4810
4811 if (onstack)
4812 {
4813 trampolines_created = 1;
4814
4815 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4816 "trampoline generated for nested function %qD", t_func);
4817 }
4818
4819 return const0_rtx;
4820 }
4821
4822 static rtx
4823 expand_builtin_adjust_trampoline (tree exp)
4824 {
4825 rtx tramp;
4826
4827 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4828 return NULL_RTX;
4829
4830 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4831 tramp = round_trampoline_addr (tramp);
4832 if (targetm.calls.trampoline_adjust_address)
4833 tramp = targetm.calls.trampoline_adjust_address (tramp);
4834
4835 return tramp;
4836 }
4837
4838 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4839 function. The function first checks whether the back end provides
4840 an insn to implement signbit for the respective mode. If not, it
4841 checks whether the floating point format of the value is such that
4842 the sign bit can be extracted. If that is not the case, the
4843 function returns NULL_RTX to indicate that a normal call should be
4844 emitted rather than expanding the function in-line. EXP is the
4845 expression that is a call to the builtin function; if convenient,
4846 the result should be placed in TARGET. */
4847 static rtx
4848 expand_builtin_signbit (tree exp, rtx target)
4849 {
4850 const struct real_format *fmt;
4851 enum machine_mode fmode, imode, rmode;
4852 tree arg;
4853 int word, bitpos;
4854 enum insn_code icode;
4855 rtx temp;
4856 location_t loc = EXPR_LOCATION (exp);
4857
4858 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4859 return NULL_RTX;
4860
4861 arg = CALL_EXPR_ARG (exp, 0);
4862 fmode = TYPE_MODE (TREE_TYPE (arg));
4863 rmode = TYPE_MODE (TREE_TYPE (exp));
4864 fmt = REAL_MODE_FORMAT (fmode);
4865
4866 arg = builtin_save_expr (arg);
4867
4868 /* Expand the argument yielding a RTX expression. */
4869 temp = expand_normal (arg);
4870
4871 /* Check if the back end provides an insn that handles signbit for the
4872 argument's mode. */
4873 icode = optab_handler (signbit_optab, fmode);
4874 if (icode != CODE_FOR_nothing)
4875 {
4876 rtx last = get_last_insn ();
4877 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4878 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4879 return target;
4880 delete_insns_since (last);
4881 }
4882
4883 /* For floating point formats without a sign bit, implement signbit
4884 as "ARG < 0.0". */
4885 bitpos = fmt->signbit_ro;
4886 if (bitpos < 0)
4887 {
4888 /* But we can't do this if the format supports signed zero. */
4889 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4890 return NULL_RTX;
4891
4892 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4893 build_real (TREE_TYPE (arg), dconst0));
4894 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4895 }
4896
4897 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4898 {
4899 imode = int_mode_for_mode (fmode);
4900 if (imode == BLKmode)
4901 return NULL_RTX;
4902 temp = gen_lowpart (imode, temp);
4903 }
4904 else
4905 {
4906 imode = word_mode;
4907 /* Handle targets with different FP word orders. */
4908 if (FLOAT_WORDS_BIG_ENDIAN)
4909 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4910 else
4911 word = bitpos / BITS_PER_WORD;
4912 temp = operand_subword_force (temp, word, fmode);
4913 bitpos = bitpos % BITS_PER_WORD;
4914 }
4915
4916 /* Force the intermediate word_mode (or narrower) result into a
4917 register. This avoids attempting to create paradoxical SUBREGs
4918 of floating point modes below. */
4919 temp = force_reg (imode, temp);
4920
4921 /* If the bitpos is within the "result mode" lowpart, the operation
4922 can be implemented with a single bitwise AND. Otherwise, we need
4923 a right shift and an AND. */
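/* Worked example (hypothetical target): for IEEE single precision,
   bitpos is 31; with a 32-bit result mode the mask below is 1 << 31,
   so signbit reduces to (bits & 0x80000000).  The shift-and-AND branch
   covers cases such as a double held in a 64-bit word with a 32-bit
   result mode, where the sign bit sits at position 63, beyond rmode.  */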
4924
4925 if (bitpos < GET_MODE_BITSIZE (rmode))
4926 {
4927 double_int mask = double_int_zero.set_bit (bitpos);
4928
4929 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4930 temp = gen_lowpart (rmode, temp);
4931 temp = expand_binop (rmode, and_optab, temp,
4932 immed_double_int_const (mask, rmode),
4933 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4934 }
4935 else
4936 {
4937 /* Perform a logical right shift to place the signbit in the least
4938 significant bit, then truncate the result to the desired mode
4939 and mask just this bit. */
4940 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4941 temp = gen_lowpart (rmode, temp);
4942 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4943 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4944 }
4945
4946 return temp;
4947 }
4948
4949 /* Expand fork or exec calls. TARGET is the desired target of the
4950 call. EXP is the call. FN is the
4951 identifier of the actual function. IGNORE is nonzero if the
4952 value is to be ignored. */
4953
4954 static rtx
4955 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4956 {
4957 tree id, decl;
4958 tree call;
4959
4960 /* If we are not profiling, just call the function. */
4961 if (!profile_arc_flag)
4962 return NULL_RTX;
4963
4964 /* Otherwise call the wrapper. This should be equivalent for the rest of
4965 the compiler, so the code does not diverge, and the wrapper may run the
4966 code necessary for keeping the profiling sane. */
4967
4968 switch (DECL_FUNCTION_CODE (fn))
4969 {
4970 case BUILT_IN_FORK:
4971 id = get_identifier ("__gcov_fork");
4972 break;
4973
4974 case BUILT_IN_EXECL:
4975 id = get_identifier ("__gcov_execl");
4976 break;
4977
4978 case BUILT_IN_EXECV:
4979 id = get_identifier ("__gcov_execv");
4980 break;
4981
4982 case BUILT_IN_EXECLP:
4983 id = get_identifier ("__gcov_execlp");
4984 break;
4985
4986 case BUILT_IN_EXECLE:
4987 id = get_identifier ("__gcov_execle");
4988 break;
4989
4990 case BUILT_IN_EXECVP:
4991 id = get_identifier ("__gcov_execvp");
4992 break;
4993
4994 case BUILT_IN_EXECVE:
4995 id = get_identifier ("__gcov_execve");
4996 break;
4997
4998 default:
4999 gcc_unreachable ();
5000 }
5001
5002 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5003 FUNCTION_DECL, id, TREE_TYPE (fn));
5004 DECL_EXTERNAL (decl) = 1;
5005 TREE_PUBLIC (decl) = 1;
5006 DECL_ARTIFICIAL (decl) = 1;
5007 TREE_NOTHROW (decl) = 1;
5008 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5009 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5010 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5011 return expand_call (call, target, ignore);
5012 }
5013
5014
5015 \f
5016 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5017 the pointer in these functions is void*, the tree optimizers may remove
5018 casts. The mode computed in expand_builtin isn't reliable either, due
5019 to __sync_bool_compare_and_swap.
5020
5021 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5022 group of builtins. This gives us log2 of the mode size. */
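/* Example: for __sync_fetch_and_add_4, FCODE_DIFF is 2 (the _4 entry
   sits two slots after the _1 entry), so the request below is for
   BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on typical targets.  */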
5023
5024 static inline enum machine_mode
5025 get_builtin_sync_mode (int fcode_diff)
5026 {
5027 /* The size is not negotiable, so ask not to get BLKmode in return
5028 if the target indicates that a smaller size would be better. */
5029 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5030 }
5031
5032 /* Expand the memory expression LOC and return the appropriate memory operand
5033 for the builtin_sync operations. */
5034
5035 static rtx
5036 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5037 {
5038 rtx addr, mem;
5039
5040 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5041 addr = convert_memory_address (Pmode, addr);
5042
5043 /* Note that we explicitly do not want any alias information for this
5044 memory, so that we kill all other live memories. Otherwise we don't
5045 satisfy the full barrier semantics of the intrinsic. */
5046 mem = validize_mem (gen_rtx_MEM (mode, addr));
5047
5048 /* The alignment needs to be at least that of the mode. */
5049 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5050 get_pointer_alignment (loc)));
5051 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5052 MEM_VOLATILE_P (mem) = 1;
5053
5054 return mem;
5055 }
5056
5057 /* Make sure an argument is in the right mode.
5058 EXP is the tree argument.
5059 MODE is the mode it should be in. */
5060
5061 static rtx
5062 expand_expr_force_mode (tree exp, enum machine_mode mode)
5063 {
5064 rtx val;
5065 enum machine_mode old_mode;
5066
5067 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5068 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5069 of CONST_INTs, where we know the old_mode only from the call argument. */
5070
5071 old_mode = GET_MODE (val);
5072 if (old_mode == VOIDmode)
5073 old_mode = TYPE_MODE (TREE_TYPE (exp));
5074 val = convert_modes (mode, old_mode, val, 1);
5075 return val;
5076 }
5077
5078
5079 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5080 EXP is the CALL_EXPR. CODE is the rtx code
5081 that corresponds to the arithmetic or logical operation from the name;
5082 an exception here is that NOT actually means NAND. TARGET is an optional
5083 place for us to store the results; AFTER is true if this is the
5084 xxx_and_fetch form, i.e. the updated value is returned. */
5085
5086 static rtx
5087 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5088 enum rtx_code code, bool after,
5089 rtx target)
5090 {
5091 rtx val, mem;
5092 location_t loc = EXPR_LOCATION (exp);
5093
5094 if (code == NOT && warn_sync_nand)
5095 {
5096 tree fndecl = get_callee_fndecl (exp);
5097 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5098
5099 static bool warned_f_a_n, warned_n_a_f;
5100
5101 switch (fcode)
5102 {
5103 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5104 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5105 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5106 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5107 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5108 if (warned_f_a_n)
5109 break;
5110
5111 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5112 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5113 warned_f_a_n = true;
5114 break;
5115
5116 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5117 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5118 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5119 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5120 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5121 if (warned_n_a_f)
5122 break;
5123
5124 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5125 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5126 warned_n_a_f = true;
5127 break;
5128
5129 default:
5130 gcc_unreachable ();
5131 }
5132 }
5133
5134 /* Expand the operands. */
5135 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5136 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5137
5138 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5139 after);
5140 }
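
/* Illustrative sketch (hypothetical user code): both calls below reach
   this expander with CODE == PLUS and MEMMODEL_SEQ_CST; the first is
   expanded with AFTER == false (old value returned), the second with
   AFTER == true (updated value returned).

     int
     bump_twice (int *counter)
     {
       int old_val = __sync_fetch_and_add (counter, 1);   // AFTER == false
       int new_val = __sync_add_and_fetch (counter, 1);   // AFTER == true
       return new_val - old_val;   // 2 unless another thread intervenes
     }
*/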
5141
5142 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5143 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5144 true if this is the boolean form. TARGET is a place for us to store the
5145 results; this is NOT optional if IS_BOOL is true. */
5146
5147 static rtx
5148 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5149 bool is_bool, rtx target)
5150 {
5151 rtx old_val, new_val, mem;
5152 rtx *pbool, *poval;
5153
5154 /* Expand the operands. */
5155 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5156 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5157 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5158
5159 pbool = poval = NULL;
5160 if (target != const0_rtx)
5161 {
5162 if (is_bool)
5163 pbool = &target;
5164 else
5165 poval = &target;
5166 }
5167 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5168 false, MEMMODEL_SEQ_CST,
5169 MEMMODEL_SEQ_CST))
5170 return NULL_RTX;
5171
5172 return target;
5173 }
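
/* Illustrative sketch (hypothetical user code): both intrinsic forms
   share this expander.  The bool form needs only the success flag and
   the val form only the prior contents, which is why just one of
   PBOOL/POVAL is set when the result is used.

     int
     try_lock (int *lock)
     {
       return __sync_bool_compare_and_swap (lock, 0, 1);   // IS_BOOL true
     }

     int
     old_contents (int *lock)
     {
       return __sync_val_compare_and_swap (lock, 0, 1);    // IS_BOOL false
     }
*/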
5174
5175 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5176 general form is actually an atomic exchange, and some targets only
5177 support a reduced form with the second argument being a constant 1.
5178 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5179 the results. */
5180
5181 static rtx
5182 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5183 rtx target)
5184 {
5185 rtx val, mem;
5186
5187 /* Expand the operands. */
5188 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5189 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5190
5191 return expand_sync_lock_test_and_set (target, mem, val);
5192 }
5193
5194 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5195
5196 static void
5197 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5198 {
5199 rtx mem;
5200
5201 /* Expand the operands. */
5202 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5203
5204 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5205 }
5206
5207 /* Given an integer representing an ``enum memmodel'', verify its
5208 correctness and return the memory model enum. */
5209
5210 static enum memmodel
5211 get_memmodel (tree exp)
5212 {
5213 rtx op;
5214 unsigned HOST_WIDE_INT val;
5215
5216 /* If the parameter is not a constant, it's a run time value so we'll just
5217 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5218 if (TREE_CODE (exp) != INTEGER_CST)
5219 return MEMMODEL_SEQ_CST;
5220
5221 op = expand_normal (exp);
5222
5223 val = INTVAL (op);
5224 if (targetm.memmodel_check)
5225 val = targetm.memmodel_check (val);
5226 else if (val & ~MEMMODEL_MASK)
5227 {
5228 warning (OPT_Winvalid_memory_model,
5229 "unknown architecture specifier in memory model to builtin");
5230 return MEMMODEL_SEQ_CST;
5231 }
5232
5233 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5234 {
5235 warning (OPT_Winvalid_memory_model,
5236 "invalid memory model argument to builtin");
5237 return MEMMODEL_SEQ_CST;
5238 }
5239
5240 return (enum memmodel) val;
5241 }
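
/* Illustrative sketch (hypothetical user code): the constant model in
   the first call is validated and kept, while the run-time value in the
   second cannot be checked here and is conservatively treated as
   MEMMODEL_SEQ_CST.

     int
     load_both (int *p, int order)
     {
       int a = __atomic_load_n (p, __ATOMIC_ACQUIRE);   // constant model
       int b = __atomic_load_n (p, order);              // run time: SEQ_CST
       return a + b;
     }
*/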
5242
5243 /* Expand the __atomic_exchange intrinsic:
5244 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5245 EXP is the CALL_EXPR.
5246 TARGET is an optional place for us to store the results. */
5247
5248 static rtx
5249 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5250 {
5251 rtx val, mem;
5252 enum memmodel model;
5253
5254 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5255 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5256 {
5257 error ("invalid memory model for %<__atomic_exchange%>");
5258 return NULL_RTX;
5259 }
5260
5261 if (!flag_inline_atomics)
5262 return NULL_RTX;
5263
5264 /* Expand the operands. */
5265 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5266 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5267
5268 return expand_atomic_exchange (target, mem, val, model);
5269 }
5270
5271 /* Expand the __atomic_compare_exchange intrinsic:
5272 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5273 TYPE desired, BOOL weak,
5274 enum memmodel success,
5275 enum memmodel failure)
5276 EXP is the CALL_EXPR.
5277 TARGET is an optional place for us to store the results. */
5278
5279 static rtx
5280 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5281 rtx target)
5282 {
5283 rtx expect, desired, mem, oldval;
5284 enum memmodel success, failure;
5285 tree weak;
5286 bool is_weak;
5287
5288 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5289 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5290
5291 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5292 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5293 {
5294 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5295 return NULL_RTX;
5296 }
5297
5298 if (failure > success)
5299 {
5300 error ("failure memory model cannot be stronger than success "
5301 "memory model for %<__atomic_compare_exchange%>");
5302 return NULL_RTX;
5303 }
5304
5305 if (!flag_inline_atomics)
5306 return NULL_RTX;
5307
5308 /* Expand the operands. */
5309 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5310
5311 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5312 expect = convert_memory_address (Pmode, expect);
5313 expect = gen_rtx_MEM (mode, expect);
5314 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5315
5316 weak = CALL_EXPR_ARG (exp, 3);
5317 is_weak = false;
5318 if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
5319 is_weak = true;
5320
5321 oldval = expect;
5322 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5323 &oldval, mem, oldval, desired,
5324 is_weak, success, failure))
5325 return NULL_RTX;
5326
5327 if (oldval != expect)
5328 emit_move_insn (expect, oldval);
5329
5330 return target;
5331 }
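
/* Illustrative sketch (hypothetical user code): a strong
   compare-exchange as checked above.  The failure model may be neither
   RELEASE nor ACQ_REL and may not be stronger than the success model,
   so the (SEQ_CST, ACQUIRE) pairing below is accepted while
   (ACQUIRE, SEQ_CST) would be rejected.

     _Bool
     cas_int (int *p, int *expected, int desired)
     {
       return __atomic_compare_exchange_n (p, expected, desired,
                                           0,   // weak == false
                                           __ATOMIC_SEQ_CST,
                                           __ATOMIC_ACQUIRE);
     }
*/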
5332
5333 /* Expand the __atomic_load intrinsic:
5334 TYPE __atomic_load (TYPE *object, enum memmodel)
5335 EXP is the CALL_EXPR.
5336 TARGET is an optional place for us to store the results. */
5337
5338 static rtx
5339 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5340 {
5341 rtx mem;
5342 enum memmodel model;
5343
5344 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5345 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5346 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5347 {
5348 error ("invalid memory model for %<__atomic_load%>");
5349 return NULL_RTX;
5350 }
5351
5352 if (!flag_inline_atomics)
5353 return NULL_RTX;
5354
5355 /* Expand the operand. */
5356 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5357
5358 return expand_atomic_load (target, mem, model);
5359 }
5360
5361
5362 /* Expand the __atomic_store intrinsic:
5363 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5364 EXP is the CALL_EXPR.
5365 TARGET is an optional place for us to store the results. */
5366
5367 static rtx
5368 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5369 {
5370 rtx mem, val;
5371 enum memmodel model;
5372
5373 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5374 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5375 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5376 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5377 {
5378 error ("invalid memory model for %<__atomic_store%>");
5379 return NULL_RTX;
5380 }
5381
5382 if (!flag_inline_atomics)
5383 return NULL_RTX;
5384
5385 /* Expand the operands. */
5386 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5387 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5388
5389 return expand_atomic_store (mem, val, model, false);
5390 }
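
/* Illustrative sketch (hypothetical user code): only RELAXED, RELEASE
   and SEQ_CST pass the model check above, so the first store expands
   normally while the second is diagnosed.

     void
     store_both (int *p)
     {
       __atomic_store_n (p, 1, __ATOMIC_RELEASE);   // accepted
       __atomic_store_n (p, 2, __ATOMIC_ACQUIRE);   // invalid model, error
     }
*/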
5391
5392 /* Expand the __atomic_fetch_XXX intrinsic:
5393 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5394 EXP is the CALL_EXPR.
5395 TARGET is an optional place for us to store the results.
5396 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR (NOT means NAND).
5397 FETCH_AFTER is true if returning the result of the operation,
5398 false if returning the value before the operation.
5399 IGNORE is true if the result is not used.
5400 EXT_CALL is the correct builtin for an external call if this cannot be
5401 resolved to an instruction sequence. */
5402
5403 static rtx
5404 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5405 enum rtx_code code, bool fetch_after,
5406 bool ignore, enum built_in_function ext_call)
5407 {
5408 rtx val, mem, ret;
5409 enum memmodel model;
5410 tree fndecl;
5411 tree addr;
5412
5413 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5414
5415 /* Expand the operands. */
5416 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5417 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5418
5419 /* Only try generating instructions if inlining is turned on. */
5420 if (flag_inline_atomics)
5421 {
5422 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5423 if (ret)
5424 return ret;
5425 }
5426
5427 /* If no replacement library routine is needed, let the normal library call be emitted. */
5428 if (ext_call == BUILT_IN_NONE)
5429 return NULL_RTX;
5430
5431 /* Change the call to the specified function. */
5432 fndecl = get_callee_fndecl (exp);
5433 addr = CALL_EXPR_FN (exp);
5434 STRIP_NOPS (addr);
5435
5436 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5437 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5438
5439 /* Expand the call here so we can emit trailing code. */
5440 ret = expand_call (exp, target, ignore);
5441
5442 /* Replace the original function just in case it matters. */
5443 TREE_OPERAND (addr, 0) = fndecl;
5444
5445 /* Then issue the arithmetic correction to return the right result. */
5446 if (!ignore)
5447 {
5448 if (code == NOT)
5449 {
5450 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5451 OPTAB_LIB_WIDEN);
5452 ret = expand_simple_unop (mode, NOT, ret, target, true);
5453 }
5454 else
5455 ret = expand_simple_binop (mode, code, ret, val, target, true,
5456 OPTAB_LIB_WIDEN);
5457 }
5458 return ret;
5459 }
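
/* A worked example of the trailing correction above, assuming
   __atomic_nand_fetch falls back on the external __atomic_fetch_nand
   routine: the library call returns the old value OLD, and since NOT
   here means NAND the desired result is rebuilt as ~(OLD & VAL) -- the
   AND first, then the NOT.  For the other codes a single binop such as
   OLD + VAL suffices.  */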
5460
5461
5462 #ifndef HAVE_atomic_clear
5463 # define HAVE_atomic_clear 0
5464 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5465 #endif
5466
5467 /* Expand an atomic clear operation.
5468 void __atomic_clear (BOOL *obj, enum memmodel)
5469 EXP is the call expression. */
5470
5471 static rtx
5472 expand_builtin_atomic_clear (tree exp)
5473 {
5474 enum machine_mode mode;
5475 rtx mem, ret;
5476 enum memmodel model;
5477
5478 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5479 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5480 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5481
5482 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5483 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5484 {
5485 error ("invalid memory model for %<__atomic_store%>");
5486 return const0_rtx;
5487 }
5488
5489 if (HAVE_atomic_clear)
5490 {
5491 emit_insn (gen_atomic_clear (mem, model));
5492 return const0_rtx;
5493 }
5494
5495 /* Try issuing an __atomic_store, allowing fallback to a __sync_lock_release;
5496 failing that, expand_atomic_store emits a plain store. The only way this
5497 can fail is if the bool type is larger than a word size. Unlikely, but
5498 handle it anyway for completeness. Assume a single threaded model since
5499 there is no atomic support in this case, and no barriers are required. */
5500 ret = expand_atomic_store (mem, const0_rtx, model, true);
5501 if (!ret)
5502 emit_move_insn (mem, const0_rtx);
5503 return const0_rtx;
5504 }
5505
5506 /* Expand an atomic test_and_set operation.
5507 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5508 EXP is the call expression. */
5509
5510 static rtx
5511 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5512 {
5513 rtx mem;
5514 enum memmodel model;
5515 enum machine_mode mode;
5516
5517 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5518 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5519 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5520
5521 return expand_atomic_test_and_set (target, mem, model);
5522 }
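
/* Illustrative sketch (hypothetical user code): a byte-sized spinlock
   pairing the two expanders above.

     #include <stdbool.h>

     static volatile bool lock_taken;

     void
     spin_lock (void)
     {
       while (__atomic_test_and_set (&lock_taken, __ATOMIC_ACQUIRE))
         ;   // spin while the previous value was already set
     }

     void
     spin_unlock (void)
     {
       __atomic_clear (&lock_taken, __ATOMIC_RELEASE);
     }
*/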
5523
5524
5525 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5526 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5527
5528 static tree
5529 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5530 {
5531 int size;
5532 enum machine_mode mode;
5533 unsigned int mode_align, type_align;
5534
5535 if (TREE_CODE (arg0) != INTEGER_CST)
5536 return NULL_TREE;
5537
5538 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5539 mode = mode_for_size (size, MODE_INT, 0);
5540 mode_align = GET_MODE_ALIGNMENT (mode);
5541
5542 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5543 type_align = mode_align;
5544 else
5545 {
5546 tree ttype = TREE_TYPE (arg1);
5547
5548 /* This function is usually invoked and folded immediately by the front
5549 end before anything else has a chance to look at it. The pointer
5550 parameter at this point is usually cast to a void *, so check for that
5551 and look past the cast. */
5552 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5553 && VOID_TYPE_P (TREE_TYPE (ttype)))
5554 arg1 = TREE_OPERAND (arg1, 0);
5555
5556 ttype = TREE_TYPE (arg1);
5557 gcc_assert (POINTER_TYPE_P (ttype));
5558
5559 /* Get the underlying type of the object. */
5560 ttype = TREE_TYPE (ttype);
5561 type_align = TYPE_ALIGN (ttype);
5562 }
5563
5564 /* If the object has smaller alignment, the lock free routines cannot
5565 be used. */
5566 if (type_align < mode_align)
5567 return boolean_false_node;
5568
5569 /* Check if a compare_and_swap pattern exists for the mode which represents
5570 the required size. The pattern is not allowed to fail, so the existence
5571 of the pattern indicates support is present. */
5572 if (can_compare_and_swap_p (mode, true))
5573 return boolean_true_node;
5574 else
5575 return boolean_false_node;
5576 }
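
/* A worked example (hypothetical user code): with a null object pointer
   the answer depends only on the typical alignment for the size, so on
   a target with a never-failing 32-bit compare_and_swap pattern the
   call below folds to boolean_true_node at compile time.

     _Bool
     int_always_lock_free (void)
     {
       return __atomic_always_lock_free (sizeof (int), 0);
     }
*/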
5577
5578 /* Return true if the parameters to call EXP represent an object which will
5579 always generate lock free instructions. The first argument represents the
5580 size of the object, and the second parameter is a pointer to the object
5581 itself. If NULL is passed for the object, then the result is based on
5582 typical alignment for an object of the specified size. Otherwise return
5583 false. */
5584
5585 static rtx
5586 expand_builtin_atomic_always_lock_free (tree exp)
5587 {
5588 tree size;
5589 tree arg0 = CALL_EXPR_ARG (exp, 0);
5590 tree arg1 = CALL_EXPR_ARG (exp, 1);
5591
5592 if (TREE_CODE (arg0) != INTEGER_CST)
5593 {
5594 error ("non-constant argument 1 to __atomic_always_lock_free");
5595 return const0_rtx;
5596 }
5597
5598 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5599 if (size == boolean_true_node)
5600 return const1_rtx;
5601 return const0_rtx;
5602 }
5603
5604 /* Return boolean_true_node if it can be determined that object ARG1 of
5605 size ARG0 is lock free on this architecture, NULL_TREE otherwise. */
5606
5607 static tree
5608 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5609 {
5610 if (!flag_inline_atomics)
5611 return NULL_TREE;
5612
5613 /* If it isn't always lock free, don't generate a result. */
5614 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5615 return boolean_true_node;
5616
5617 return NULL_TREE;
5618 }
5619
5620 /* Return one if the parameters to call EXP describe an object which is
5621 known to be lock free. The first argument represents the
5622 size of the object, and the second parameter is a pointer to the object
5623 itself. If NULL is passed for the object, then the result is based on
5624 typical alignment for an object of the specified size. Otherwise return
5625 NULL. */
5626
5627 static rtx
5628 expand_builtin_atomic_is_lock_free (tree exp)
5629 {
5630 tree size;
5631 tree arg0 = CALL_EXPR_ARG (exp, 0);
5632 tree arg1 = CALL_EXPR_ARG (exp, 1);
5633
5634 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5635 {
5636 error ("non-integer argument 1 to __atomic_is_lock_free");
5637 return NULL_RTX;
5638 }
5639
5640 if (!flag_inline_atomics)
5641 return NULL_RTX;
5642
5643 /* If the value is known at compile time, return the RTX for it. */
5644 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5645 if (size == boolean_true_node)
5646 return const1_rtx;
5647
5648 return NULL_RTX;
5649 }
5650
5651 /* Expand the __atomic_thread_fence intrinsic:
5652 void __atomic_thread_fence (enum memmodel)
5653 EXP is the CALL_EXPR. */
5654
5655 static void
5656 expand_builtin_atomic_thread_fence (tree exp)
5657 {
5658 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5659 expand_mem_thread_fence (model);
5660 }
5661
5662 /* Expand the __atomic_signal_fence intrinsic:
5663 void __atomic_signal_fence (enum memmodel)
5664 EXP is the CALL_EXPR. */
5665
5666 static void
5667 expand_builtin_atomic_signal_fence (tree exp)
5668 {
5669 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5670 expand_mem_signal_fence (model);
5671 }
5672
5673 /* Expand the __sync_synchronize intrinsic. */
5674
5675 static void
5676 expand_builtin_sync_synchronize (void)
5677 {
5678 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5679 }
5680
5681 static rtx
5682 expand_builtin_thread_pointer (tree exp, rtx target)
5683 {
5684 enum insn_code icode;
5685 if (!validate_arglist (exp, VOID_TYPE))
5686 return const0_rtx;
5687 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5688 if (icode != CODE_FOR_nothing)
5689 {
5690 struct expand_operand op;
5691 if (!REG_P (target) || GET_MODE (target) != Pmode)
5692 target = gen_reg_rtx (Pmode);
5693 create_output_operand (&op, target, Pmode);
5694 expand_insn (icode, 1, &op);
5695 return target;
5696 }
5697 error ("__builtin_thread_pointer is not supported on this target");
5698 return const0_rtx;
5699 }
5700
5701 static void
5702 expand_builtin_set_thread_pointer (tree exp)
5703 {
5704 enum insn_code icode;
5705 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5706 return;
5707 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5708 if (icode != CODE_FOR_nothing)
5709 {
5710 struct expand_operand op;
5711 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5712 Pmode, EXPAND_NORMAL);
5713 create_input_operand (&op, val, Pmode);
5714 expand_insn (icode, 1, &op);
5715 return;
5716 }
5717 error ("__builtin_set_thread_pointer is not supported on this target");
5718 }
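
/* Illustrative sketch (hypothetical user code): on targets providing
   the corresponding optabs, the two expanders above turn these into
   direct reads/writes of the thread-pointer register instead of calls.

     void *
     read_tp (void)
     {
       return __builtin_thread_pointer ();
     }

     void
     write_tp (void *p)
     {
       __builtin_set_thread_pointer (p);
     }
*/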
5719
5720 \f
5721 /* Expand an expression EXP that calls a built-in function,
5722 with result going to TARGET if that's convenient
5723 (and in mode MODE if that's convenient).
5724 SUBTARGET may be used as the target for computing one of EXP's operands.
5725 IGNORE is nonzero if the value is to be ignored. */
5726
5727 rtx
5728 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5729 int ignore)
5730 {
5731 tree fndecl = get_callee_fndecl (exp);
5732 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5733 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5734 int flags;
5735
5736 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5737 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5738
5739 /* When not optimizing, generate calls to library functions for a certain
5740 set of builtins. */
5741 if (!optimize
5742 && !called_as_built_in (fndecl)
5743 && fcode != BUILT_IN_FORK
5744 && fcode != BUILT_IN_EXECL
5745 && fcode != BUILT_IN_EXECV
5746 && fcode != BUILT_IN_EXECLP
5747 && fcode != BUILT_IN_EXECLE
5748 && fcode != BUILT_IN_EXECVP
5749 && fcode != BUILT_IN_EXECVE
5750 && fcode != BUILT_IN_ALLOCA
5751 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5752 && fcode != BUILT_IN_FREE)
5753 return expand_call (exp, target, ignore);
5754
5755 /* The built-in function expanders test for target == const0_rtx
5756 to determine whether the function's result will be ignored. */
5757 if (ignore)
5758 target = const0_rtx;
5759
5760 /* If the result of a pure or const built-in function is ignored, and
5761 none of its arguments are volatile, we can avoid expanding the
5762 built-in call and just evaluate the arguments for side-effects. */
5763 if (target == const0_rtx
5764 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5765 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5766 {
5767 bool volatilep = false;
5768 tree arg;
5769 call_expr_arg_iterator iter;
5770
5771 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5772 if (TREE_THIS_VOLATILE (arg))
5773 {
5774 volatilep = true;
5775 break;
5776 }
5777
5778 if (! volatilep)
5779 {
5780 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5781 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5782 return const0_rtx;
5783 }
5784 }
5785
5786 switch (fcode)
5787 {
5788 CASE_FLT_FN (BUILT_IN_FABS):
5789 case BUILT_IN_FABSD32:
5790 case BUILT_IN_FABSD64:
5791 case BUILT_IN_FABSD128:
5792 target = expand_builtin_fabs (exp, target, subtarget);
5793 if (target)
5794 return target;
5795 break;
5796
5797 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5798 target = expand_builtin_copysign (exp, target, subtarget);
5799 if (target)
5800 return target;
5801 break;
5802
5803 /* Just do a normal library call if we were unable to fold
5804 the values. */
5805 CASE_FLT_FN (BUILT_IN_CABS):
5806 break;
5807
5808 CASE_FLT_FN (BUILT_IN_EXP):
5809 CASE_FLT_FN (BUILT_IN_EXP10):
5810 CASE_FLT_FN (BUILT_IN_POW10):
5811 CASE_FLT_FN (BUILT_IN_EXP2):
5812 CASE_FLT_FN (BUILT_IN_EXPM1):
5813 CASE_FLT_FN (BUILT_IN_LOGB):
5814 CASE_FLT_FN (BUILT_IN_LOG):
5815 CASE_FLT_FN (BUILT_IN_LOG10):
5816 CASE_FLT_FN (BUILT_IN_LOG2):
5817 CASE_FLT_FN (BUILT_IN_LOG1P):
5818 CASE_FLT_FN (BUILT_IN_TAN):
5819 CASE_FLT_FN (BUILT_IN_ASIN):
5820 CASE_FLT_FN (BUILT_IN_ACOS):
5821 CASE_FLT_FN (BUILT_IN_ATAN):
5822 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5823 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5824 because of possible accuracy problems. */
5825 if (! flag_unsafe_math_optimizations)
5826 break;
5827 CASE_FLT_FN (BUILT_IN_SQRT):
5828 CASE_FLT_FN (BUILT_IN_FLOOR):
5829 CASE_FLT_FN (BUILT_IN_CEIL):
5830 CASE_FLT_FN (BUILT_IN_TRUNC):
5831 CASE_FLT_FN (BUILT_IN_ROUND):
5832 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5833 CASE_FLT_FN (BUILT_IN_RINT):
5834 target = expand_builtin_mathfn (exp, target, subtarget);
5835 if (target)
5836 return target;
5837 break;
5838
5839 CASE_FLT_FN (BUILT_IN_FMA):
5840 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5841 if (target)
5842 return target;
5843 break;
5844
5845 CASE_FLT_FN (BUILT_IN_ILOGB):
5846 if (! flag_unsafe_math_optimizations)
5847 break;
5848 CASE_FLT_FN (BUILT_IN_ISINF):
5849 CASE_FLT_FN (BUILT_IN_FINITE):
5850 case BUILT_IN_ISFINITE:
5851 case BUILT_IN_ISNORMAL:
5852 target = expand_builtin_interclass_mathfn (exp, target);
5853 if (target)
5854 return target;
5855 break;
5856
5857 CASE_FLT_FN (BUILT_IN_ICEIL):
5858 CASE_FLT_FN (BUILT_IN_LCEIL):
5859 CASE_FLT_FN (BUILT_IN_LLCEIL):
5860 CASE_FLT_FN (BUILT_IN_LFLOOR):
5861 CASE_FLT_FN (BUILT_IN_IFLOOR):
5862 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5863 target = expand_builtin_int_roundingfn (exp, target);
5864 if (target)
5865 return target;
5866 break;
5867
5868 CASE_FLT_FN (BUILT_IN_IRINT):
5869 CASE_FLT_FN (BUILT_IN_LRINT):
5870 CASE_FLT_FN (BUILT_IN_LLRINT):
5871 CASE_FLT_FN (BUILT_IN_IROUND):
5872 CASE_FLT_FN (BUILT_IN_LROUND):
5873 CASE_FLT_FN (BUILT_IN_LLROUND):
5874 target = expand_builtin_int_roundingfn_2 (exp, target);
5875 if (target)
5876 return target;
5877 break;
5878
5879 CASE_FLT_FN (BUILT_IN_POWI):
5880 target = expand_builtin_powi (exp, target);
5881 if (target)
5882 return target;
5883 break;
5884
5885 CASE_FLT_FN (BUILT_IN_ATAN2):
5886 CASE_FLT_FN (BUILT_IN_LDEXP):
5887 CASE_FLT_FN (BUILT_IN_SCALB):
5888 CASE_FLT_FN (BUILT_IN_SCALBN):
5889 CASE_FLT_FN (BUILT_IN_SCALBLN):
5890 if (! flag_unsafe_math_optimizations)
5891 break;
5892
5893 CASE_FLT_FN (BUILT_IN_FMOD):
5894 CASE_FLT_FN (BUILT_IN_REMAINDER):
5895 CASE_FLT_FN (BUILT_IN_DREM):
5896 CASE_FLT_FN (BUILT_IN_POW):
5897 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5898 if (target)
5899 return target;
5900 break;
5901
5902 CASE_FLT_FN (BUILT_IN_CEXPI):
5903 target = expand_builtin_cexpi (exp, target);
5904 gcc_assert (target);
5905 return target;
5906
5907 CASE_FLT_FN (BUILT_IN_SIN):
5908 CASE_FLT_FN (BUILT_IN_COS):
5909 if (! flag_unsafe_math_optimizations)
5910 break;
5911 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5912 if (target)
5913 return target;
5914 break;
5915
5916 CASE_FLT_FN (BUILT_IN_SINCOS):
5917 if (! flag_unsafe_math_optimizations)
5918 break;
5919 target = expand_builtin_sincos (exp);
5920 if (target)
5921 return target;
5922 break;
5923
5924 case BUILT_IN_APPLY_ARGS:
5925 return expand_builtin_apply_args ();
5926
5927 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5928 FUNCTION with a copy of the parameters described by
5929 ARGUMENTS, and ARGSIZE. It returns a block of memory
5930 allocated on the stack into which is stored all the registers
5931 that might possibly be used for returning the result of a
5932 function. ARGUMENTS is the value returned by
5933 __builtin_apply_args. ARGSIZE is the number of bytes of
5934 arguments that must be copied. ??? How should this value be
5935 computed? We'll also need a safe worst case value for varargs
5936 functions. */
5937 case BUILT_IN_APPLY:
5938 if (!validate_arglist (exp, POINTER_TYPE,
5939 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5940 && !validate_arglist (exp, REFERENCE_TYPE,
5941 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5942 return const0_rtx;
5943 else
5944 {
5945 rtx ops[3];
5946
5947 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5948 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5949 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5950
5951 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5952 }
5953
5954 /* __builtin_return (RESULT) causes the function to return the
5955 value described by RESULT. RESULT is address of the block of
5956 memory returned by __builtin_apply. */
5957 case BUILT_IN_RETURN:
5958 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5959 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5960 return const0_rtx;
5961
5962 case BUILT_IN_SAVEREGS:
5963 return expand_builtin_saveregs ();
5964
5965 case BUILT_IN_VA_ARG_PACK:
5966 /* All valid uses of __builtin_va_arg_pack () are removed during
5967 inlining. */
5968 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5969 return const0_rtx;
5970
5971 case BUILT_IN_VA_ARG_PACK_LEN:
5972 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5973 inlining. */
5974 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5975 return const0_rtx;
5976
5977 /* Return the address of the first anonymous stack arg. */
5978 case BUILT_IN_NEXT_ARG:
5979 if (fold_builtin_next_arg (exp, false))
5980 return const0_rtx;
5981 return expand_builtin_next_arg ();
5982
5983 case BUILT_IN_CLEAR_CACHE:
5984 target = expand_builtin___clear_cache (exp);
5985 if (target)
5986 return target;
5987 break;
5988
5989 case BUILT_IN_CLASSIFY_TYPE:
5990 return expand_builtin_classify_type (exp);
5991
5992 case BUILT_IN_CONSTANT_P:
5993 return const0_rtx;
5994
5995 case BUILT_IN_FRAME_ADDRESS:
5996 case BUILT_IN_RETURN_ADDRESS:
5997 return expand_builtin_frame_address (fndecl, exp);
5998
5999 /* Returns the address of the area where the structure value is
6000 returned, or 0 otherwise. */
6001 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6002 if (call_expr_nargs (exp) != 0
6003 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6004 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6005 return const0_rtx;
6006 else
6007 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6008
6009 case BUILT_IN_ALLOCA:
6010 case BUILT_IN_ALLOCA_WITH_ALIGN:
6011 /* If the allocation stems from the declaration of a variable-sized
6012 object, it cannot accumulate. */
6013 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6014 if (target)
6015 return target;
6016 break;
6017
6018 case BUILT_IN_STACK_SAVE:
6019 return expand_stack_save ();
6020
6021 case BUILT_IN_STACK_RESTORE:
6022 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6023 return const0_rtx;
6024
6025 case BUILT_IN_BSWAP16:
6026 case BUILT_IN_BSWAP32:
6027 case BUILT_IN_BSWAP64:
6028 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6029 if (target)
6030 return target;
6031 break;
6032
6033 CASE_INT_FN (BUILT_IN_FFS):
6034 target = expand_builtin_unop (target_mode, exp, target,
6035 subtarget, ffs_optab);
6036 if (target)
6037 return target;
6038 break;
6039
6040 CASE_INT_FN (BUILT_IN_CLZ):
6041 target = expand_builtin_unop (target_mode, exp, target,
6042 subtarget, clz_optab);
6043 if (target)
6044 return target;
6045 break;
6046
6047 CASE_INT_FN (BUILT_IN_CTZ):
6048 target = expand_builtin_unop (target_mode, exp, target,
6049 subtarget, ctz_optab);
6050 if (target)
6051 return target;
6052 break;
6053
6054 CASE_INT_FN (BUILT_IN_CLRSB):
6055 target = expand_builtin_unop (target_mode, exp, target,
6056 subtarget, clrsb_optab);
6057 if (target)
6058 return target;
6059 break;
6060
6061 CASE_INT_FN (BUILT_IN_POPCOUNT):
6062 target = expand_builtin_unop (target_mode, exp, target,
6063 subtarget, popcount_optab);
6064 if (target)
6065 return target;
6066 break;
6067
6068 CASE_INT_FN (BUILT_IN_PARITY):
6069 target = expand_builtin_unop (target_mode, exp, target,
6070 subtarget, parity_optab);
6071 if (target)
6072 return target;
6073 break;
6074
6075 case BUILT_IN_STRLEN:
6076 target = expand_builtin_strlen (exp, target, target_mode);
6077 if (target)
6078 return target;
6079 break;
6080
6081 case BUILT_IN_STRCPY:
6082 target = expand_builtin_strcpy (exp, target);
6083 if (target)
6084 return target;
6085 break;
6086
6087 case BUILT_IN_STRNCPY:
6088 target = expand_builtin_strncpy (exp, target);
6089 if (target)
6090 return target;
6091 break;
6092
6093 case BUILT_IN_STPCPY:
6094 target = expand_builtin_stpcpy (exp, target, mode);
6095 if (target)
6096 return target;
6097 break;
6098
6099 case BUILT_IN_MEMCPY:
6100 target = expand_builtin_memcpy (exp, target);
6101 if (target)
6102 return target;
6103 break;
6104
6105 case BUILT_IN_MEMPCPY:
6106 target = expand_builtin_mempcpy (exp, target, mode);
6107 if (target)
6108 return target;
6109 break;
6110
6111 case BUILT_IN_MEMSET:
6112 target = expand_builtin_memset (exp, target, mode);
6113 if (target)
6114 return target;
6115 break;
6116
6117 case BUILT_IN_BZERO:
6118 target = expand_builtin_bzero (exp);
6119 if (target)
6120 return target;
6121 break;
6122
6123 case BUILT_IN_STRCMP:
6124 target = expand_builtin_strcmp (exp, target);
6125 if (target)
6126 return target;
6127 break;
6128
6129 case BUILT_IN_STRNCMP:
6130 target = expand_builtin_strncmp (exp, target, mode);
6131 if (target)
6132 return target;
6133 break;
6134
6135 case BUILT_IN_BCMP:
6136 case BUILT_IN_MEMCMP:
6137 target = expand_builtin_memcmp (exp, target, mode);
6138 if (target)
6139 return target;
6140 break;
6141
6142 case BUILT_IN_SETJMP:
6143 /* This should have been lowered to the builtins below. */
6144 gcc_unreachable ();
6145
6146 case BUILT_IN_SETJMP_SETUP:
6147 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6148 and the receiver label. */
6149 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6150 {
6151 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6152 VOIDmode, EXPAND_NORMAL);
6153 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6154 rtx label_r = label_rtx (label);
6155
6156 /* This is copied from the handling of non-local gotos. */
6157 expand_builtin_setjmp_setup (buf_addr, label_r);
6158 nonlocal_goto_handler_labels
6159 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6160 nonlocal_goto_handler_labels);
6161 /* ??? Do not let expand_label treat us as such since we would
6162 not want to be both on the list of non-local labels and on
6163 the list of forced labels. */
6164 FORCED_LABEL (label) = 0;
6165 return const0_rtx;
6166 }
6167 break;
6168
6169 case BUILT_IN_SETJMP_DISPATCHER:
6170 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6171 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6172 {
6173 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6174 rtx label_r = label_rtx (label);
6175
6176 /* Remove the dispatcher label from the list of non-local labels
6177 since the receiver labels have been added to it above. */
6178 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6179 return const0_rtx;
6180 }
6181 break;
6182
6183 case BUILT_IN_SETJMP_RECEIVER:
6184 /* __builtin_setjmp_receiver is passed the receiver label. */
6185 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6186 {
6187 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6188 rtx label_r = label_rtx (label);
6189
6190 expand_builtin_setjmp_receiver (label_r);
6191 return const0_rtx;
6192 }
6193 break;
6194
6195 /* __builtin_longjmp is passed a pointer to an array of five words.
6196 It's similar to the C library longjmp function but works with
6197 __builtin_setjmp above. */
6198 case BUILT_IN_LONGJMP:
6199 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6200 {
6201 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6202 VOIDmode, EXPAND_NORMAL);
6203 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6204
6205 if (value != const1_rtx)
6206 {
6207 error ("%<__builtin_longjmp%> second argument must be 1");
6208 return const0_rtx;
6209 }
6210
6211 expand_builtin_longjmp (buf_addr, value);
6212 return const0_rtx;
6213 }
6214 break;
6215
6216 case BUILT_IN_NONLOCAL_GOTO:
6217 target = expand_builtin_nonlocal_goto (exp);
6218 if (target)
6219 return target;
6220 break;
6221
6222 /* This updates the setjmp buffer that is its argument with the value
6223 of the current stack pointer. */
6224 case BUILT_IN_UPDATE_SETJMP_BUF:
6225 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6226 {
6227 rtx buf_addr
6228 = expand_normal (CALL_EXPR_ARG (exp, 0));
6229
6230 expand_builtin_update_setjmp_buf (buf_addr);
6231 return const0_rtx;
6232 }
6233 break;
6234
6235 case BUILT_IN_TRAP:
6236 expand_builtin_trap ();
6237 return const0_rtx;
6238
6239 case BUILT_IN_UNREACHABLE:
6240 expand_builtin_unreachable ();
6241 return const0_rtx;
6242
6243 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6244 case BUILT_IN_SIGNBITD32:
6245 case BUILT_IN_SIGNBITD64:
6246 case BUILT_IN_SIGNBITD128:
6247 target = expand_builtin_signbit (exp, target);
6248 if (target)
6249 return target;
6250 break;
6251
6252 /* Various hooks for the DWARF 2 __throw routine. */
6253 case BUILT_IN_UNWIND_INIT:
6254 expand_builtin_unwind_init ();
6255 return const0_rtx;
6256 case BUILT_IN_DWARF_CFA:
6257 return virtual_cfa_rtx;
6258 #ifdef DWARF2_UNWIND_INFO
6259 case BUILT_IN_DWARF_SP_COLUMN:
6260 return expand_builtin_dwarf_sp_column ();
6261 case BUILT_IN_INIT_DWARF_REG_SIZES:
6262 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6263 return const0_rtx;
6264 #endif
6265 case BUILT_IN_FROB_RETURN_ADDR:
6266 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6267 case BUILT_IN_EXTRACT_RETURN_ADDR:
6268 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6269 case BUILT_IN_EH_RETURN:
6270 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6271 CALL_EXPR_ARG (exp, 1));
6272 return const0_rtx;
6273 #ifdef EH_RETURN_DATA_REGNO
6274 case BUILT_IN_EH_RETURN_DATA_REGNO:
6275 return expand_builtin_eh_return_data_regno (exp);
6276 #endif
6277 case BUILT_IN_EXTEND_POINTER:
6278 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6279 case BUILT_IN_EH_POINTER:
6280 return expand_builtin_eh_pointer (exp);
6281 case BUILT_IN_EH_FILTER:
6282 return expand_builtin_eh_filter (exp);
6283 case BUILT_IN_EH_COPY_VALUES:
6284 return expand_builtin_eh_copy_values (exp);
6285
6286 case BUILT_IN_VA_START:
6287 return expand_builtin_va_start (exp);
6288 case BUILT_IN_VA_END:
6289 return expand_builtin_va_end (exp);
6290 case BUILT_IN_VA_COPY:
6291 return expand_builtin_va_copy (exp);
6292 case BUILT_IN_EXPECT:
6293 return expand_builtin_expect (exp, target);
6294 case BUILT_IN_ASSUME_ALIGNED:
6295 return expand_builtin_assume_aligned (exp, target);
6296 case BUILT_IN_PREFETCH:
6297 expand_builtin_prefetch (exp);
6298 return const0_rtx;
6299
6300 case BUILT_IN_INIT_TRAMPOLINE:
6301 return expand_builtin_init_trampoline (exp, true);
6302 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6303 return expand_builtin_init_trampoline (exp, false);
6304 case BUILT_IN_ADJUST_TRAMPOLINE:
6305 return expand_builtin_adjust_trampoline (exp);
6306
6307 case BUILT_IN_FORK:
6308 case BUILT_IN_EXECL:
6309 case BUILT_IN_EXECV:
6310 case BUILT_IN_EXECLP:
6311 case BUILT_IN_EXECLE:
6312 case BUILT_IN_EXECVP:
6313 case BUILT_IN_EXECVE:
6314 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6315 if (target)
6316 return target;
6317 break;
6318
6319 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6320 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6321 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6322 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6323 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6324 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6325 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6326 if (target)
6327 return target;
6328 break;
6329
6330 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6331 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6332 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6333 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6334 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6335 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6336 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6337 if (target)
6338 return target;
6339 break;
6340
6341 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6342 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6343 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6344 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6345 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6346 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6347 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6348 if (target)
6349 return target;
6350 break;
6351
6352 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6353 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6354 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6355 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6356 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6357 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6358 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6359 if (target)
6360 return target;
6361 break;
6362
6363 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6364 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6365 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6366 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6367 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6368 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6369 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6370 if (target)
6371 return target;
6372 break;
6373
6374 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6375 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6376 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6377 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6378 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6379 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6380 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6381 if (target)
6382 return target;
6383 break;
6384
6385 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6386 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6387 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6388 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6389 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6390 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6391 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6392 if (target)
6393 return target;
6394 break;
6395
6396 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6397 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6398 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6399 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6400 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6401 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6402 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6403 if (target)
6404 return target;
6405 break;
6406
6407 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6408 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6409 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6410 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6411 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6412 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6413 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6414 if (target)
6415 return target;
6416 break;
6417
6418 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6419 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6420 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6421 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6422 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6423 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6424 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6425 if (target)
6426 return target;
6427 break;
6428
6429 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6430 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6431 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6432 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6433 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6434 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6435 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6436 if (target)
6437 return target;
6438 break;
6439
6440 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6441 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6442 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6443 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6444 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6445 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6446 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6447 if (target)
6448 return target;
6449 break;
6450
6451 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6452 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6453 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6454 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6455 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6456 if (mode == VOIDmode)
6457 mode = TYPE_MODE (boolean_type_node);
6458 if (!target || !register_operand (target, mode))
6459 target = gen_reg_rtx (mode);
6460
6461 mode = get_builtin_sync_mode
6462 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6463 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6464 if (target)
6465 return target;
6466 break;
6467
6468 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6469 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6470 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6471 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6472 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6473 mode = get_builtin_sync_mode
6474 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6475 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6476 if (target)
6477 return target;
6478 break;
6479
6480 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6481 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6482 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6483 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6484 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6485 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6486 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6487 if (target)
6488 return target;
6489 break;
6490
6491 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6492 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6493 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6494 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6495 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6496 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6497 expand_builtin_sync_lock_release (mode, exp);
6498 return const0_rtx;
6499
6500 case BUILT_IN_SYNC_SYNCHRONIZE:
6501 expand_builtin_sync_synchronize ();
6502 return const0_rtx;
6503
6504 case BUILT_IN_ATOMIC_EXCHANGE_1:
6505 case BUILT_IN_ATOMIC_EXCHANGE_2:
6506 case BUILT_IN_ATOMIC_EXCHANGE_4:
6507 case BUILT_IN_ATOMIC_EXCHANGE_8:
6508 case BUILT_IN_ATOMIC_EXCHANGE_16:
6509 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6510 target = expand_builtin_atomic_exchange (mode, exp, target);
6511 if (target)
6512 return target;
6513 break;
6514
6515 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6516 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6517 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6518 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6519 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6520 {
6521 unsigned int nargs, z;
6522 vec<tree, va_gc> *vec;
6523
6524 mode
6525 = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6526 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6527 if (target)
6528 return target;
6529
6530 /* If this is turned into an external library call, the weak parameter
6531 must be dropped to match the expected parameter list. */
6532 nargs = call_expr_nargs (exp);
6533 vec_alloc (vec, nargs - 1);
6534 for (z = 0; z < 3; z++)
6535 vec->quick_push (CALL_EXPR_ARG (exp, z));
6536 /* Skip the boolean weak parameter. */
6537 for (z = 4; z < 6; z++)
6538 vec->quick_push (CALL_EXPR_ARG (exp, z));
6539 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6540 break;
6541 }
6542
6543 case BUILT_IN_ATOMIC_LOAD_1:
6544 case BUILT_IN_ATOMIC_LOAD_2:
6545 case BUILT_IN_ATOMIC_LOAD_4:
6546 case BUILT_IN_ATOMIC_LOAD_8:
6547 case BUILT_IN_ATOMIC_LOAD_16:
6548 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6549 target = expand_builtin_atomic_load (mode, exp, target);
6550 if (target)
6551 return target;
6552 break;
6553
6554 case BUILT_IN_ATOMIC_STORE_1:
6555 case BUILT_IN_ATOMIC_STORE_2:
6556 case BUILT_IN_ATOMIC_STORE_4:
6557 case BUILT_IN_ATOMIC_STORE_8:
6558 case BUILT_IN_ATOMIC_STORE_16:
6559 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6560 target = expand_builtin_atomic_store (mode, exp);
6561 if (target)
6562 return const0_rtx;
6563 break;
6564
6565 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6566 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6567 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6568 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6569 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6570 {
6571 enum built_in_function lib;
6572 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6573 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6574 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6575 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6576 ignore, lib);
6577 if (target)
6578 return target;
6579 break;
6580 }
6581 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6582 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6583 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6584 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6585 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6586 {
6587 enum built_in_function lib;
6588 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6589 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6590 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6591 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6592 ignore, lib);
6593 if (target)
6594 return target;
6595 break;
6596 }
6597 case BUILT_IN_ATOMIC_AND_FETCH_1:
6598 case BUILT_IN_ATOMIC_AND_FETCH_2:
6599 case BUILT_IN_ATOMIC_AND_FETCH_4:
6600 case BUILT_IN_ATOMIC_AND_FETCH_8:
6601 case BUILT_IN_ATOMIC_AND_FETCH_16:
6602 {
6603 enum built_in_function lib;
6604 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6605 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6606 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6607 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6608 ignore, lib);
6609 if (target)
6610 return target;
6611 break;
6612 }
6613 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6614 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6615 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6616 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6617 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6618 {
6619 enum built_in_function lib;
6620 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6621 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6622 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6623 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6624 ignore, lib);
6625 if (target)
6626 return target;
6627 break;
6628 }
6629 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6630 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6631 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6632 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6633 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6634 {
6635 enum built_in_function lib;
6636 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6637 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6638 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6639 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6640 ignore, lib);
6641 if (target)
6642 return target;
6643 break;
6644 }
6645 case BUILT_IN_ATOMIC_OR_FETCH_1:
6646 case BUILT_IN_ATOMIC_OR_FETCH_2:
6647 case BUILT_IN_ATOMIC_OR_FETCH_4:
6648 case BUILT_IN_ATOMIC_OR_FETCH_8:
6649 case BUILT_IN_ATOMIC_OR_FETCH_16:
6650 {
6651 enum built_in_function lib;
6652 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6653 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6654 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6655 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6656 ignore, lib);
6657 if (target)
6658 return target;
6659 break;
6660 }
6661 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6662 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6663 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6664 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6665 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6666 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6667 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6668 ignore, BUILT_IN_NONE);
6669 if (target)
6670 return target;
6671 break;
6672
6673 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6674 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6675 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6676 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6677 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6678 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6679 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6680 ignore, BUILT_IN_NONE);
6681 if (target)
6682 return target;
6683 break;
6684
6685 case BUILT_IN_ATOMIC_FETCH_AND_1:
6686 case BUILT_IN_ATOMIC_FETCH_AND_2:
6687 case BUILT_IN_ATOMIC_FETCH_AND_4:
6688 case BUILT_IN_ATOMIC_FETCH_AND_8:
6689 case BUILT_IN_ATOMIC_FETCH_AND_16:
6690 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6691 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6692 ignore, BUILT_IN_NONE);
6693 if (target)
6694 return target;
6695 break;
6696
6697 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6698 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6699 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6700 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6701 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6702 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6703 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6704 ignore, BUILT_IN_NONE);
6705 if (target)
6706 return target;
6707 break;
6708
6709 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6710 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6711 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6712 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6713 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6714 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6715 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6716 ignore, BUILT_IN_NONE);
6717 if (target)
6718 return target;
6719 break;
6720
6721 case BUILT_IN_ATOMIC_FETCH_OR_1:
6722 case BUILT_IN_ATOMIC_FETCH_OR_2:
6723 case BUILT_IN_ATOMIC_FETCH_OR_4:
6724 case BUILT_IN_ATOMIC_FETCH_OR_8:
6725 case BUILT_IN_ATOMIC_FETCH_OR_16:
6726 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6727 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6728 ignore, BUILT_IN_NONE);
6729 if (target)
6730 return target;
6731 break;
6732
6733 case BUILT_IN_ATOMIC_TEST_AND_SET:
6734 return expand_builtin_atomic_test_and_set (exp, target);
6735
6736 case BUILT_IN_ATOMIC_CLEAR:
6737 return expand_builtin_atomic_clear (exp);
6738
6739 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6740 return expand_builtin_atomic_always_lock_free (exp);
6741
6742 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6743 target = expand_builtin_atomic_is_lock_free (exp);
6744 if (target)
6745 return target;
6746 break;
6747
6748 case BUILT_IN_ATOMIC_THREAD_FENCE:
6749 expand_builtin_atomic_thread_fence (exp);
6750 return const0_rtx;
6751
6752 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6753 expand_builtin_atomic_signal_fence (exp);
6754 return const0_rtx;
6755
6756 case BUILT_IN_OBJECT_SIZE:
6757 return expand_builtin_object_size (exp);
6758
6759 case BUILT_IN_MEMCPY_CHK:
6760 case BUILT_IN_MEMPCPY_CHK:
6761 case BUILT_IN_MEMMOVE_CHK:
6762 case BUILT_IN_MEMSET_CHK:
6763 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6764 if (target)
6765 return target;
6766 break;
6767
6768 case BUILT_IN_STRCPY_CHK:
6769 case BUILT_IN_STPCPY_CHK:
6770 case BUILT_IN_STRNCPY_CHK:
6771 case BUILT_IN_STPNCPY_CHK:
6772 case BUILT_IN_STRCAT_CHK:
6773 case BUILT_IN_STRNCAT_CHK:
6774 case BUILT_IN_SNPRINTF_CHK:
6775 case BUILT_IN_VSNPRINTF_CHK:
6776 maybe_emit_chk_warning (exp, fcode);
6777 break;
6778
6779 case BUILT_IN_SPRINTF_CHK:
6780 case BUILT_IN_VSPRINTF_CHK:
6781 maybe_emit_sprintf_chk_warning (exp, fcode);
6782 break;
6783
6784 case BUILT_IN_FREE:
6785 if (warn_free_nonheap_object)
6786 maybe_emit_free_warning (exp);
6787 break;
6788
6789 case BUILT_IN_THREAD_POINTER:
6790 return expand_builtin_thread_pointer (exp, target);
6791
6792 case BUILT_IN_SET_THREAD_POINTER:
6793 expand_builtin_set_thread_pointer (exp);
6794 return const0_rtx;
6795
6796 default: /* Just do a library call if the builtin is unknown. */
6797 break;
6798 }
6799
6800 /* The switch statement above can drop through to cause the function
6801 to be called normally. */
6802 return expand_call (exp, target, ignore);
6803 }
6804
6805 /* Determine whether a tree node represents a call to a built-in
6806 function. If the tree T is a call to a built-in function with
6807 the right number of arguments of the appropriate types, return
6808 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6809 Otherwise the return value is END_BUILTINS. */
6810
6811 enum built_in_function
6812 builtin_mathfn_code (const_tree t)
6813 {
6814 const_tree fndecl, arg, parmlist;
6815 const_tree argtype, parmtype;
6816 const_call_expr_arg_iterator iter;
6817
6818 if (TREE_CODE (t) != CALL_EXPR
6819 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6820 return END_BUILTINS;
6821
6822 fndecl = get_callee_fndecl (t);
6823 if (fndecl == NULL_TREE
6824 || TREE_CODE (fndecl) != FUNCTION_DECL
6825 || ! DECL_BUILT_IN (fndecl)
6826 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6827 return END_BUILTINS;
6828
6829 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6830 init_const_call_expr_arg_iterator (t, &iter);
6831 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6832 {
6833 /* If a function doesn't take a variable number of arguments,
6834 the last element in the list will have type `void'. */
6835 parmtype = TREE_VALUE (parmlist);
6836 if (VOID_TYPE_P (parmtype))
6837 {
6838 if (more_const_call_expr_args_p (&iter))
6839 return END_BUILTINS;
6840 return DECL_FUNCTION_CODE (fndecl);
6841 }
6842
6843 if (! more_const_call_expr_args_p (&iter))
6844 return END_BUILTINS;
6845
6846 arg = next_const_call_expr_arg (&iter);
6847 argtype = TREE_TYPE (arg);
6848
6849 if (SCALAR_FLOAT_TYPE_P (parmtype))
6850 {
6851 if (! SCALAR_FLOAT_TYPE_P (argtype))
6852 return END_BUILTINS;
6853 }
6854 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6855 {
6856 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6857 return END_BUILTINS;
6858 }
6859 else if (POINTER_TYPE_P (parmtype))
6860 {
6861 if (! POINTER_TYPE_P (argtype))
6862 return END_BUILTINS;
6863 }
6864 else if (INTEGRAL_TYPE_P (parmtype))
6865 {
6866 if (! INTEGRAL_TYPE_P (argtype))
6867 return END_BUILTINS;
6868 }
6869 else
6870 return END_BUILTINS;
6871 }
6872
6873 /* Variable-length argument list. */
6874 return DECL_FUNCTION_CODE (fndecl);
6875 }
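
/* A minimal usage sketch of the helper above (a hypothetical caller,
   not part of this file). Note that builtin_mathfn_code also validates
   the argument list against the builtin's prototype, so it is stricter
   than checking DECL_FUNCTION_CODE alone:

     static bool
     call_is_sqrt_p (const_tree t)
     {
       switch (builtin_mathfn_code (t))
         {
         CASE_FLT_FN (BUILT_IN_SQRT):
           return true;
         default:
           return false;
         }
     }
*/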
6876
6877 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6878 evaluate to a constant. */
6879
6880 static tree
6881 fold_builtin_constant_p (tree arg)
6882 {
6883 /* We return 1 for a numeric type that's known to be a constant
6884 value at compile-time or for an aggregate type that's a
6885 literal constant. */
6886 STRIP_NOPS (arg);
6887
6888 /* If we know this is a constant, return the constant one. */
6889 if (CONSTANT_CLASS_P (arg)
6890 || (TREE_CODE (arg) == CONSTRUCTOR
6891 && TREE_CONSTANT (arg)))
6892 return integer_one_node;
6893 if (TREE_CODE (arg) == ADDR_EXPR)
6894 {
6895 tree op = TREE_OPERAND (arg, 0);
6896 if (TREE_CODE (op) == STRING_CST
6897 || (TREE_CODE (op) == ARRAY_REF
6898 && integer_zerop (TREE_OPERAND (op, 1))
6899 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6900 return integer_one_node;
6901 }
6902
6903 /* If this expression has side effects, show we don't know it to be a
6904 constant. Likewise if it's a pointer or aggregate type, since in
6905 those cases we only want literals, as those are only optimized
6906 when generating RTL, not later.
6907 And finally, if we are compiling an initializer, not code, we
6908 need to return a definite result now; there's not going to be any
6909 more optimization done. */
6910 if (TREE_SIDE_EFFECTS (arg)
6911 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6912 || POINTER_TYPE_P (TREE_TYPE (arg))
6913 || cfun == 0
6914 || folding_initializer
6915 || force_folding_builtin_constant_p)
6916 return integer_zero_node;
6917
6918 return NULL_TREE;
6919 }
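
/* Illustrative behavior of the folder above, assuming an int X and a
   pointer P (a sketch; the last outcome depends on when folding runs):

     __builtin_constant_p (42)     -> 1  (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")  -> 1  (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (p)     -> 0  (pointer type: only literals count)
     __builtin_constant_p (x + 1) -> NULL_TREE here, i.e. the answer is
        deferred until later folding can prove or disprove constancy.  */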
6920
6921 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6922 return it as a truthvalue. */
6923
6924 static tree
6925 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6926 {
6927 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6928
6929 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6930 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6931 ret_type = TREE_TYPE (TREE_TYPE (fn));
6932 pred_type = TREE_VALUE (arg_types);
6933 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6934
6935 pred = fold_convert_loc (loc, pred_type, pred);
6936 expected = fold_convert_loc (loc, expected_type, expected);
6937 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6938
6939 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6940 build_int_cst (ret_type, 0));
6941 }
6942
6943 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6944 NULL_TREE if no simplification is possible. */
6945
6946 static tree
6947 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6948 {
6949 tree inner, fndecl, inner_arg0;
6950 enum tree_code code;
6951
6952 /* Distribute the expected value over short-circuiting operators.
6953 See through the cast from truthvalue_type_node to long. */
6954 inner_arg0 = arg0;
6955 while (TREE_CODE (inner_arg0) == NOP_EXPR
6956 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6957 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6958 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6959
6960 /* If this is a builtin_expect within a builtin_expect, keep the
6961 inner one. See through a comparison against a constant. It
6962 might have been added to create a truthvalue. */
6963 inner = inner_arg0;
6964
6965 if (COMPARISON_CLASS_P (inner)
6966 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6967 inner = TREE_OPERAND (inner, 0);
6968
6969 if (TREE_CODE (inner) == CALL_EXPR
6970 && (fndecl = get_callee_fndecl (inner))
6971 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6972 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6973 return arg0;
6974
6975 inner = inner_arg0;
6976 code = TREE_CODE (inner);
6977 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6978 {
6979 tree op0 = TREE_OPERAND (inner, 0);
6980 tree op1 = TREE_OPERAND (inner, 1);
6981
6982 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6983 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6984 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6985
6986 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6987 }
6988
6989 /* If the argument isn't invariant then there's nothing else we can do. */
6990 if (!TREE_CONSTANT (inner_arg0))
6991 return NULL_TREE;
6992
6993 /* If we expect that a comparison against the argument will fold to
6994 a constant return the constant. In practice, this means a true
6995 constant or the address of a non-weak symbol. */
6996 inner = inner_arg0;
6997 STRIP_NOPS (inner);
6998 if (TREE_CODE (inner) == ADDR_EXPR)
6999 {
7000 do
7001 {
7002 inner = TREE_OPERAND (inner, 0);
7003 }
7004 while (TREE_CODE (inner) == COMPONENT_REF
7005 || TREE_CODE (inner) == ARRAY_REF);
7006 if ((TREE_CODE (inner) == VAR_DECL
7007 || TREE_CODE (inner) == FUNCTION_DECL)
7008 && DECL_WEAK (inner))
7009 return NULL_TREE;
7010 }
7011
7012 /* Otherwise, ARG0 already has the proper type for the return value. */
7013 return arg0;
7014 }
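
/* A source-level sketch of the distribution done above (A and B are
   hypothetical conditions):

     __builtin_expect (a && b, 1)

   becomes, via build_builtin_expect_predicate on each operand,

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so the prediction survives the short-circuit lowering of the
   TRUTH_ANDIF_EXPR.  */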
7015
7016 /* Fold a call to __builtin_classify_type with argument ARG. */
7017
7018 static tree
7019 fold_builtin_classify_type (tree arg)
7020 {
7021 if (arg == 0)
7022 return build_int_cst (integer_type_node, no_type_class);
7023
7024 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7025 }
7026
7027 /* Fold a call to __builtin_strlen with argument ARG. */
7028
7029 static tree
7030 fold_builtin_strlen (location_t loc, tree type, tree arg)
7031 {
7032 if (!validate_arg (arg, POINTER_TYPE))
7033 return NULL_TREE;
7034 else
7035 {
7036 tree len = c_strlen (arg, 0);
7037
7038 if (len)
7039 return fold_convert_loc (loc, type, len);
7040
7041 return NULL_TREE;
7042 }
7043 }
7044
7045 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7046
7047 static tree
7048 fold_builtin_inf (location_t loc, tree type, int warn)
7049 {
7050 REAL_VALUE_TYPE real;
7051
7052 /* __builtin_inff is intended to be usable to define INFINITY on all
7053 targets. If an infinity is not available, INFINITY expands "to a
7054 positive constant of type float that overflows at translation
7055 time", footnote "In this case, using INFINITY will violate the
7056 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7057 Thus we pedwarn to ensure this constraint violation is
7058 diagnosed. */
7059 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7060 pedwarn (loc, 0, "target format does not support infinity");
7061
7062 real_inf (&real);
7063 return build_real (type, real);
7064 }
7065
7066 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7067
7068 static tree
7069 fold_builtin_nan (tree arg, tree type, int quiet)
7070 {
7071 REAL_VALUE_TYPE real;
7072 const char *str;
7073
7074 if (!validate_arg (arg, POINTER_TYPE))
7075 return NULL_TREE;
7076 str = c_getstr (arg);
7077 if (!str)
7078 return NULL_TREE;
7079
7080 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7081 return NULL_TREE;
7082
7083 return build_real (type, real);
7084 }
7085
7086 /* Return true if the floating point expression T has an integer value.
7087 We also allow +Inf, -Inf and NaN to be considered integer values. */
7088
7089 static bool
7090 integer_valued_real_p (tree t)
7091 {
7092 switch (TREE_CODE (t))
7093 {
7094 case FLOAT_EXPR:
7095 return true;
7096
7097 case ABS_EXPR:
7098 case SAVE_EXPR:
7099 return integer_valued_real_p (TREE_OPERAND (t, 0));
7100
7101 case COMPOUND_EXPR:
7102 case MODIFY_EXPR:
7103 case BIND_EXPR:
7104 return integer_valued_real_p (TREE_OPERAND (t, 1));
7105
7106 case PLUS_EXPR:
7107 case MINUS_EXPR:
7108 case MULT_EXPR:
7109 case MIN_EXPR:
7110 case MAX_EXPR:
7111 return integer_valued_real_p (TREE_OPERAND (t, 0))
7112 && integer_valued_real_p (TREE_OPERAND (t, 1));
7113
7114 case COND_EXPR:
7115 return integer_valued_real_p (TREE_OPERAND (t, 1))
7116 && integer_valued_real_p (TREE_OPERAND (t, 2));
7117
7118 case REAL_CST:
7119 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7120
7121 case NOP_EXPR:
7122 {
7123 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7124 if (TREE_CODE (type) == INTEGER_TYPE)
7125 return true;
7126 if (TREE_CODE (type) == REAL_TYPE)
7127 return integer_valued_real_p (TREE_OPERAND (t, 0));
7128 break;
7129 }
7130
7131 case CALL_EXPR:
7132 switch (builtin_mathfn_code (t))
7133 {
7134 CASE_FLT_FN (BUILT_IN_CEIL):
7135 CASE_FLT_FN (BUILT_IN_FLOOR):
7136 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7137 CASE_FLT_FN (BUILT_IN_RINT):
7138 CASE_FLT_FN (BUILT_IN_ROUND):
7139 CASE_FLT_FN (BUILT_IN_TRUNC):
7140 return true;
7141
7142 CASE_FLT_FN (BUILT_IN_FMIN):
7143 CASE_FLT_FN (BUILT_IN_FMAX):
7144 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7145 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7146
7147 default:
7148 break;
7149 }
7150 break;
7151
7152 default:
7153 break;
7154 }
7155 return false;
7156 }
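
/* For example, for the (hypothetical) source expression

     floor (x) + trunc (y)

   the recursion above answers true: both operands are calls known to
   yield integral values, and a PLUS_EXPR of two integer-valued reals
   is itself integer-valued.  A bare double X answers false, since
   nothing constrains its fractional part.  */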
7157
7158 /* FNDECL is assumed to be a builtin where truncation can be propagated
7159 across (for instance floor((double)f) == (double)floorf (f)).
7160 Do the transformation for a call with argument ARG. */
7161
7162 static tree
7163 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7164 {
7165 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7166
7167 if (!validate_arg (arg, REAL_TYPE))
7168 return NULL_TREE;
7169
7170 /* Integer rounding functions are idempotent. */
7171 if (fcode == builtin_mathfn_code (arg))
7172 return arg;
7173
7174 /* If argument is already integer valued, and we don't need to worry
7175 about setting errno, there's no need to perform rounding. */
7176 if (! flag_errno_math && integer_valued_real_p (arg))
7177 return arg;
7178
7179 if (optimize)
7180 {
7181 tree arg0 = strip_float_extensions (arg);
7182 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7183 tree newtype = TREE_TYPE (arg0);
7184 tree decl;
7185
7186 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7187 && (decl = mathfn_built_in (newtype, fcode)))
7188 return fold_convert_loc (loc, ftype,
7189 build_call_expr_loc (loc, decl, 1,
7190 fold_convert_loc (loc,
7191 newtype,
7192 arg0)));
7193 }
7194 return NULL_TREE;
7195 }
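
/* The narrowing above, sketched at source level (F is a hypothetical
   float variable):

     floor ((double) f)  ->  (double) floorf (f)

   This is safe for the trunc-transparent functions: every float widens
   to double exactly, and the result of the narrow call, widened back,
   matches the wide call on the widened argument.  */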
7196
7197 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7198 the argument, for instance lround((double)f) -> lroundf (f).
7199 Do the transformation for a call with argument ARG. */
7200
7201 static tree
7202 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7203 {
7204 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7205
7206 if (!validate_arg (arg, REAL_TYPE))
7207 return NULL_TREE;
7208
7209 /* If argument is already integer valued, and we don't need to worry
7210 about setting errno, there's no need to perform rounding. */
7211 if (! flag_errno_math && integer_valued_real_p (arg))
7212 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7213 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7214
7215 if (optimize)
7216 {
7217 tree ftype = TREE_TYPE (arg);
7218 tree arg0 = strip_float_extensions (arg);
7219 tree newtype = TREE_TYPE (arg0);
7220 tree decl;
7221
7222 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7223 && (decl = mathfn_built_in (newtype, fcode)))
7224 return build_call_expr_loc (loc, decl, 1,
7225 fold_convert_loc (loc, newtype, arg0));
7226 }
7227
7228 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7229 sizeof (int) == sizeof (long). */
7230 if (TYPE_PRECISION (integer_type_node)
7231 == TYPE_PRECISION (long_integer_type_node))
7232 {
7233 tree newfn = NULL_TREE;
7234 switch (fcode)
7235 {
7236 CASE_FLT_FN (BUILT_IN_ICEIL):
7237 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7238 break;
7239
7240 CASE_FLT_FN (BUILT_IN_IFLOOR):
7241 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7242 break;
7243
7244 CASE_FLT_FN (BUILT_IN_IROUND):
7245 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7246 break;
7247
7248 CASE_FLT_FN (BUILT_IN_IRINT):
7249 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7250 break;
7251
7252 default:
7253 break;
7254 }
7255
7256 if (newfn)
7257 {
7258 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7259 return fold_convert_loc (loc,
7260 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7261 }
7262 }
7263
7264 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7265 sizeof (long long) == sizeof (long). */
7266 if (TYPE_PRECISION (long_long_integer_type_node)
7267 == TYPE_PRECISION (long_integer_type_node))
7268 {
7269 tree newfn = NULL_TREE;
7270 switch (fcode)
7271 {
7272 CASE_FLT_FN (BUILT_IN_LLCEIL):
7273 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7274 break;
7275
7276 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7277 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7278 break;
7279
7280 CASE_FLT_FN (BUILT_IN_LLROUND):
7281 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7282 break;
7283
7284 CASE_FLT_FN (BUILT_IN_LLRINT):
7285 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7286 break;
7287
7288 default:
7289 break;
7290 }
7291
7292 if (newfn)
7293 {
7294 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7295 return fold_convert_loc (loc,
7296 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7297 }
7298 }
7299
7300 return NULL_TREE;
7301 }
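
/* Source-level sketches of the transformations above:

     lround ((double) f)   ->  lroundf (f)                 narrowing
     __builtin_iround (x)  ->  (int) lround (x)        int == long
     llround (x)           ->  (long long) lround (x)  long long == long

   The two canonicalizations let the expanders and optabs handle one
   entry point per rounding style instead of three.  */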
7302
7303 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7304 return type. Return NULL_TREE if no simplification can be made. */
7305
7306 static tree
7307 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7308 {
7309 tree res;
7310
7311 if (!validate_arg (arg, COMPLEX_TYPE)
7312 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7313 return NULL_TREE;
7314
7315 /* Calculate the result when the argument is a constant. */
7316 if (TREE_CODE (arg) == COMPLEX_CST
7317 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7318 type, mpfr_hypot)))
7319 return res;
7320
7321 if (TREE_CODE (arg) == COMPLEX_EXPR)
7322 {
7323 tree real = TREE_OPERAND (arg, 0);
7324 tree imag = TREE_OPERAND (arg, 1);
7325
7326 /* If either part is zero, cabs is fabs of the other. */
7327 if (real_zerop (real))
7328 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7329 if (real_zerop (imag))
7330 return fold_build1_loc (loc, ABS_EXPR, type, real);
7331
7332 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7333 if (flag_unsafe_math_optimizations
7334 && operand_equal_p (real, imag, OEP_PURE_SAME))
7335 {
7336 const REAL_VALUE_TYPE sqrt2_trunc
7337 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7338 STRIP_NOPS (real);
7339 return fold_build2_loc (loc, MULT_EXPR, type,
7340 fold_build1_loc (loc, ABS_EXPR, type, real),
7341 build_real (type, sqrt2_trunc));
7342 }
7343 }
7344
7345 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7346 if (TREE_CODE (arg) == NEGATE_EXPR
7347 || TREE_CODE (arg) == CONJ_EXPR)
7348 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7349
7350 /* Expand cabs inline under unsafe math, but not when optimizing for size. */
7351 if (flag_unsafe_math_optimizations
7352 && optimize && optimize_function_for_speed_p (cfun))
7353 {
7354 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7355
7356 if (sqrtfn != NULL_TREE)
7357 {
7358 tree rpart, ipart, result;
7359
7360 arg = builtin_save_expr (arg);
7361
7362 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7363 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7364
7365 rpart = builtin_save_expr (rpart);
7366 ipart = builtin_save_expr (ipart);
7367
7368 result = fold_build2_loc (loc, PLUS_EXPR, type,
7369 fold_build2_loc (loc, MULT_EXPR, type,
7370 rpart, rpart),
7371 fold_build2_loc (loc, MULT_EXPR, type,
7372 ipart, ipart));
7373
7374 return build_call_expr_loc (loc, sqrtfn, 1, result);
7375 }
7376 }
7377
7378 return NULL_TREE;
7379 }
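
/* The unsafe-math expansion at the end of the folder above computes,
   in effect (a sketch; the save_exprs keep ARG and its parts from
   being evaluated twice):

     cabs (z)  ->  sqrt (creal (z)*creal (z) + cimag (z)*cimag (z))

   This trades the library call for a sqrt and two multiplies, and is
   only valid under -funsafe-math-optimizations because the squares can
   overflow where a careful hypot-style cabs would not.  */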
7380
7381 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7382 complex tree type of the result. If NEG is true, the imaginary
7383 zero is negative. */
7384
7385 static tree
7386 build_complex_cproj (tree type, bool neg)
7387 {
7388 REAL_VALUE_TYPE rinf, rzero = dconst0;
7389
7390 real_inf (&rinf);
7391 rzero.sign = neg;
7392 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7393 build_real (TREE_TYPE (type), rzero));
7394 }
7395
7396 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7397 return type. Return NULL_TREE if no simplification can be made. */
7398
7399 static tree
7400 fold_builtin_cproj (location_t loc, tree arg, tree type)
7401 {
7402 if (!validate_arg (arg, COMPLEX_TYPE)
7403 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7404 return NULL_TREE;
7405
7406 /* If there are no infinities, return arg. */
7407 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7408 return non_lvalue_loc (loc, arg);
7409
7410 /* Calculate the result when the argument is a constant. */
7411 if (TREE_CODE (arg) == COMPLEX_CST)
7412 {
7413 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7414 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7415
7416 if (real_isinf (real) || real_isinf (imag))
7417 return build_complex_cproj (type, imag->sign);
7418 else
7419 return arg;
7420 }
7421 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7422 {
7423 tree real = TREE_OPERAND (arg, 0);
7424 tree imag = TREE_OPERAND (arg, 1);
7425
7426 STRIP_NOPS (real);
7427 STRIP_NOPS (imag);
7428
7429 /* If the real part is inf and the imag part is known to be
7430 nonnegative, return (inf + 0i). Remember side-effects are
7431 possible in the imag part. */
7432 if (TREE_CODE (real) == REAL_CST
7433 && real_isinf (TREE_REAL_CST_PTR (real))
7434 && tree_expr_nonnegative_p (imag))
7435 return omit_one_operand_loc (loc, type,
7436 build_complex_cproj (type, false),
7437 arg);
7438
7439 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7440 Remember side-effects are possible in the real part. */
7441 if (TREE_CODE (imag) == REAL_CST
7442 && real_isinf (TREE_REAL_CST_PTR (imag)))
7443 return
7444 omit_one_operand_loc (loc, type,
7445 build_complex_cproj (type, TREE_REAL_CST_PTR
7446 (imag)->sign), arg);
7447 }
7448
7449 return NULL_TREE;
7450 }
7451
7452 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7453 Return NULL_TREE if no simplification can be made. */
7454
7455 static tree
7456 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7457 {
7459 enum built_in_function fcode;
7460 tree res;
7461
7462 if (!validate_arg (arg, REAL_TYPE))
7463 return NULL_TREE;
7464
7465 /* Calculate the result when the argument is a constant. */
7466 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7467 return res;
7468
7469 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7470 fcode = builtin_mathfn_code (arg);
7471 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7472 {
7473 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7474 arg = fold_build2_loc (loc, MULT_EXPR, type,
7475 CALL_EXPR_ARG (arg, 0),
7476 build_real (type, dconsthalf));
7477 return build_call_expr_loc (loc, expfn, 1, arg);
7478 }
7479
7480 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7481 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7482 {
7483 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7484
7485 if (powfn)
7486 {
7487 tree arg0 = CALL_EXPR_ARG (arg, 0);
7488 tree tree_root;
7489 /* The inner root was either sqrt or cbrt. */
7490 /* This was a conditional expression but it triggered a bug
7491 in Sun C 5.5. */
7492 REAL_VALUE_TYPE dconstroot;
7493 if (BUILTIN_SQRT_P (fcode))
7494 dconstroot = dconsthalf;
7495 else
7496 dconstroot = dconst_third ();
7497
7498 /* Adjust for the outer root. */
7499 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7500 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7501 tree_root = build_real (type, dconstroot);
7502 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7503 }
7504 }
7505
7506 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7507 if (flag_unsafe_math_optimizations
7508 && (fcode == BUILT_IN_POW
7509 || fcode == BUILT_IN_POWF
7510 || fcode == BUILT_IN_POWL))
7511 {
7512 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7513 tree arg0 = CALL_EXPR_ARG (arg, 0);
7514 tree arg1 = CALL_EXPR_ARG (arg, 1);
7515 tree narg1;
7516 if (!tree_expr_nonnegative_p (arg0))
7517 arg0 = build1 (ABS_EXPR, type, arg0);
7518 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7519 build_real (type, dconsthalf));
7520 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7521 }
7522
7523 return NULL_TREE;
7524 }
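
/* Source-level sketches of the unsafe-math sqrt rewrites above:

     sqrt (exp (x))     ->  exp (x * 0.5)
     sqrt (sqrt (x))    ->  pow (x, 0.25)
     sqrt (cbrt (x))    ->  pow (x, 1.0/6.0)
     sqrt (pow (x, y))  ->  pow (fabs (x), y * 0.5)

   The fabs in the last form keeps the rewritten pow away from the
   domain error pow raises for negative x with a non-integral
   exponent.  */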
7525
7526 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7527 Return NULL_TREE if no simplification can be made. */
7528
7529 static tree
7530 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7531 {
7532 const enum built_in_function fcode = builtin_mathfn_code (arg);
7533 tree res;
7534
7535 if (!validate_arg (arg, REAL_TYPE))
7536 return NULL_TREE;
7537
7538 /* Calculate the result when the argument is a constant. */
7539 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7540 return res;
7541
7542 if (flag_unsafe_math_optimizations)
7543 {
7544 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7545 if (BUILTIN_EXPONENT_P (fcode))
7546 {
7547 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7548 const REAL_VALUE_TYPE third_trunc =
7549 real_value_truncate (TYPE_MODE (type), dconst_third ());
7550 arg = fold_build2_loc (loc, MULT_EXPR, type,
7551 CALL_EXPR_ARG (arg, 0),
7552 build_real (type, third_trunc));
7553 return build_call_expr_loc (loc, expfn, 1, arg);
7554 }
7555
7556 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7557 if (BUILTIN_SQRT_P (fcode))
7558 {
7559 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7560
7561 if (powfn)
7562 {
7563 tree arg0 = CALL_EXPR_ARG (arg, 0);
7564 tree tree_root;
7565 REAL_VALUE_TYPE dconstroot = dconst_third ();
7566
7567 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7568 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7569 tree_root = build_real (type, dconstroot);
7570 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7571 }
7572 }
7573
7574 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7575 if (BUILTIN_CBRT_P (fcode))
7576 {
7577 tree arg0 = CALL_EXPR_ARG (arg, 0);
7578 if (tree_expr_nonnegative_p (arg0))
7579 {
7580 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7581
7582 if (powfn)
7583 {
7584 tree tree_root;
7585 REAL_VALUE_TYPE dconstroot;
7586
7587 real_arithmetic (&dconstroot, MULT_EXPR,
7588 dconst_third_ptr (), dconst_third_ptr ());
7589 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7590 tree_root = build_real (type, dconstroot);
7591 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7592 }
7593 }
7594 }
7595
7596 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7597 if (fcode == BUILT_IN_POW
7598 || fcode == BUILT_IN_POWF
7599 || fcode == BUILT_IN_POWL)
7600 {
7601 tree arg00 = CALL_EXPR_ARG (arg, 0);
7602 tree arg01 = CALL_EXPR_ARG (arg, 1);
7603 if (tree_expr_nonnegative_p (arg00))
7604 {
7605 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7606 const REAL_VALUE_TYPE dconstroot
7607 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7608 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7609 build_real (type, dconstroot));
7610 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7611 }
7612 }
7613 }
7614 return NULL_TREE;
7615 }
7616
7617 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7618 TYPE is the type of the return value. Return NULL_TREE if no
7619 simplification can be made. */
7620
7621 static tree
7622 fold_builtin_cos (location_t loc,
7623 tree arg, tree type, tree fndecl)
7624 {
7625 tree res, narg;
7626
7627 if (!validate_arg (arg, REAL_TYPE))
7628 return NULL_TREE;
7629
7630 /* Calculate the result when the argument is a constant. */
7631 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7632 return res;
7633
7634 /* Optimize cos(-x) into cos (x). */
7635 if ((narg = fold_strip_sign_ops (arg)))
7636 return build_call_expr_loc (loc, fndecl, 1, narg);
7637
7638 return NULL_TREE;
7639 }
7640
7641 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7642 Return NULL_TREE if no simplification can be made. */
7643
7644 static tree
7645 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7646 {
7647 if (validate_arg (arg, REAL_TYPE))
7648 {
7649 tree res, narg;
7650
7651 /* Calculate the result when the argument is a constant. */
7652 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7653 return res;
7654
7655 /* Optimize cosh(-x) into cosh (x). */
7656 if ((narg = fold_strip_sign_ops (arg)))
7657 return build_call_expr_loc (loc, fndecl, 1, narg);
7658 }
7659
7660 return NULL_TREE;
7661 }
7662
7663 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7664 argument ARG. TYPE is the type of the return value. Return
7665 NULL_TREE if no simplification can be made. */
7666
7667 static tree
7668 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7669 bool hyper)
7670 {
7671 if (validate_arg (arg, COMPLEX_TYPE)
7672 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7673 {
7674 tree tmp;
7675
7676 /* Calculate the result when the argument is a constant. */
7677 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7678 return tmp;
7679
7680 /* Optimize fn(-x) into fn(x). */
7681 if ((tmp = fold_strip_sign_ops (arg)))
7682 return build_call_expr_loc (loc, fndecl, 1, tmp);
7683 }
7684
7685 return NULL_TREE;
7686 }
7687
7688 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7689 Return NULL_TREE if no simplification can be made. */
7690
7691 static tree
7692 fold_builtin_tan (tree arg, tree type)
7693 {
7694 enum built_in_function fcode;
7695 tree res;
7696
7697 if (!validate_arg (arg, REAL_TYPE))
7698 return NULL_TREE;
7699
7700 /* Calculate the result when the argument is a constant. */
7701 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7702 return res;
7703
7704 /* Optimize tan(atan(x)) = x. */
7705 fcode = builtin_mathfn_code (arg);
7706 if (flag_unsafe_math_optimizations
7707 && (fcode == BUILT_IN_ATAN
7708 || fcode == BUILT_IN_ATANF
7709 || fcode == BUILT_IN_ATANL))
7710 return CALL_EXPR_ARG (arg, 0);
7711
7712 return NULL_TREE;
7713 }
7714
7715 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7716 NULL_TREE if no simplification can be made. */
7717
7718 static tree
7719 fold_builtin_sincos (location_t loc,
7720 tree arg0, tree arg1, tree arg2)
7721 {
7722 tree type;
7723 tree res, fn, call;
7724
7725 if (!validate_arg (arg0, REAL_TYPE)
7726 || !validate_arg (arg1, POINTER_TYPE)
7727 || !validate_arg (arg2, POINTER_TYPE))
7728 return NULL_TREE;
7729
7730 type = TREE_TYPE (arg0);
7731
7732 /* Calculate the result when the argument is a constant. */
7733 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7734 return res;
7735
7736 /* Canonicalize sincos to cexpi. */
7737 if (!targetm.libc_has_function (function_c99_math_complex))
7738 return NULL_TREE;
7739 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7740 if (!fn)
7741 return NULL_TREE;
7742
7743 call = build_call_expr_loc (loc, fn, 1, arg0);
7744 call = builtin_save_expr (call);
7745
7746 return build2 (COMPOUND_EXPR, void_type_node,
7747 build2 (MODIFY_EXPR, void_type_node,
7748 build_fold_indirect_ref_loc (loc, arg1),
7749 build1 (IMAGPART_EXPR, type, call)),
7750 build2 (MODIFY_EXPR, void_type_node,
7751 build_fold_indirect_ref_loc (loc, arg2),
7752 build1 (REALPART_EXPR, type, call)));
7753 }
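
/* The canonicalization above rewrites, at source level (SINP and COSP
   are hypothetical pointers):

     sincos (x, sinp, cosp)
       ->  tmp = cexpi (x);  *sinp = __imag tmp;  *cosp = __real tmp;

   One canonical form lets CSE merge separate sin and cos calls on the
   same argument.  The rewrite is gated on libc having the C99 complex
   functions because cexpi may ultimately expand to a cexp call.  */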
7754
7755 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7756 NULL_TREE if no simplification can be made. */
7757
7758 static tree
7759 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7760 {
7761 tree rtype;
7762 tree realp, imagp, ifn;
7763 tree res;
7764
7765 if (!validate_arg (arg0, COMPLEX_TYPE)
7766 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7767 return NULL_TREE;
7768
7769 /* Calculate the result when the argument is a constant. */
7770 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7771 return res;
7772
7773 rtype = TREE_TYPE (TREE_TYPE (arg0));
7774
7775 /* If we can figure out the real part of arg0 and it is constant zero,
7776 fold to cexpi. */
7777 if (!targetm.libc_has_function (function_c99_math_complex))
7778 return NULL_TREE;
7779 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7780 if (!ifn)
7781 return NULL_TREE;
7782
7783 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7784 && real_zerop (realp))
7785 {
7786 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7787 return build_call_expr_loc (loc, ifn, 1, narg);
7788 }
7789
7790 /* If we can easily decompose the real and imaginary parts, split cexp
7791 into exp (r) * cexpi (i). */
7792 if (flag_unsafe_math_optimizations
7793 && realp)
7794 {
7795 tree rfn, rcall, icall;
7796
7797 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7798 if (!rfn)
7799 return NULL_TREE;
7800
7801 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7802 if (!imagp)
7803 return NULL_TREE;
7804
7805 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7806 icall = builtin_save_expr (icall);
7807 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7808 rcall = builtin_save_expr (rcall);
7809 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7810 fold_build2_loc (loc, MULT_EXPR, rtype,
7811 rcall,
7812 fold_build1_loc (loc, REALPART_EXPR,
7813 rtype, icall)),
7814 fold_build2_loc (loc, MULT_EXPR, rtype,
7815 rcall,
7816 fold_build1_loc (loc, IMAGPART_EXPR,
7817 rtype, icall)));
7818 }
7819
7820 return NULL_TREE;
7821 }
7822
7823 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7824 Return NULL_TREE if no simplification can be made. */
7825
7826 static tree
7827 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7828 {
7829 if (!validate_arg (arg, REAL_TYPE))
7830 return NULL_TREE;
7831
7832 /* Optimize trunc of constant value. */
7833 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7834 {
7835 REAL_VALUE_TYPE r, x;
7836 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7837
7838 x = TREE_REAL_CST (arg);
7839 real_trunc (&r, TYPE_MODE (type), &x);
7840 return build_real (type, r);
7841 }
7842
7843 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7844 }
7845
7846 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7847 Return NULL_TREE if no simplification can be made. */
7848
7849 static tree
7850 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7851 {
7852 if (!validate_arg (arg, REAL_TYPE))
7853 return NULL_TREE;
7854
7855 /* Optimize floor of constant value. */
7856 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7857 {
7858 REAL_VALUE_TYPE x;
7859
7860 x = TREE_REAL_CST (arg);
7861 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7862 {
7863 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7864 REAL_VALUE_TYPE r;
7865
7866 real_floor (&r, TYPE_MODE (type), &x);
7867 return build_real (type, r);
7868 }
7869 }
7870
7871 /* Fold floor (x) where x is nonnegative to trunc (x). */
7872 if (tree_expr_nonnegative_p (arg))
7873 {
7874 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7875 if (truncfn)
7876 return build_call_expr_loc (loc, truncfn, 1, arg);
7877 }
7878
7879 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7880 }
7881
7882 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7883 Return NULL_TREE if no simplification can be made. */
7884
7885 static tree
7886 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7887 {
7888 if (!validate_arg (arg, REAL_TYPE))
7889 return NULL_TREE;
7890
7891 /* Optimize ceil of constant value. */
7892 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7893 {
7894 REAL_VALUE_TYPE x;
7895
7896 x = TREE_REAL_CST (arg);
7897 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7898 {
7899 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7900 REAL_VALUE_TYPE r;
7901
7902 real_ceil (&r, TYPE_MODE (type), &x);
7903 return build_real (type, r);
7904 }
7905 }
7906
7907 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7908 }
7909
7910 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7911 Return NULL_TREE if no simplification can be made. */
7912
7913 static tree
7914 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7915 {
7916 if (!validate_arg (arg, REAL_TYPE))
7917 return NULL_TREE;
7918
7919 /* Optimize round of constant value. */
7920 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7921 {
7922 REAL_VALUE_TYPE x;
7923
7924 x = TREE_REAL_CST (arg);
7925 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7926 {
7927 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7928 REAL_VALUE_TYPE r;
7929
7930 real_round (&r, TYPE_MODE (type), &x);
7931 return build_real (type, r);
7932 }
7933 }
7934
7935 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7936 }
7937
7938 /* Fold function call to builtin lround, lroundf or lroundl (or the
7939 corresponding long long versions) and other rounding functions. ARG
7940 is the argument to the call. Return NULL_TREE if no simplification
7941 can be made. */
7942
7943 static tree
7944 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7945 {
7946 if (!validate_arg (arg, REAL_TYPE))
7947 return NULL_TREE;
7948
7949 /* Optimize lround of constant value. */
7950 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7951 {
7952 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7953
7954 if (real_isfinite (&x))
7955 {
7956 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7957 tree ftype = TREE_TYPE (arg);
7958 double_int val;
7959 REAL_VALUE_TYPE r;
7960
7961 switch (DECL_FUNCTION_CODE (fndecl))
7962 {
7963 CASE_FLT_FN (BUILT_IN_IFLOOR):
7964 CASE_FLT_FN (BUILT_IN_LFLOOR):
7965 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7966 real_floor (&r, TYPE_MODE (ftype), &x);
7967 break;
7968
7969 CASE_FLT_FN (BUILT_IN_ICEIL):
7970 CASE_FLT_FN (BUILT_IN_LCEIL):
7971 CASE_FLT_FN (BUILT_IN_LLCEIL):
7972 real_ceil (&r, TYPE_MODE (ftype), &x);
7973 break;
7974
7975 CASE_FLT_FN (BUILT_IN_IROUND):
7976 CASE_FLT_FN (BUILT_IN_LROUND):
7977 CASE_FLT_FN (BUILT_IN_LLROUND):
7978 real_round (&r, TYPE_MODE (ftype), &x);
7979 break;
7980
7981 default:
7982 gcc_unreachable ();
7983 }
7984
7985 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7986 if (double_int_fits_to_tree_p (itype, val))
7987 return double_int_to_tree (itype, val);
7988 }
7989 }
7990
7991 switch (DECL_FUNCTION_CODE (fndecl))
7992 {
7993 CASE_FLT_FN (BUILT_IN_LFLOOR):
7994 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7995 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7996 if (tree_expr_nonnegative_p (arg))
7997 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7998 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7999 break;
8000 default:;
8001 }
8002
8003 return fold_fixed_mathfn (loc, fndecl, arg);
8004 }
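
/* Constant-folding sketches for the rounding builtins above:

     lround (2.5)   ->  3    (real_round: halfway cases away from zero)
     lfloor (-1.5)  ->  -2
     lceil (-1.5)   ->  -1

   The double_int_fits_to_tree_p check rejects results such as
   llround (1e30), whose integral value does not fit the return type;
   those calls are left for the library to handle at run time.  */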
8005
8006 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8007 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8008 the argument to the call. Return NULL_TREE if no simplification can
8009 be made. */
8010
8011 static tree
8012 fold_builtin_bitop (tree fndecl, tree arg)
8013 {
8014 if (!validate_arg (arg, INTEGER_TYPE))
8015 return NULL_TREE;
8016
8017 /* Optimize for constant argument. */
8018 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8019 {
8020 HOST_WIDE_INT hi, width, result;
8021 unsigned HOST_WIDE_INT lo;
8022 tree type;
8023
8024 type = TREE_TYPE (arg);
8025 width = TYPE_PRECISION (type);
8026 lo = TREE_INT_CST_LOW (arg);
8027
8028 /* Clear all the bits that are beyond the type's precision. */
8029 if (width > HOST_BITS_PER_WIDE_INT)
8030 {
8031 hi = TREE_INT_CST_HIGH (arg);
8032 if (width < HOST_BITS_PER_DOUBLE_INT)
8033 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
8034 }
8035 else
8036 {
8037 hi = 0;
8038 if (width < HOST_BITS_PER_WIDE_INT)
8039 lo &= ~(HOST_WIDE_INT_M1U << width);
8040 }
8041
8042 switch (DECL_FUNCTION_CODE (fndecl))
8043 {
8044 CASE_INT_FN (BUILT_IN_FFS):
8045 if (lo != 0)
8046 result = ffs_hwi (lo);
8047 else if (hi != 0)
8048 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8049 else
8050 result = 0;
8051 break;
8052
8053 CASE_INT_FN (BUILT_IN_CLZ):
8054 if (hi != 0)
8055 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8056 else if (lo != 0)
8057 result = width - floor_log2 (lo) - 1;
8058 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8059 result = width;
8060 break;
8061
8062 CASE_INT_FN (BUILT_IN_CTZ):
8063 if (lo != 0)
8064 result = ctz_hwi (lo);
8065 else if (hi != 0)
8066 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8067 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8068 result = width;
8069 break;
8070
8071 CASE_INT_FN (BUILT_IN_CLRSB):
8072 if (width > 2 * HOST_BITS_PER_WIDE_INT)
8073 return NULL_TREE;
8074 if (width > HOST_BITS_PER_WIDE_INT
8075 && (hi & ((unsigned HOST_WIDE_INT) 1
8076 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8077 {
8078 hi = ~hi & ~(HOST_WIDE_INT_M1U
8079 << (width - HOST_BITS_PER_WIDE_INT - 1));
8080 lo = ~lo;
8081 }
8082 else if (width <= HOST_BITS_PER_WIDE_INT
8083 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8084 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
8085 if (hi != 0)
8086 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8087 else if (lo != 0)
8088 result = width - floor_log2 (lo) - 2;
8089 else
8090 result = width - 1;
8091 break;
8092
8093 CASE_INT_FN (BUILT_IN_POPCOUNT):
8094 result = 0;
8095 while (lo)
8096 result++, lo &= lo - 1;
8097 while (hi)
8098 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8099 break;
8100
8101 CASE_INT_FN (BUILT_IN_PARITY):
8102 result = 0;
8103 while (lo)
8104 result++, lo &= lo - 1;
8105 while (hi)
8106 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8107 result &= 1;
8108 break;
8109
8110 default:
8111 gcc_unreachable ();
8112 }
8113
8114 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8115 }
8116
8117 return NULL_TREE;
8118 }
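
/* The two-word arithmetic above can be read as ordinary C on a value
   split into LO and HI halves; the `x &= x - 1' step clears the lowest
   set bit.  A standalone sketch of the popcount case (hypothetical
   helper, assuming 64-bit halves):

     static int
     popcount_2wide (unsigned long long lo, unsigned long long hi)
     {
       int result = 0;
       while (lo)
         result++, lo &= lo - 1;
       while (hi)
         result++, hi &= hi - 1;
       return result;
     }

   parity is the same count reduced modulo 2, and ffs/clz/ctz reduce to
   ctz_hwi/floor_log2 on whichever half is nonzero.  */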
8119
8120 /* Fold function call to builtin_bswap and the short, long and long long
8121 variants. Return NULL_TREE if no simplification can be made. */
8122 static tree
8123 fold_builtin_bswap (tree fndecl, tree arg)
8124 {
8125 if (! validate_arg (arg, INTEGER_TYPE))
8126 return NULL_TREE;
8127
8128 /* Optimize constant value. */
8129 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8130 {
8131 HOST_WIDE_INT hi, width, r_hi = 0;
8132 unsigned HOST_WIDE_INT lo, r_lo = 0;
8133 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8134
8135 width = TYPE_PRECISION (type);
8136 lo = TREE_INT_CST_LOW (arg);
8137 hi = TREE_INT_CST_HIGH (arg);
8138
8139 switch (DECL_FUNCTION_CODE (fndecl))
8140 {
8141 case BUILT_IN_BSWAP16:
8142 case BUILT_IN_BSWAP32:
8143 case BUILT_IN_BSWAP64:
8144 {
8145 int s;
8146
8147 for (s = 0; s < width; s += 8)
8148 {
8149 int d = width - s - 8;
8150 unsigned HOST_WIDE_INT byte;
8151
8152 if (s < HOST_BITS_PER_WIDE_INT)
8153 byte = (lo >> s) & 0xff;
8154 else
8155 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8156
8157 if (d < HOST_BITS_PER_WIDE_INT)
8158 r_lo |= byte << d;
8159 else
8160 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8161 }
8162 }
8163
8164 break;
8165
8166 default:
8167 gcc_unreachable ();
8168 }
8169
8170 if (width < HOST_BITS_PER_WIDE_INT)
8171 return build_int_cst (type, r_lo);
8172 else
8173 return build_int_cst_wide (type, r_lo, r_hi);
8174 }
8175
8176 return NULL_TREE;
8177 }
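
/* The byte loop above is an ordinary byte reversal; a standalone
   sketch of the 32-bit case (hypothetical helper):

     static unsigned int
     bswap32_sketch (unsigned int x)
     {
       unsigned int r = 0;
       int s;
       for (s = 0; s < 32; s += 8)
         r |= ((x >> s) & 0xff) << (32 - s - 8);
       return r;
     }

   The LO/HI pair in the folder exists only because the constant being
   folded may be wider than one HOST_WIDE_INT.  */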
8178
8179 /* A subroutine of fold_builtin to fold the various logarithmic
8180 functions. Return NULL_TREE if no simplification can be made.
8181 FUNC is the corresponding MPFR logarithm function. */
8182
8183 static tree
8184 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8185 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8186 {
8187 if (validate_arg (arg, REAL_TYPE))
8188 {
8189 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8190 tree res;
8191 const enum built_in_function fcode = builtin_mathfn_code (arg);
8192
8193 /* Calculate the result when the argument is a constant. */
8194 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8195 return res;
8196
8197 /* Special case, optimize logN(expN(x)) = x. */
8198 if (flag_unsafe_math_optimizations
8199 && ((func == mpfr_log
8200 && (fcode == BUILT_IN_EXP
8201 || fcode == BUILT_IN_EXPF
8202 || fcode == BUILT_IN_EXPL))
8203 || (func == mpfr_log2
8204 && (fcode == BUILT_IN_EXP2
8205 || fcode == BUILT_IN_EXP2F
8206 || fcode == BUILT_IN_EXP2L))
8207 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8208 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8209
8210 /* Optimize logN(func()) for various exponential functions. We
8211 want to determine the value "x" and the power "exponent" in
8212 order to transform logN(x**exponent) into exponent*logN(x). */
8213 if (flag_unsafe_math_optimizations)
8214 {
8215 tree exponent = 0, x = 0;
8216
8217 switch (fcode)
8218 {
8219 CASE_FLT_FN (BUILT_IN_EXP):
8220 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8221 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8222 dconst_e ()));
8223 exponent = CALL_EXPR_ARG (arg, 0);
8224 break;
8225 CASE_FLT_FN (BUILT_IN_EXP2):
8226 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8227 x = build_real (type, dconst2);
8228 exponent = CALL_EXPR_ARG (arg, 0);
8229 break;
8230 CASE_FLT_FN (BUILT_IN_EXP10):
8231 CASE_FLT_FN (BUILT_IN_POW10):
8232 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8233 {
8234 REAL_VALUE_TYPE dconst10;
8235 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8236 x = build_real (type, dconst10);
8237 }
8238 exponent = CALL_EXPR_ARG (arg, 0);
8239 break;
8240 CASE_FLT_FN (BUILT_IN_SQRT):
8241 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8242 x = CALL_EXPR_ARG (arg, 0);
8243 exponent = build_real (type, dconsthalf);
8244 break;
8245 CASE_FLT_FN (BUILT_IN_CBRT):
8246 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8247 x = CALL_EXPR_ARG (arg, 0);
8248 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8249 dconst_third ()));
8250 break;
8251 CASE_FLT_FN (BUILT_IN_POW):
8252 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8253 x = CALL_EXPR_ARG (arg, 0);
8254 exponent = CALL_EXPR_ARG (arg, 1);
8255 break;
8256 default:
8257 break;
8258 }
8259
8260 /* Now perform the optimization. */
8261 if (x && exponent)
8262 {
8263 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8264 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8265 }
8266 }
8267 }
8268
8269 return NULL_TREE;
8270 }
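
/* Source-level sketches of the unsafe-math logarithm rewrites above:

     log (exp (x))     ->  x
     log (sqrt (x))    ->  0.5 * log (x)
     log (cbrt (x))    ->  (1.0/3.0) * log (x)
     log (pow (x, y))  ->  y * log (x)

   Each can change the result for out-of-domain or non-finite inputs
   (e.g. negative x in the pow form), hence the gate on
   flag_unsafe_math_optimizations.  */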
8271
8272 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8273 NULL_TREE if no simplification can be made. */
8274
8275 static tree
8276 fold_builtin_hypot (location_t loc, tree fndecl,
8277 tree arg0, tree arg1, tree type)
8278 {
8279 tree res, narg0, narg1;
8280
8281 if (!validate_arg (arg0, REAL_TYPE)
8282 || !validate_arg (arg1, REAL_TYPE))
8283 return NULL_TREE;
8284
8285 /* Calculate the result when the argument is a constant. */
8286 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8287 return res;
8288
8289 /* If either argument to hypot has a negate or abs, strip that off.
8290 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8291 narg0 = fold_strip_sign_ops (arg0);
8292 narg1 = fold_strip_sign_ops (arg1);
8293 if (narg0 || narg1)
8294 {
8295 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8296 narg1 ? narg1 : arg1);
8297 }
8298
8299 /* If either argument is zero, hypot is fabs of the other. */
8300 if (real_zerop (arg0))
8301 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8302 else if (real_zerop (arg1))
8303 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8304
8305 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8306 if (flag_unsafe_math_optimizations
8307 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8308 {
8309 const REAL_VALUE_TYPE sqrt2_trunc
8310 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8311 return fold_build2_loc (loc, MULT_EXPR, type,
8312 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8313 build_real (type, sqrt2_trunc));
8314 }
8315
8316 return NULL_TREE;
8317 }
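
/* Source-level sketches of the hypot folds above:

     hypot (-x, fabs (y))  ->  hypot (x, y)           unconditionally
     hypot (x, 0.0)        ->  fabs (x)               unconditionally
     hypot (x, x)          ->  fabs (x) * sqrt (2)    unsafe math only

   Only the last form needs a flag: it may differ from a correctly
   rounded hypot in the final ulp.  */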
8318
8319
8320 /* Fold a builtin function call to pow, powf, or powl. Return
8321 NULL_TREE if no simplification can be made. */
8322 static tree
8323 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8324 {
8325 tree res;
8326
8327 if (!validate_arg (arg0, REAL_TYPE)
8328 || !validate_arg (arg1, REAL_TYPE))
8329 return NULL_TREE;
8330
8331 /* Calculate the result when the argument is a constant. */
8332 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8333 return res;
8334
8335 /* Optimize pow(1.0,y) = 1.0. */
8336 if (real_onep (arg0))
8337 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8338
8339 if (TREE_CODE (arg1) == REAL_CST
8340 && !TREE_OVERFLOW (arg1))
8341 {
8342 REAL_VALUE_TYPE cint;
8343 REAL_VALUE_TYPE c;
8344 HOST_WIDE_INT n;
8345
8346 c = TREE_REAL_CST (arg1);
8347
8348 /* Optimize pow(x,0.0) = 1.0. */
8349 if (REAL_VALUES_EQUAL (c, dconst0))
8350 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8351 arg0);
8352
8353 /* Optimize pow(x,1.0) = x. */
8354 if (REAL_VALUES_EQUAL (c, dconst1))
8355 return arg0;
8356
8357 /* Optimize pow(x,-1.0) = 1.0/x. */
8358 if (REAL_VALUES_EQUAL (c, dconstm1))
8359 return fold_build2_loc (loc, RDIV_EXPR, type,
8360 build_real (type, dconst1), arg0);
8361
8362 /* Optimize pow(x,0.5) = sqrt(x). */
8363 if (flag_unsafe_math_optimizations
8364 && REAL_VALUES_EQUAL (c, dconsthalf))
8365 {
8366 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8367
8368 if (sqrtfn != NULL_TREE)
8369 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8370 }
8371
8372 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8373 if (flag_unsafe_math_optimizations)
8374 {
8375 const REAL_VALUE_TYPE dconstroot
8376 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8377
8378 if (REAL_VALUES_EQUAL (c, dconstroot))
8379 {
8380 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8381 if (cbrtfn != NULL_TREE)
8382 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8383 }
8384 }
8385
8386 /* Check for an integer exponent. */
8387 n = real_to_integer (&c);
8388 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8389 if (real_identical (&c, &cint))
8390 {
8391 /* Attempt to evaluate pow at compile-time, unless this should
8392 raise an exception. */
8393 if (TREE_CODE (arg0) == REAL_CST
8394 && !TREE_OVERFLOW (arg0)
8395 && (n > 0
8396 || (!flag_trapping_math && !flag_errno_math)
8397 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8398 {
8399 REAL_VALUE_TYPE x;
8400 bool inexact;
8401
8402 x = TREE_REAL_CST (arg0);
8403 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8404 if (flag_unsafe_math_optimizations || !inexact)
8405 return build_real (type, x);
8406 }
8407
8408 /* Strip sign ops from even integer powers. */
8409 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8410 {
8411 tree narg0 = fold_strip_sign_ops (arg0);
8412 if (narg0)
8413 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8414 }
8415 }
8416 }
8417
8418 if (flag_unsafe_math_optimizations)
8419 {
8420 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8421
8422 /* Optimize pow(expN(x),y) = expN(x*y). */
8423 if (BUILTIN_EXPONENT_P (fcode))
8424 {
8425 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8426 tree arg = CALL_EXPR_ARG (arg0, 0);
8427 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8428 return build_call_expr_loc (loc, expfn, 1, arg);
8429 }
8430
8431 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8432 if (BUILTIN_SQRT_P (fcode))
8433 {
8434 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8435 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8436 build_real (type, dconsthalf));
8437 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8438 }
8439
8440 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8441 if (BUILTIN_CBRT_P (fcode))
8442 {
8443 tree arg = CALL_EXPR_ARG (arg0, 0);
8444 if (tree_expr_nonnegative_p (arg))
8445 {
8446 const REAL_VALUE_TYPE dconstroot
8447 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8448 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8449 build_real (type, dconstroot));
8450 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8451 }
8452 }
8453
8454 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8455 if (fcode == BUILT_IN_POW
8456 || fcode == BUILT_IN_POWF
8457 || fcode == BUILT_IN_POWL)
8458 {
8459 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8460 if (tree_expr_nonnegative_p (arg00))
8461 {
8462 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8463 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8464 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8465 }
8466 }
8467 }
8468
8469 return NULL_TREE;
8470 }
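
/* A few of the pow folds above, sketched at source level:

     pow (x, 0.0)       ->  1.0                      unconditionally
     pow (x, -1.0)      ->  1.0 / x
     pow (x, 0.5)       ->  sqrt (x)                 unsafe math only
     pow (2.0, 16.0)    ->  65536.0                  via real_powi
     pow (sqrt (x), y)  ->  pow (x, y * 0.5)         unsafe math only

   pow (x, 0.0) may fold to 1.0 even for zero or NaN x; C99 Annex F
   specifies pow (x, +-0) == 1 for any x, so no flag is needed.  */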
8471
8472 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8473 Return NULL_TREE if no simplification can be made. */
8474 static tree
8475 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8476 tree arg0, tree arg1, tree type)
8477 {
8478 if (!validate_arg (arg0, REAL_TYPE)
8479 || !validate_arg (arg1, INTEGER_TYPE))
8480 return NULL_TREE;
8481
8482 /* Optimize pow(1.0,y) = 1.0. */
8483 if (real_onep (arg0))
8484 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8485
8486 if (host_integerp (arg1, 0))
8487 {
8488 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8489
8490 /* Evaluate powi at compile-time. */
8491 if (TREE_CODE (arg0) == REAL_CST
8492 && !TREE_OVERFLOW (arg0))
8493 {
8494 REAL_VALUE_TYPE x;
8495 x = TREE_REAL_CST (arg0);
8496 real_powi (&x, TYPE_MODE (type), &x, c);
8497 return build_real (type, x);
8498 }
8499
8500 /* Optimize pow(x,0) = 1.0. */
8501 if (c == 0)
8502 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8503 arg0);
8504
8505 /* Optimize pow(x,1) = x. */
8506 if (c == 1)
8507 return arg0;
8508
8509 /* Optimize pow(x,-1) = 1.0/x. */
8510 if (c == -1)
8511 return fold_build2_loc (loc, RDIV_EXPR, type,
8512 build_real (type, dconst1), arg0);
8513 }
8514
8515 return NULL_TREE;
8516 }
8517
8518 /* A subroutine of fold_builtin to fold the various exponent
8519 functions. Return NULL_TREE if no simplification can be made.
8520 FUNC is the corresponding MPFR exponent function. */
8521
8522 static tree
8523 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8524 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8525 {
8526 if (validate_arg (arg, REAL_TYPE))
8527 {
8528 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8529 tree res;
8530
8531 /* Calculate the result when the argument is a constant. */
8532 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8533 return res;
8534
8535 /* Optimize expN(logN(x)) = x. */
8536 if (flag_unsafe_math_optimizations)
8537 {
8538 const enum built_in_function fcode = builtin_mathfn_code (arg);
8539
8540 if ((func == mpfr_exp
8541 && (fcode == BUILT_IN_LOG
8542 || fcode == BUILT_IN_LOGF
8543 || fcode == BUILT_IN_LOGL))
8544 || (func == mpfr_exp2
8545 && (fcode == BUILT_IN_LOG2
8546 || fcode == BUILT_IN_LOG2F
8547 || fcode == BUILT_IN_LOG2L))
8548 || (func == mpfr_exp10
8549 && (fcode == BUILT_IN_LOG10
8550 || fcode == BUILT_IN_LOG10F
8551 || fcode == BUILT_IN_LOG10L)))
8552 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8553 }
8554 }
8555
8556 return NULL_TREE;
8557 }
8558
8559 /* Return true if VAR is a VAR_DECL or a component thereof. */
8560
8561 static bool
8562 var_decl_component_p (tree var)
8563 {
8564 tree inner = var;
8565 while (handled_component_p (inner))
8566 inner = TREE_OPERAND (inner, 0);
8567 return SSA_VAR_P (inner);
8568 }
8569
8570 /* Fold function call to builtin memset. Return
8571 NULL_TREE if no simplification can be made. */
8572
8573 static tree
8574 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8575 tree type, bool ignore)
8576 {
8577 tree var, ret, etype;
8578 unsigned HOST_WIDE_INT length, cval;
8579
8580 if (! validate_arg (dest, POINTER_TYPE)
8581 || ! validate_arg (c, INTEGER_TYPE)
8582 || ! validate_arg (len, INTEGER_TYPE))
8583 return NULL_TREE;
8584
8585 if (! host_integerp (len, 1))
8586 return NULL_TREE;
8587
8588 /* If the LEN parameter is zero, return DEST. */
8589 if (integer_zerop (len))
8590 return omit_one_operand_loc (loc, type, dest, c);
8591
8592 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8593 return NULL_TREE;
8594
8595 var = dest;
8596 STRIP_NOPS (var);
8597 if (TREE_CODE (var) != ADDR_EXPR)
8598 return NULL_TREE;
8599
8600 var = TREE_OPERAND (var, 0);
8601 if (TREE_THIS_VOLATILE (var))
8602 return NULL_TREE;
8603
8604 etype = TREE_TYPE (var);
8605 if (TREE_CODE (etype) == ARRAY_TYPE)
8606 etype = TREE_TYPE (etype);
8607
8608 if (!INTEGRAL_TYPE_P (etype)
8609 && !POINTER_TYPE_P (etype))
8610 return NULL_TREE;
8611
8612 if (! var_decl_component_p (var))
8613 return NULL_TREE;
8614
8615 length = tree_low_cst (len, 1);
8616 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8617 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8618 return NULL_TREE;
8619
8620 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8621 return NULL_TREE;
8622
8623 if (integer_zerop (c))
8624 cval = 0;
8625 else
8626 {
8627 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8628 return NULL_TREE;
8629
8630 cval = TREE_INT_CST_LOW (c);
8631 cval &= 0xff;
8632 cval |= cval << 8;
8633 cval |= cval << 16;
8634 cval |= (cval << 31) << 1;
8635 }
8636
8637 ret = build_int_cst_type (etype, cval);
8638 var = build_fold_indirect_ref_loc (loc,
8639 fold_convert_loc (loc,
8640 build_pointer_type (etype),
8641 dest));
8642 ret = build2 (MODIFY_EXPR, etype, var, ret);
8643 if (ignore)
8644 return ret;
8645
8646 return omit_one_operand_loc (loc, type, dest, ret);
8647 }
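
/* The single-store rewrite above, at source level (a sketch; I is a
   hypothetical 32-bit int, fully covered and sufficiently aligned):

     memset (&i, 0xab, sizeof i)   ->   i = 0xabababab;

   CVAL is built by smearing the low byte across the word; the
   (cval << 31) << 1 step stands in for a shift by 32, which would be
   undefined if HOST_WIDE_INT were only 32 bits wide.  */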
8648
8649 /* Fold function call to builtin bzero. Return
8650 NULL_TREE if no simplification can be made. */
8651
8652 static tree
8653 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8654 {
8655 if (! validate_arg (dest, POINTER_TYPE)
8656 || ! validate_arg (size, INTEGER_TYPE))
8657 return NULL_TREE;
8658
8659 if (!ignore)
8660 return NULL_TREE;
8661
8662 /* New argument list transforming bzero(ptr x, int y) to
8663 memset(ptr x, int 0, size_t y). This is done this way
 8664 so that if it isn't expanded inline, we fall back to
8665 calling bzero instead of memset. */
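  /* Illustrative sketch: bzero (p, n) becomes memset (p, 0, (size_t) n),
     which fold_builtin_memset may then simplify further.  */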
8666
8667 return fold_builtin_memset (loc, dest, integer_zero_node,
8668 fold_convert_loc (loc, size_type_node, size),
8669 void_type_node, ignore);
8670 }
8671
8672 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8673 NULL_TREE if no simplification can be made.
8674 If ENDP is 0, return DEST (like memcpy).
8675 If ENDP is 1, return DEST+LEN (like mempcpy).
8676 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8677 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8678 (memmove). */
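/* Illustrative sketch of the ENDP encoding (with n == LEN):

     memcpy (d, s, n)   returns d          (ENDP == 0)
     mempcpy (d, s, n)  returns d + n      (ENDP == 1)
     stpcpy (d, s)      returns d + n - 1  (ENDP == 2)
     memmove (d, s, n)  returns d, overlap allowed (ENDP == 3).  */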
8679
8680 static tree
8681 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8682 tree len, tree type, bool ignore, int endp)
8683 {
8684 tree destvar, srcvar, expr;
8685
8686 if (! validate_arg (dest, POINTER_TYPE)
8687 || ! validate_arg (src, POINTER_TYPE)
8688 || ! validate_arg (len, INTEGER_TYPE))
8689 return NULL_TREE;
8690
8691 /* If the LEN parameter is zero, return DEST. */
8692 if (integer_zerop (len))
8693 return omit_one_operand_loc (loc, type, dest, src);
8694
8695 /* If SRC and DEST are the same (and not volatile), return
8696 DEST{,+LEN,+LEN-1}. */
8697 if (operand_equal_p (src, dest, 0))
8698 expr = len;
8699 else
8700 {
8701 tree srctype, desttype;
8702 unsigned int src_align, dest_align;
8703 tree off0;
8704
8705 if (endp == 3)
8706 {
8707 src_align = get_pointer_alignment (src);
8708 dest_align = get_pointer_alignment (dest);
8709
8710 /* Both DEST and SRC must be pointer types.
8711 ??? This is what old code did. Is the testing for pointer types
8712 really mandatory?
8713
 8714 If SRC is readonly or the length is 1, we can use memcpy. */
8715 if (!dest_align || !src_align)
8716 return NULL_TREE;
8717 if (readonly_data_expr (src)
8718 || (host_integerp (len, 1)
8719 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8720 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8721 {
8722 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8723 if (!fn)
8724 return NULL_TREE;
8725 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8726 }
8727
8728 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8729 if (TREE_CODE (src) == ADDR_EXPR
8730 && TREE_CODE (dest) == ADDR_EXPR)
8731 {
8732 tree src_base, dest_base, fn;
8733 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8734 HOST_WIDE_INT size = -1;
8735 HOST_WIDE_INT maxsize = -1;
8736
8737 srcvar = TREE_OPERAND (src, 0);
8738 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8739 &size, &maxsize);
8740 destvar = TREE_OPERAND (dest, 0);
8741 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8742 &size, &maxsize);
8743 if (host_integerp (len, 1))
8744 maxsize = tree_low_cst (len, 1);
8745 else
8746 maxsize = -1;
8747 src_offset /= BITS_PER_UNIT;
8748 dest_offset /= BITS_PER_UNIT;
8749 if (SSA_VAR_P (src_base)
8750 && SSA_VAR_P (dest_base))
8751 {
8752 if (operand_equal_p (src_base, dest_base, 0)
8753 && ranges_overlap_p (src_offset, maxsize,
8754 dest_offset, maxsize))
8755 return NULL_TREE;
8756 }
8757 else if (TREE_CODE (src_base) == MEM_REF
8758 && TREE_CODE (dest_base) == MEM_REF)
8759 {
8760 double_int off;
8761 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8762 TREE_OPERAND (dest_base, 0), 0))
8763 return NULL_TREE;
8764 off = mem_ref_offset (src_base) +
8765 double_int::from_shwi (src_offset);
8766 if (!off.fits_shwi ())
8767 return NULL_TREE;
8768 src_offset = off.low;
8769 off = mem_ref_offset (dest_base) +
8770 double_int::from_shwi (dest_offset);
8771 if (!off.fits_shwi ())
8772 return NULL_TREE;
8773 dest_offset = off.low;
8774 if (ranges_overlap_p (src_offset, maxsize,
8775 dest_offset, maxsize))
8776 return NULL_TREE;
8777 }
8778 else
8779 return NULL_TREE;
8780
8781 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8782 if (!fn)
8783 return NULL_TREE;
8784 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8785 }
8786
 8787 /* If the destination and source do not alias, optimize into
8788 memcpy as well. */
8789 if ((is_gimple_min_invariant (dest)
8790 || TREE_CODE (dest) == SSA_NAME)
8791 && (is_gimple_min_invariant (src)
8792 || TREE_CODE (src) == SSA_NAME))
8793 {
8794 ao_ref destr, srcr;
8795 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8796 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8797 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8798 {
8799 tree fn;
8800 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8801 if (!fn)
8802 return NULL_TREE;
8803 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8804 }
8805 }
8806
8807 return NULL_TREE;
8808 }
8809
8810 if (!host_integerp (len, 0))
8811 return NULL_TREE;
 8812 /* FIXME:
 8813 This logic loses for arguments like (type *)malloc (sizeof (type)),
 8814 since we strip the casts down to the void * return value of malloc.
 8815 Perhaps we ought to inherit the type from the non-VOID argument here? */
8816 STRIP_NOPS (src);
8817 STRIP_NOPS (dest);
8818 if (!POINTER_TYPE_P (TREE_TYPE (src))
8819 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8820 return NULL_TREE;
 8821 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
8822 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8823 {
8824 tree tem = TREE_OPERAND (src, 0);
8825 STRIP_NOPS (tem);
8826 if (tem != TREE_OPERAND (src, 0))
8827 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8828 }
8829 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8830 {
8831 tree tem = TREE_OPERAND (dest, 0);
8832 STRIP_NOPS (tem);
8833 if (tem != TREE_OPERAND (dest, 0))
8834 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8835 }
8836 srctype = TREE_TYPE (TREE_TYPE (src));
8837 if (TREE_CODE (srctype) == ARRAY_TYPE
8838 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8839 {
8840 srctype = TREE_TYPE (srctype);
8841 STRIP_NOPS (src);
8842 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8843 }
8844 desttype = TREE_TYPE (TREE_TYPE (dest));
8845 if (TREE_CODE (desttype) == ARRAY_TYPE
8846 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8847 {
8848 desttype = TREE_TYPE (desttype);
8849 STRIP_NOPS (dest);
8850 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8851 }
8852 if (TREE_ADDRESSABLE (srctype)
8853 || TREE_ADDRESSABLE (desttype))
8854 return NULL_TREE;
8855
8856 src_align = get_pointer_alignment (src);
8857 dest_align = get_pointer_alignment (dest);
8858 if (dest_align < TYPE_ALIGN (desttype)
8859 || src_align < TYPE_ALIGN (srctype))
8860 return NULL_TREE;
8861
8862 if (!ignore)
8863 dest = builtin_save_expr (dest);
8864
8865 /* Build accesses at offset zero with a ref-all character type. */
8866 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8867 ptr_mode, true), 0);
8868
8869 destvar = dest;
8870 STRIP_NOPS (destvar);
8871 if (TREE_CODE (destvar) == ADDR_EXPR
8872 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8873 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8874 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8875 else
8876 destvar = NULL_TREE;
8877
8878 srcvar = src;
8879 STRIP_NOPS (srcvar);
8880 if (TREE_CODE (srcvar) == ADDR_EXPR
8881 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8882 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8883 {
8884 if (!destvar
8885 || src_align >= TYPE_ALIGN (desttype))
8886 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8887 srcvar, off0);
8888 else if (!STRICT_ALIGNMENT)
8889 {
8890 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8891 src_align);
8892 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8893 }
8894 else
8895 srcvar = NULL_TREE;
8896 }
8897 else
8898 srcvar = NULL_TREE;
8899
8900 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8901 return NULL_TREE;
8902
8903 if (srcvar == NULL_TREE)
8904 {
8905 STRIP_NOPS (src);
8906 if (src_align >= TYPE_ALIGN (desttype))
8907 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8908 else
8909 {
8910 if (STRICT_ALIGNMENT)
8911 return NULL_TREE;
8912 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8913 src_align);
8914 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8915 }
8916 }
8917 else if (destvar == NULL_TREE)
8918 {
8919 STRIP_NOPS (dest);
8920 if (dest_align >= TYPE_ALIGN (srctype))
8921 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8922 else
8923 {
8924 if (STRICT_ALIGNMENT)
8925 return NULL_TREE;
8926 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8927 dest_align);
8928 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8929 }
8930 }
8931
8932 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8933 }
8934
8935 if (ignore)
8936 return expr;
8937
8938 if (endp == 0 || endp == 3)
8939 return omit_one_operand_loc (loc, type, dest, expr);
8940
8941 if (expr == len)
8942 expr = NULL_TREE;
8943
8944 if (endp == 2)
8945 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8946 ssize_int (1));
8947
8948 dest = fold_build_pointer_plus_loc (loc, dest, len);
8949 dest = fold_convert_loc (loc, type, dest);
8950 if (expr)
8951 dest = omit_one_operand_loc (loc, type, dest, expr);
8952 return dest;
8953 }
8954
8955 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8956 If LEN is not NULL, it represents the length of the string to be
8957 copied. Return NULL_TREE if no simplification can be made. */
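/* Illustrative sketch: with a constant source and not optimizing for
   size,

     strcpy (d, "hi");   becomes   memcpy (d, "hi", 3);

   where 3 is strlen ("hi") + 1.  */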
8958
8959 tree
8960 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8961 {
8962 tree fn;
8963
8964 if (!validate_arg (dest, POINTER_TYPE)
8965 || !validate_arg (src, POINTER_TYPE))
8966 return NULL_TREE;
8967
8968 /* If SRC and DEST are the same (and not volatile), return DEST. */
8969 if (operand_equal_p (src, dest, 0))
8970 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8971
8972 if (optimize_function_for_size_p (cfun))
8973 return NULL_TREE;
8974
8975 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8976 if (!fn)
8977 return NULL_TREE;
8978
8979 if (!len)
8980 {
8981 len = c_strlen (src, 1);
8982 if (! len || TREE_SIDE_EFFECTS (len))
8983 return NULL_TREE;
8984 }
8985
8986 len = fold_convert_loc (loc, size_type_node, len);
8987 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
8988 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8989 build_call_expr_loc (loc, fn, 3, dest, src, len));
8990 }
8991
8992 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8993 Return NULL_TREE if no simplification can be made. */
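/* Illustrative sketch: with a constant source,

     stpcpy (d, "hi");   becomes   (memcpy (d, "hi", 3), d + 2);

   i.e. a copy of strlen + 1 bytes whose value is D plus the string
   length.  */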
8994
8995 static tree
8996 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8997 {
8998 tree fn, len, lenp1, call, type;
8999
9000 if (!validate_arg (dest, POINTER_TYPE)
9001 || !validate_arg (src, POINTER_TYPE))
9002 return NULL_TREE;
9003
9004 len = c_strlen (src, 1);
9005 if (!len
9006 || TREE_CODE (len) != INTEGER_CST)
9007 return NULL_TREE;
9008
9009 if (optimize_function_for_size_p (cfun)
9010 /* If length is zero it's small enough. */
9011 && !integer_zerop (len))
9012 return NULL_TREE;
9013
9014 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9015 if (!fn)
9016 return NULL_TREE;
9017
9018 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9019 fold_convert_loc (loc, size_type_node, len),
9020 build_int_cst (size_type_node, 1));
9021 /* We use dest twice in building our expression. Save it from
9022 multiple expansions. */
9023 dest = builtin_save_expr (dest);
9024 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9025
9026 type = TREE_TYPE (TREE_TYPE (fndecl));
9027 dest = fold_build_pointer_plus_loc (loc, dest, len);
9028 dest = fold_convert_loc (loc, type, dest);
9029 dest = omit_one_operand_loc (loc, type, dest, call);
9030 return dest;
9031 }
9032
9033 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9034 If SLEN is not NULL, it represents the length of the source string.
9035 Return NULL_TREE if no simplification can be made. */
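/* Illustrative sketch: when LEN is a constant no larger than
   strlen (SRC) + 1,

     strncpy (d, "hi", 3);   becomes   memcpy (d, "hi", 3);

   larger LENs are left alone, since they would also need a memset
   for the zero padding (see the FIXME below).  */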
9036
9037 tree
9038 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9039 tree src, tree len, tree slen)
9040 {
9041 tree fn;
9042
9043 if (!validate_arg (dest, POINTER_TYPE)
9044 || !validate_arg (src, POINTER_TYPE)
9045 || !validate_arg (len, INTEGER_TYPE))
9046 return NULL_TREE;
9047
9048 /* If the LEN parameter is zero, return DEST. */
9049 if (integer_zerop (len))
9050 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9051
9052 /* We can't compare slen with len as constants below if len is not a
9053 constant. */
9054 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9055 return NULL_TREE;
9056
9057 if (!slen)
9058 slen = c_strlen (src, 1);
9059
9060 /* Now, we must be passed a constant src ptr parameter. */
9061 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9062 return NULL_TREE;
9063
9064 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9065
9066 /* We do not support simplification of this case, though we do
9067 support it when expanding trees into RTL. */
9068 /* FIXME: generate a call to __builtin_memset. */
9069 if (tree_int_cst_lt (slen, len))
9070 return NULL_TREE;
9071
 9072 /* OK, transform into builtin memcpy. */
9073 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9074 if (!fn)
9075 return NULL_TREE;
9076
9077 len = fold_convert_loc (loc, size_type_node, len);
9078 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9079 build_call_expr_loc (loc, fn, 3, dest, src, len));
9080 }
9081
9082 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9083 arguments to the call, and TYPE is its return type.
9084 Return NULL_TREE if no simplification can be made. */
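/* Illustrative sketch: with a constant string and constant LEN,

     memchr ("hello", 'l', 5)   folds to   "hello" + 2

   while a miss folds to a null pointer of ARG1's type.  */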
9085
9086 static tree
9087 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9088 {
9089 if (!validate_arg (arg1, POINTER_TYPE)
9090 || !validate_arg (arg2, INTEGER_TYPE)
9091 || !validate_arg (len, INTEGER_TYPE))
9092 return NULL_TREE;
9093 else
9094 {
9095 const char *p1;
9096
9097 if (TREE_CODE (arg2) != INTEGER_CST
9098 || !host_integerp (len, 1))
9099 return NULL_TREE;
9100
9101 p1 = c_getstr (arg1);
9102 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9103 {
9104 char c;
9105 const char *r;
9106 tree tem;
9107
9108 if (target_char_cast (arg2, &c))
9109 return NULL_TREE;
9110
9111 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9112
9113 if (r == NULL)
9114 return build_int_cst (TREE_TYPE (arg1), 0);
9115
9116 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9117 return fold_convert_loc (loc, type, tem);
9118 }
9119 return NULL_TREE;
9120 }
9121 }
9122
9123 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9124 Return NULL_TREE if no simplification can be made. */
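/* Illustrative sketch of the folds below:

     memcmp (p, q, 0)        -> 0
     memcmp ("ab", "ac", 2)  -> -1
     memcmp (p, q, 1)        -> *(const unsigned char *) p
                                - *(const unsigned char *) q.  */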
9125
9126 static tree
9127 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9128 {
9129 const char *p1, *p2;
9130
9131 if (!validate_arg (arg1, POINTER_TYPE)
9132 || !validate_arg (arg2, POINTER_TYPE)
9133 || !validate_arg (len, INTEGER_TYPE))
9134 return NULL_TREE;
9135
9136 /* If the LEN parameter is zero, return zero. */
9137 if (integer_zerop (len))
9138 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9139 arg1, arg2);
9140
9141 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9142 if (operand_equal_p (arg1, arg2, 0))
9143 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9144
9145 p1 = c_getstr (arg1);
9146 p2 = c_getstr (arg2);
9147
9148 /* If all arguments are constant, and the value of len is not greater
9149 than the lengths of arg1 and arg2, evaluate at compile-time. */
9150 if (host_integerp (len, 1) && p1 && p2
9151 && compare_tree_int (len, strlen (p1) + 1) <= 0
9152 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9153 {
9154 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9155
9156 if (r > 0)
9157 return integer_one_node;
9158 else if (r < 0)
9159 return integer_minus_one_node;
9160 else
9161 return integer_zero_node;
9162 }
9163
 9164 /* If the LEN parameter is one, return an expression corresponding to
 9165 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9166 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9167 {
9168 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9169 tree cst_uchar_ptr_node
9170 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9171
9172 tree ind1
9173 = fold_convert_loc (loc, integer_type_node,
9174 build1 (INDIRECT_REF, cst_uchar_node,
9175 fold_convert_loc (loc,
9176 cst_uchar_ptr_node,
9177 arg1)));
9178 tree ind2
9179 = fold_convert_loc (loc, integer_type_node,
9180 build1 (INDIRECT_REF, cst_uchar_node,
9181 fold_convert_loc (loc,
9182 cst_uchar_ptr_node,
9183 arg2)));
9184 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9185 }
9186
9187 return NULL_TREE;
9188 }
9189
9190 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9191 Return NULL_TREE if no simplification can be made. */
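/* Illustrative sketch of the folds below:

     strcmp (p, p)      -> 0
     strcmp ("a", "b")  -> -1
     strcmp (p, "")     -> *(const unsigned char *) p.  */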
9192
9193 static tree
9194 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9195 {
9196 const char *p1, *p2;
9197
9198 if (!validate_arg (arg1, POINTER_TYPE)
9199 || !validate_arg (arg2, POINTER_TYPE))
9200 return NULL_TREE;
9201
9202 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9203 if (operand_equal_p (arg1, arg2, 0))
9204 return integer_zero_node;
9205
9206 p1 = c_getstr (arg1);
9207 p2 = c_getstr (arg2);
9208
9209 if (p1 && p2)
9210 {
9211 const int i = strcmp (p1, p2);
9212 if (i < 0)
9213 return integer_minus_one_node;
9214 else if (i > 0)
9215 return integer_one_node;
9216 else
9217 return integer_zero_node;
9218 }
9219
9220 /* If the second arg is "", return *(const unsigned char*)arg1. */
9221 if (p2 && *p2 == '\0')
9222 {
9223 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9224 tree cst_uchar_ptr_node
9225 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9226
9227 return fold_convert_loc (loc, integer_type_node,
9228 build1 (INDIRECT_REF, cst_uchar_node,
9229 fold_convert_loc (loc,
9230 cst_uchar_ptr_node,
9231 arg1)));
9232 }
9233
9234 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9235 if (p1 && *p1 == '\0')
9236 {
9237 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9238 tree cst_uchar_ptr_node
9239 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9240
9241 tree temp
9242 = fold_convert_loc (loc, integer_type_node,
9243 build1 (INDIRECT_REF, cst_uchar_node,
9244 fold_convert_loc (loc,
9245 cst_uchar_ptr_node,
9246 arg2)));
9247 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9248 }
9249
9250 return NULL_TREE;
9251 }
9252
9253 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9254 Return NULL_TREE if no simplification can be made. */
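/* Illustrative sketch: as for strcmp but bounded, so e.g.
   strncmp ("ab", "ac", 1) folds to 0, and a LEN of 1 folds to the
   difference of the first bytes.  */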
9255
9256 static tree
9257 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9258 {
9259 const char *p1, *p2;
9260
9261 if (!validate_arg (arg1, POINTER_TYPE)
9262 || !validate_arg (arg2, POINTER_TYPE)
9263 || !validate_arg (len, INTEGER_TYPE))
9264 return NULL_TREE;
9265
9266 /* If the LEN parameter is zero, return zero. */
9267 if (integer_zerop (len))
9268 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9269 arg1, arg2);
9270
9271 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9272 if (operand_equal_p (arg1, arg2, 0))
9273 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9274
9275 p1 = c_getstr (arg1);
9276 p2 = c_getstr (arg2);
9277
9278 if (host_integerp (len, 1) && p1 && p2)
9279 {
9280 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9281 if (i > 0)
9282 return integer_one_node;
9283 else if (i < 0)
9284 return integer_minus_one_node;
9285 else
9286 return integer_zero_node;
9287 }
9288
9289 /* If the second arg is "", and the length is greater than zero,
9290 return *(const unsigned char*)arg1. */
9291 if (p2 && *p2 == '\0'
9292 && TREE_CODE (len) == INTEGER_CST
9293 && tree_int_cst_sgn (len) == 1)
9294 {
9295 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9296 tree cst_uchar_ptr_node
9297 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9298
9299 return fold_convert_loc (loc, integer_type_node,
9300 build1 (INDIRECT_REF, cst_uchar_node,
9301 fold_convert_loc (loc,
9302 cst_uchar_ptr_node,
9303 arg1)));
9304 }
9305
9306 /* If the first arg is "", and the length is greater than zero,
9307 return -*(const unsigned char*)arg2. */
9308 if (p1 && *p1 == '\0'
9309 && TREE_CODE (len) == INTEGER_CST
9310 && tree_int_cst_sgn (len) == 1)
9311 {
9312 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9313 tree cst_uchar_ptr_node
9314 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9315
9316 tree temp = fold_convert_loc (loc, integer_type_node,
9317 build1 (INDIRECT_REF, cst_uchar_node,
9318 fold_convert_loc (loc,
9319 cst_uchar_ptr_node,
9320 arg2)));
9321 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9322 }
9323
 9324 /* If the LEN parameter is one, return an expression corresponding to
 9325 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9326 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9327 {
9328 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9329 tree cst_uchar_ptr_node
9330 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9331
9332 tree ind1 = fold_convert_loc (loc, integer_type_node,
9333 build1 (INDIRECT_REF, cst_uchar_node,
9334 fold_convert_loc (loc,
9335 cst_uchar_ptr_node,
9336 arg1)));
9337 tree ind2 = fold_convert_loc (loc, integer_type_node,
9338 build1 (INDIRECT_REF, cst_uchar_node,
9339 fold_convert_loc (loc,
9340 cst_uchar_ptr_node,
9341 arg2)));
9342 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9343 }
9344
9345 return NULL_TREE;
9346 }
9347
9348 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9349 ARG. Return NULL_TREE if no simplification can be made. */
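/* Illustrative sketch: signbit (-3.0) folds to 1, signbit (X) for
   known non-negative X folds to 0, and without signed zeros it
   becomes X < 0.0.  */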
9350
9351 static tree
9352 fold_builtin_signbit (location_t loc, tree arg, tree type)
9353 {
9354 if (!validate_arg (arg, REAL_TYPE))
9355 return NULL_TREE;
9356
9357 /* If ARG is a compile-time constant, determine the result. */
9358 if (TREE_CODE (arg) == REAL_CST
9359 && !TREE_OVERFLOW (arg))
9360 {
9361 REAL_VALUE_TYPE c;
9362
9363 c = TREE_REAL_CST (arg);
9364 return (REAL_VALUE_NEGATIVE (c)
9365 ? build_one_cst (type)
9366 : build_zero_cst (type));
9367 }
9368
9369 /* If ARG is non-negative, the result is always zero. */
9370 if (tree_expr_nonnegative_p (arg))
9371 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9372
9373 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9374 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9375 return fold_convert (type,
9376 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9377 build_real (TREE_TYPE (arg), dconst0)));
9378
9379 return NULL_TREE;
9380 }
9381
9382 /* Fold function call to builtin copysign, copysignf or copysignl with
9383 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9384 be made. */
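/* Illustrative sketch of the folds below:

     copysign (x, x)       -> x
     copysign (3.0, -0.5)  -> -3.0
     copysign (x, 2.0)     -> fabs (x).  */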
9385
9386 static tree
9387 fold_builtin_copysign (location_t loc, tree fndecl,
9388 tree arg1, tree arg2, tree type)
9389 {
9390 tree tem;
9391
9392 if (!validate_arg (arg1, REAL_TYPE)
9393 || !validate_arg (arg2, REAL_TYPE))
9394 return NULL_TREE;
9395
9396 /* copysign(X,X) is X. */
9397 if (operand_equal_p (arg1, arg2, 0))
9398 return fold_convert_loc (loc, type, arg1);
9399
9400 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9401 if (TREE_CODE (arg1) == REAL_CST
9402 && TREE_CODE (arg2) == REAL_CST
9403 && !TREE_OVERFLOW (arg1)
9404 && !TREE_OVERFLOW (arg2))
9405 {
9406 REAL_VALUE_TYPE c1, c2;
9407
9408 c1 = TREE_REAL_CST (arg1);
9409 c2 = TREE_REAL_CST (arg2);
9410 /* c1.sign := c2.sign. */
9411 real_copysign (&c1, &c2);
9412 return build_real (type, c1);
9413 }
9414
9415 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9416 Remember to evaluate Y for side-effects. */
9417 if (tree_expr_nonnegative_p (arg2))
9418 return omit_one_operand_loc (loc, type,
9419 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9420 arg2);
9421
9422 /* Strip sign changing operations for the first argument. */
9423 tem = fold_strip_sign_ops (arg1);
9424 if (tem)
9425 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9426
9427 return NULL_TREE;
9428 }
9429
9430 /* Fold a call to builtin isascii with argument ARG. */
9431
9432 static tree
9433 fold_builtin_isascii (location_t loc, tree arg)
9434 {
9435 if (!validate_arg (arg, INTEGER_TYPE))
9436 return NULL_TREE;
9437 else
9438 {
9439 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9440 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9441 build_int_cst (integer_type_node,
9442 ~ (unsigned HOST_WIDE_INT) 0x7f));
9443 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9444 arg, integer_zero_node);
9445 }
9446 }
9447
9448 /* Fold a call to builtin toascii with argument ARG. */
9449
9450 static tree
9451 fold_builtin_toascii (location_t loc, tree arg)
9452 {
9453 if (!validate_arg (arg, INTEGER_TYPE))
9454 return NULL_TREE;
9455
9456 /* Transform toascii(c) -> (c & 0x7f). */
9457 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9458 build_int_cst (integer_type_node, 0x7f));
9459 }
9460
9461 /* Fold a call to builtin isdigit with argument ARG. */
9462
9463 static tree
9464 fold_builtin_isdigit (location_t loc, tree arg)
9465 {
9466 if (!validate_arg (arg, INTEGER_TYPE))
9467 return NULL_TREE;
9468 else
9469 {
9470 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9471 /* According to the C standard, isdigit is unaffected by locale.
9472 However, it definitely is affected by the target character set. */
9473 unsigned HOST_WIDE_INT target_digit0
9474 = lang_hooks.to_target_charset ('0');
9475
9476 if (target_digit0 == 0)
9477 return NULL_TREE;
9478
9479 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9480 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9481 build_int_cst (unsigned_type_node, target_digit0));
9482 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9483 build_int_cst (unsigned_type_node, 9));
9484 }
9485 }
9486
9487 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9488
9489 static tree
9490 fold_builtin_fabs (location_t loc, tree arg, tree type)
9491 {
9492 if (!validate_arg (arg, REAL_TYPE))
9493 return NULL_TREE;
9494
9495 arg = fold_convert_loc (loc, type, arg);
9496 if (TREE_CODE (arg) == REAL_CST)
9497 return fold_abs_const (arg, type);
9498 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9499 }
9500
9501 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9502
9503 static tree
9504 fold_builtin_abs (location_t loc, tree arg, tree type)
9505 {
9506 if (!validate_arg (arg, INTEGER_TYPE))
9507 return NULL_TREE;
9508
9509 arg = fold_convert_loc (loc, type, arg);
9510 if (TREE_CODE (arg) == INTEGER_CST)
9511 return fold_abs_const (arg, type);
9512 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9513 }
9514
9515 /* Fold a fma operation with arguments ARG[012]. */
9516
9517 tree
9518 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9519 tree type, tree arg0, tree arg1, tree arg2)
9520 {
9521 if (TREE_CODE (arg0) == REAL_CST
9522 && TREE_CODE (arg1) == REAL_CST
9523 && TREE_CODE (arg2) == REAL_CST)
9524 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9525
9526 return NULL_TREE;
9527 }
9528
9529 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
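/* Illustrative sketch: fma (2.0, 3.0, 4.0) folds to 10.0 via MPFR;
   a non-constant call becomes an FMA_EXPR only when the target has a
   direct fma pattern (see below).  */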
9530
9531 static tree
9532 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9533 {
9534 if (validate_arg (arg0, REAL_TYPE)
9535 && validate_arg (arg1, REAL_TYPE)
9536 && validate_arg (arg2, REAL_TYPE))
9537 {
9538 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9539 if (tem)
9540 return tem;
9541
9542 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9543 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9544 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9545 }
9546 return NULL_TREE;
9547 }
9548
9549 /* Fold a call to builtin fmin or fmax. */
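/* Illustrative sketch of the folds below:

     fmin (x, x)    -> x
     fmin (x, qNaN) -> x
     and with -ffinite-math-only, fmin (x, y) -> MIN_EXPR <x, y>.  */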
9550
9551 static tree
9552 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9553 tree type, bool max)
9554 {
9555 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9556 {
9557 /* Calculate the result when the argument is a constant. */
9558 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9559
9560 if (res)
9561 return res;
9562
9563 /* If either argument is NaN, return the other one. Avoid the
9564 transformation if we get (and honor) a signalling NaN. Using
9565 omit_one_operand() ensures we create a non-lvalue. */
9566 if (TREE_CODE (arg0) == REAL_CST
9567 && real_isnan (&TREE_REAL_CST (arg0))
9568 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9569 || ! TREE_REAL_CST (arg0).signalling))
9570 return omit_one_operand_loc (loc, type, arg1, arg0);
9571 if (TREE_CODE (arg1) == REAL_CST
9572 && real_isnan (&TREE_REAL_CST (arg1))
9573 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9574 || ! TREE_REAL_CST (arg1).signalling))
9575 return omit_one_operand_loc (loc, type, arg0, arg1);
9576
9577 /* Transform fmin/fmax(x,x) -> x. */
9578 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9579 return omit_one_operand_loc (loc, type, arg0, arg1);
9580
9581 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9582 functions to return the numeric arg if the other one is NaN.
9583 These tree codes don't honor that, so only transform if
9584 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9585 handled, so we don't have to worry about it either. */
9586 if (flag_finite_math_only)
9587 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9588 fold_convert_loc (loc, type, arg0),
9589 fold_convert_loc (loc, type, arg1));
9590 }
9591 return NULL_TREE;
9592 }
9593
9594 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9595
9596 static tree
9597 fold_builtin_carg (location_t loc, tree arg, tree type)
9598 {
9599 if (validate_arg (arg, COMPLEX_TYPE)
9600 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9601 {
9602 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9603
9604 if (atan2_fn)
9605 {
9606 tree new_arg = builtin_save_expr (arg);
9607 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9608 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9609 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9610 }
9611 }
9612
9613 return NULL_TREE;
9614 }
9615
9616 /* Fold a call to builtin logb/ilogb. */
9617
9618 static tree
9619 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9620 {
9621 if (! validate_arg (arg, REAL_TYPE))
9622 return NULL_TREE;
9623
9624 STRIP_NOPS (arg);
9625
9626 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9627 {
9628 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9629
9630 switch (value->cl)
9631 {
9632 case rvc_nan:
9633 case rvc_inf:
9634 /* If arg is Inf or NaN and we're logb, return it. */
9635 if (TREE_CODE (rettype) == REAL_TYPE)
9636 {
9637 /* For logb(-Inf) we have to return +Inf. */
9638 if (real_isinf (value) && real_isneg (value))
9639 {
9640 REAL_VALUE_TYPE tem;
9641 real_inf (&tem);
9642 return build_real (rettype, tem);
9643 }
9644 return fold_convert_loc (loc, rettype, arg);
9645 }
9646 /* Fall through... */
9647 case rvc_zero:
 9648 /* Zero may set errno and/or raise an exception for logb; also,
 9649 for ilogb we don't know FP_ILOGB0. */
9650 return NULL_TREE;
9651 case rvc_normal:
9652 /* For normal numbers, proceed iff radix == 2. In GCC,
9653 normalized significands are in the range [0.5, 1.0). We
9654 want the exponent as if they were [1.0, 2.0) so get the
9655 exponent and subtract 1. */
9656 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9657 return fold_convert_loc (loc, rettype,
9658 build_int_cst (integer_type_node,
9659 REAL_EXP (value)-1));
9660 break;
9661 }
9662 }
9663
9664 return NULL_TREE;
9665 }
9666
9667 /* Fold a call to builtin significand, if radix == 2. */
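/* Illustrative sketch: significand (12.0) folds to 1.5, since
   12.0 == 1.5 * 2**3.  */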
9668
9669 static tree
9670 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9671 {
9672 if (! validate_arg (arg, REAL_TYPE))
9673 return NULL_TREE;
9674
9675 STRIP_NOPS (arg);
9676
9677 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9678 {
9679 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9680
9681 switch (value->cl)
9682 {
9683 case rvc_zero:
9684 case rvc_nan:
9685 case rvc_inf:
9686 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9687 return fold_convert_loc (loc, rettype, arg);
9688 case rvc_normal:
9689 /* For normal numbers, proceed iff radix == 2. */
9690 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9691 {
9692 REAL_VALUE_TYPE result = *value;
9693 /* In GCC, normalized significands are in the range [0.5,
9694 1.0). We want them to be [1.0, 2.0) so set the
9695 exponent to 1. */
9696 SET_REAL_EXP (&result, 1);
9697 return build_real (rettype, result);
9698 }
9699 break;
9700 }
9701 }
9702
9703 return NULL_TREE;
9704 }
9705
9706 /* Fold a call to builtin frexp, we can assume the base is 2. */
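/* Illustrative sketch: frexp (12.0, &e) folds to (e = 4, 0.75),
   since 12.0 == 0.75 * 2**4.  */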
9707
9708 static tree
9709 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9710 {
9711 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9712 return NULL_TREE;
9713
9714 STRIP_NOPS (arg0);
9715
9716 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9717 return NULL_TREE;
9718
9719 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9720
9721 /* Proceed if a valid pointer type was passed in. */
9722 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9723 {
9724 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9725 tree frac, exp;
9726
9727 switch (value->cl)
9728 {
9729 case rvc_zero:
9730 /* For +-0, return (*exp = 0, +-0). */
9731 exp = integer_zero_node;
9732 frac = arg0;
9733 break;
9734 case rvc_nan:
9735 case rvc_inf:
9736 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9737 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9738 case rvc_normal:
9739 {
9740 /* Since the frexp function always expects base 2, and in
9741 GCC normalized significands are already in the range
9742 [0.5, 1.0), we have exactly what frexp wants. */
9743 REAL_VALUE_TYPE frac_rvt = *value;
9744 SET_REAL_EXP (&frac_rvt, 0);
9745 frac = build_real (rettype, frac_rvt);
9746 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9747 }
9748 break;
9749 default:
9750 gcc_unreachable ();
9751 }
9752
 9753 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9754 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9755 TREE_SIDE_EFFECTS (arg1) = 1;
9756 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9757 }
9758
9759 return NULL_TREE;
9760 }
9761
9762 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9763 then we can assume the base is two. If it's false, then we have to
9764 check the mode of the TYPE parameter in certain cases. */
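/* Illustrative sketch: ldexp (3.0, 2) folds to 12.0; scalbn and
   scalbln are handled the same way when the type's radix is 2.  */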
9765
9766 static tree
9767 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9768 tree type, bool ldexp)
9769 {
9770 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9771 {
9772 STRIP_NOPS (arg0);
9773 STRIP_NOPS (arg1);
9774
9775 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9776 if (real_zerop (arg0) || integer_zerop (arg1)
9777 || (TREE_CODE (arg0) == REAL_CST
9778 && !real_isfinite (&TREE_REAL_CST (arg0))))
9779 return omit_one_operand_loc (loc, type, arg0, arg1);
9780
9781 /* If both arguments are constant, then try to evaluate it. */
9782 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9783 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9784 && host_integerp (arg1, 0))
9785 {
9786 /* Bound the maximum adjustment to twice the range of the
 9787 mode's valid exponents. Use labs to ensure the range is
9788 positive as a sanity check. */
9789 const long max_exp_adj = 2 *
9790 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9791 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9792
9793 /* Get the user-requested adjustment. */
9794 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9795
9796 /* The requested adjustment must be inside this range. This
9797 is a preliminary cap to avoid things like overflow, we
9798 may still fail to compute the result for other reasons. */
9799 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9800 {
9801 REAL_VALUE_TYPE initial_result;
9802
9803 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9804
9805 /* Ensure we didn't overflow. */
9806 if (! real_isinf (&initial_result))
9807 {
9808 const REAL_VALUE_TYPE trunc_result
9809 = real_value_truncate (TYPE_MODE (type), initial_result);
9810
9811 /* Only proceed if the target mode can hold the
9812 resulting value. */
9813 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9814 return build_real (type, trunc_result);
9815 }
9816 }
9817 }
9818 }
9819
9820 return NULL_TREE;
9821 }
9822
9823 /* Fold a call to builtin modf. */
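/* Illustrative sketch: modf (2.5, &ip) folds to (ip = 2.0, 0.5),
   and modf (-2.0, &ip) to (ip = -2.0, -0.0).  */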
9824
9825 static tree
9826 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9827 {
9828 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9829 return NULL_TREE;
9830
9831 STRIP_NOPS (arg0);
9832
9833 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9834 return NULL_TREE;
9835
9836 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9837
9838 /* Proceed if a valid pointer type was passed in. */
9839 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9840 {
9841 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9842 REAL_VALUE_TYPE trunc, frac;
9843
9844 switch (value->cl)
9845 {
9846 case rvc_nan:
9847 case rvc_zero:
9848 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9849 trunc = frac = *value;
9850 break;
9851 case rvc_inf:
9852 /* For +-Inf, return (*arg1 = arg0, +-0). */
9853 frac = dconst0;
9854 frac.sign = value->sign;
9855 trunc = *value;
9856 break;
9857 case rvc_normal:
9858 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9859 real_trunc (&trunc, VOIDmode, value);
9860 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9861 /* If the original number was negative and already
9862 integral, then the fractional part is -0.0. */
9863 if (value->sign && frac.cl == rvc_zero)
9864 frac.sign = value->sign;
9865 break;
9866 }
9867
9868 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9869 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9870 build_real (rettype, trunc));
9871 TREE_SIDE_EFFECTS (arg1) = 1;
9872 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9873 build_real (rettype, frac));
9874 }
9875
9876 return NULL_TREE;
9877 }
9878
 9879 /* Given a location LOC, an interclass builtin function decl FNDECL
 9880 and its single argument ARG, return a folded expression computing
 9881 the same, or NULL_TREE if we either couldn't or didn't want to fold
 9882 (the latter happens if there's an RTL instruction available). */
9883
9884 static tree
9885 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9886 {
9887 enum machine_mode mode;
9888
9889 if (!validate_arg (arg, REAL_TYPE))
9890 return NULL_TREE;
9891
9892 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9893 return NULL_TREE;
9894
9895 mode = TYPE_MODE (TREE_TYPE (arg));
9896
9897 /* If there is no optab, try generic code. */
9898 switch (DECL_FUNCTION_CODE (fndecl))
9899 {
9900 tree result;
9901
9902 CASE_FLT_FN (BUILT_IN_ISINF):
9903 {
9904 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9905 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9906 tree const type = TREE_TYPE (arg);
9907 REAL_VALUE_TYPE r;
9908 char buf[128];
9909
9910 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9911 real_from_string (&r, buf);
9912 result = build_call_expr (isgr_fn, 2,
9913 fold_build1_loc (loc, ABS_EXPR, type, arg),
9914 build_real (type, r));
9915 return result;
9916 }
9917 CASE_FLT_FN (BUILT_IN_FINITE):
9918 case BUILT_IN_ISFINITE:
9919 {
9920 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9921 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9922 tree const type = TREE_TYPE (arg);
9923 REAL_VALUE_TYPE r;
9924 char buf[128];
9925
9926 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9927 real_from_string (&r, buf);
9928 result = build_call_expr (isle_fn, 2,
9929 fold_build1_loc (loc, ABS_EXPR, type, arg),
9930 build_real (type, r));
9931 /*result = fold_build2_loc (loc, UNGT_EXPR,
9932 TREE_TYPE (TREE_TYPE (fndecl)),
9933 fold_build1_loc (loc, ABS_EXPR, type, arg),
9934 build_real (type, r));
9935 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9936 TREE_TYPE (TREE_TYPE (fndecl)),
9937 result);*/
9938 return result;
9939 }
9940 case BUILT_IN_ISNORMAL:
9941 {
9942 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9943 islessequal(fabs(x),DBL_MAX). */
9944 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9945 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9946 tree const type = TREE_TYPE (arg);
9947 REAL_VALUE_TYPE rmax, rmin;
9948 char buf[128];
9949
9950 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9951 real_from_string (&rmax, buf);
9952 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9953 real_from_string (&rmin, buf);
9954 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9955 result = build_call_expr (isle_fn, 2, arg,
9956 build_real (type, rmax));
9957 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9958 build_call_expr (isge_fn, 2, arg,
9959 build_real (type, rmin)));
9960 return result;
9961 }
9962 default:
9963 break;
9964 }
9965
9966 return NULL_TREE;
9967 }
9968
 9969 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9970 ARG is the argument for the call. */
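/* Illustrative sketch: __builtin_isinf (-Inf) folds to -1,
   __builtin_isnan (1.0) to 0, and a non-constant isnan (x) to
   UNORDERED_EXPR <x, x>.  */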
9971
9972 static tree
9973 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9974 {
9975 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9976 REAL_VALUE_TYPE r;
9977
9978 if (!validate_arg (arg, REAL_TYPE))
9979 return NULL_TREE;
9980
9981 switch (builtin_index)
9982 {
9983 case BUILT_IN_ISINF:
9984 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9985 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9986
9987 if (TREE_CODE (arg) == REAL_CST)
9988 {
9989 r = TREE_REAL_CST (arg);
9990 if (real_isinf (&r))
9991 return real_compare (GT_EXPR, &r, &dconst0)
9992 ? integer_one_node : integer_minus_one_node;
9993 else
9994 return integer_zero_node;
9995 }
9996
9997 return NULL_TREE;
9998
9999 case BUILT_IN_ISINF_SIGN:
10000 {
10001 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10002 /* In a boolean context, GCC will fold the inner COND_EXPR to
10003 1. So e.g. "if (isinf_sign(x))" would be folded to just
10004 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10005 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10006 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10007 tree tmp = NULL_TREE;
10008
10009 arg = builtin_save_expr (arg);
10010
10011 if (signbit_fn && isinf_fn)
10012 {
10013 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10014 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10015
10016 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10017 signbit_call, integer_zero_node);
10018 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10019 isinf_call, integer_zero_node);
10020
10021 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10022 integer_minus_one_node, integer_one_node);
10023 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10024 isinf_call, tmp,
10025 integer_zero_node);
10026 }
10027
10028 return tmp;
10029 }
10030
10031 case BUILT_IN_ISFINITE:
10032 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10033 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10034 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10035
10036 if (TREE_CODE (arg) == REAL_CST)
10037 {
10038 r = TREE_REAL_CST (arg);
10039 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10040 }
10041
10042 return NULL_TREE;
10043
10044 case BUILT_IN_ISNAN:
10045 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10046 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10047
10048 if (TREE_CODE (arg) == REAL_CST)
10049 {
10050 r = TREE_REAL_CST (arg);
10051 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10052 }
10053
10054 arg = builtin_save_expr (arg);
10055 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10056
10057 default:
10058 gcc_unreachable ();
10059 }
10060 }
10061
10062 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10063 This builtin will generate code to return the appropriate floating
10064 point classification depending on the value of the floating point
10065 number passed in. The possible return values must be supplied as
10066 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
 10067 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
 10068 one floating point argument, which is "type generic". */
10069
10070 static tree
10071 fold_builtin_fpclassify (location_t loc, tree exp)
10072 {
10073 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10074 arg, type, res, tmp;
10075 enum machine_mode mode;
10076 REAL_VALUE_TYPE r;
10077 char buf[128];
10078
10079 /* Verify the required arguments in the original call. */
10080 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10081 INTEGER_TYPE, INTEGER_TYPE,
10082 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10083 return NULL_TREE;
10084
10085 fp_nan = CALL_EXPR_ARG (exp, 0);
10086 fp_infinite = CALL_EXPR_ARG (exp, 1);
10087 fp_normal = CALL_EXPR_ARG (exp, 2);
10088 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10089 fp_zero = CALL_EXPR_ARG (exp, 4);
10090 arg = CALL_EXPR_ARG (exp, 5);
10091 type = TREE_TYPE (arg);
10092 mode = TYPE_MODE (type);
10093 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10094
10095 /* fpclassify(x) ->
10096 isnan(x) ? FP_NAN :
10097 (fabs(x) == Inf ? FP_INFINITE :
10098 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10099 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10100
10101 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10102 build_real (type, dconst0));
10103 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10104 tmp, fp_zero, fp_subnormal);
10105
10106 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10107 real_from_string (&r, buf);
10108 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10109 arg, build_real (type, r));
10110 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10111
10112 if (HONOR_INFINITIES (mode))
10113 {
10114 real_inf (&r);
10115 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10116 build_real (type, r));
10117 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10118 fp_infinite, res);
10119 }
10120
10121 if (HONOR_NANS (mode))
10122 {
10123 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10124 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10125 }
10126
10127 return res;
10128 }
10129
10130 /* Fold a call to an unordered comparison function such as
10131 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10132 being called and ARG0 and ARG1 are the arguments for the call.
10133 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10134 the opposite of the desired result. UNORDERED_CODE is used
10135 for modes that can hold NaNs and ORDERED_CODE is used for
10136 the rest. */
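/* Illustrative sketch: for __builtin_isgreater the caller passes
   UNLE_EXPR and LE_EXPR, so the result is built as !(x <= y), or as
   !UNLE (x, y) when the mode honors NaNs.  */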
10137
10138 static tree
10139 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10140 enum tree_code unordered_code,
10141 enum tree_code ordered_code)
10142 {
10143 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10144 enum tree_code code;
10145 tree type0, type1;
10146 enum tree_code code0, code1;
10147 tree cmp_type = NULL_TREE;
10148
10149 type0 = TREE_TYPE (arg0);
10150 type1 = TREE_TYPE (arg1);
10151
10152 code0 = TREE_CODE (type0);
10153 code1 = TREE_CODE (type1);
10154
10155 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10156 /* Choose the wider of two real types. */
10157 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10158 ? type0 : type1;
10159 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10160 cmp_type = type0;
10161 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10162 cmp_type = type1;
10163
10164 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10165 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10166
10167 if (unordered_code == UNORDERED_EXPR)
10168 {
10169 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10170 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10171 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10172 }
10173
10174 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10175 : ordered_code;
10176 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10177 fold_build2_loc (loc, code, type, arg0, arg1));
10178 }
10179
10180 /* Fold a call to built-in function FNDECL with 0 arguments.
10181 IGNORE is true if the result of the function call is ignored. This
10182 function returns NULL_TREE if no simplification was possible. */
10183
10184 static tree
10185 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10186 {
10187 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10188 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10189 switch (fcode)
10190 {
10191 CASE_FLT_FN (BUILT_IN_INF):
10192 case BUILT_IN_INFD32:
10193 case BUILT_IN_INFD64:
10194 case BUILT_IN_INFD128:
10195 return fold_builtin_inf (loc, type, true);
10196
10197 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10198 return fold_builtin_inf (loc, type, false);
10199
10200 case BUILT_IN_CLASSIFY_TYPE:
10201 return fold_builtin_classify_type (NULL_TREE);
10202
10203 case BUILT_IN_UNREACHABLE:
10204 if (flag_sanitize & SANITIZE_UNREACHABLE
10205 && (current_function_decl == NULL
10206 || !lookup_attribute ("no_sanitize_undefined",
10207 DECL_ATTRIBUTES (current_function_decl))))
10208 return ubsan_instrument_unreachable (loc);
10209 break;
10210
10211 default:
10212 break;
10213 }
10214 return NULL_TREE;
10215 }
10216
10217 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10218 IGNORE is true if the result of the function call is ignored. This
10219 function returns NULL_TREE if no simplification was possible. */
10220
10221 static tree
10222 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10223 {
10224 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10225 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10226 switch (fcode)
10227 {
10228 case BUILT_IN_CONSTANT_P:
10229 {
10230 tree val = fold_builtin_constant_p (arg0);
10231
10232 /* Gimplification will pull the CALL_EXPR for the builtin out of
10233 an if condition. When not optimizing, we'll not CSE it back.
 10234 To avoid link-error style regressions, return false now. */
10235 if (!val && !optimize)
10236 val = integer_zero_node;
10237
10238 return val;
10239 }
10240
10241 case BUILT_IN_CLASSIFY_TYPE:
10242 return fold_builtin_classify_type (arg0);
10243
10244 case BUILT_IN_STRLEN:
10245 return fold_builtin_strlen (loc, type, arg0);
10246
10247 CASE_FLT_FN (BUILT_IN_FABS):
10248 case BUILT_IN_FABSD32:
10249 case BUILT_IN_FABSD64:
10250 case BUILT_IN_FABSD128:
10251 return fold_builtin_fabs (loc, arg0, type);
10252
10253 case BUILT_IN_ABS:
10254 case BUILT_IN_LABS:
10255 case BUILT_IN_LLABS:
10256 case BUILT_IN_IMAXABS:
10257 return fold_builtin_abs (loc, arg0, type);
10258
10259 CASE_FLT_FN (BUILT_IN_CONJ):
10260 if (validate_arg (arg0, COMPLEX_TYPE)
10261 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10262 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10263 break;
10264
10265 CASE_FLT_FN (BUILT_IN_CREAL):
10266 if (validate_arg (arg0, COMPLEX_TYPE)
10267 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
 10268 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10269 break;
10270
10271 CASE_FLT_FN (BUILT_IN_CIMAG):
10272 if (validate_arg (arg0, COMPLEX_TYPE)
10273 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10274 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10275 break;
10276
10277 CASE_FLT_FN (BUILT_IN_CCOS):
10278 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10279
10280 CASE_FLT_FN (BUILT_IN_CCOSH):
10281 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10282
10283 CASE_FLT_FN (BUILT_IN_CPROJ):
10284 return fold_builtin_cproj (loc, arg0, type);
10285
10286 CASE_FLT_FN (BUILT_IN_CSIN):
10287 if (validate_arg (arg0, COMPLEX_TYPE)
10288 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10289 return do_mpc_arg1 (arg0, type, mpc_sin);
10290 break;
10291
10292 CASE_FLT_FN (BUILT_IN_CSINH):
10293 if (validate_arg (arg0, COMPLEX_TYPE)
10294 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10295 return do_mpc_arg1 (arg0, type, mpc_sinh);
10296 break;
10297
10298 CASE_FLT_FN (BUILT_IN_CTAN):
10299 if (validate_arg (arg0, COMPLEX_TYPE)
10300 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10301 return do_mpc_arg1 (arg0, type, mpc_tan);
10302 break;
10303
10304 CASE_FLT_FN (BUILT_IN_CTANH):
10305 if (validate_arg (arg0, COMPLEX_TYPE)
10306 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10307 return do_mpc_arg1 (arg0, type, mpc_tanh);
10308 break;
10309
10310 CASE_FLT_FN (BUILT_IN_CLOG):
10311 if (validate_arg (arg0, COMPLEX_TYPE)
10312 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10313 return do_mpc_arg1 (arg0, type, mpc_log);
10314 break;
10315
10316 CASE_FLT_FN (BUILT_IN_CSQRT):
10317 if (validate_arg (arg0, COMPLEX_TYPE)
10318 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10319 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10320 break;
10321
10322 CASE_FLT_FN (BUILT_IN_CASIN):
10323 if (validate_arg (arg0, COMPLEX_TYPE)
10324 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10325 return do_mpc_arg1 (arg0, type, mpc_asin);
10326 break;
10327
10328 CASE_FLT_FN (BUILT_IN_CACOS):
10329 if (validate_arg (arg0, COMPLEX_TYPE)
10330 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10331 return do_mpc_arg1 (arg0, type, mpc_acos);
10332 break;
10333
10334 CASE_FLT_FN (BUILT_IN_CATAN):
10335 if (validate_arg (arg0, COMPLEX_TYPE)
10336 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10337 return do_mpc_arg1 (arg0, type, mpc_atan);
10338 break;
10339
10340 CASE_FLT_FN (BUILT_IN_CASINH):
10341 if (validate_arg (arg0, COMPLEX_TYPE)
10342 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10343 return do_mpc_arg1 (arg0, type, mpc_asinh);
10344 break;
10345
10346 CASE_FLT_FN (BUILT_IN_CACOSH):
10347 if (validate_arg (arg0, COMPLEX_TYPE)
10348 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10349 return do_mpc_arg1 (arg0, type, mpc_acosh);
10350 break;
10351
10352 CASE_FLT_FN (BUILT_IN_CATANH):
10353 if (validate_arg (arg0, COMPLEX_TYPE)
10354 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10355 return do_mpc_arg1 (arg0, type, mpc_atanh);
10356 break;
10357
10358 CASE_FLT_FN (BUILT_IN_CABS):
10359 return fold_builtin_cabs (loc, arg0, type, fndecl);
10360
10361 CASE_FLT_FN (BUILT_IN_CARG):
10362 return fold_builtin_carg (loc, arg0, type);
10363
10364 CASE_FLT_FN (BUILT_IN_SQRT):
10365 return fold_builtin_sqrt (loc, arg0, type);
10366
10367 CASE_FLT_FN (BUILT_IN_CBRT):
10368 return fold_builtin_cbrt (loc, arg0, type);
10369
10370 CASE_FLT_FN (BUILT_IN_ASIN):
10371 if (validate_arg (arg0, REAL_TYPE))
10372 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10373 &dconstm1, &dconst1, true);
10374 break;
10375
10376 CASE_FLT_FN (BUILT_IN_ACOS):
10377 if (validate_arg (arg0, REAL_TYPE))
10378 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10379 &dconstm1, &dconst1, true);
10380 break;
10381
10382 CASE_FLT_FN (BUILT_IN_ATAN):
10383 if (validate_arg (arg0, REAL_TYPE))
10384 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10385 break;
10386
10387 CASE_FLT_FN (BUILT_IN_ASINH):
10388 if (validate_arg (arg0, REAL_TYPE))
10389 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10390 break;
10391
10392 CASE_FLT_FN (BUILT_IN_ACOSH):
10393 if (validate_arg (arg0, REAL_TYPE))
10394 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10395 &dconst1, NULL, true);
10396 break;
10397
10398 CASE_FLT_FN (BUILT_IN_ATANH):
10399 if (validate_arg (arg0, REAL_TYPE))
10400 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10401 &dconstm1, &dconst1, false);
10402 break;
10403
10404 CASE_FLT_FN (BUILT_IN_SIN):
10405 if (validate_arg (arg0, REAL_TYPE))
10406 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10407 break;
10408
10409 CASE_FLT_FN (BUILT_IN_COS):
10410 return fold_builtin_cos (loc, arg0, type, fndecl);
10411
10412 CASE_FLT_FN (BUILT_IN_TAN):
10413 return fold_builtin_tan (arg0, type);
10414
10415 CASE_FLT_FN (BUILT_IN_CEXP):
10416 return fold_builtin_cexp (loc, arg0, type);
10417
10418 CASE_FLT_FN (BUILT_IN_CEXPI):
10419 if (validate_arg (arg0, REAL_TYPE))
10420 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10421 break;
10422
10423 CASE_FLT_FN (BUILT_IN_SINH):
10424 if (validate_arg (arg0, REAL_TYPE))
10425 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10426 break;
10427
10428 CASE_FLT_FN (BUILT_IN_COSH):
10429 return fold_builtin_cosh (loc, arg0, type, fndecl);
10430
10431 CASE_FLT_FN (BUILT_IN_TANH):
10432 if (validate_arg (arg0, REAL_TYPE))
10433 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10434 break;
10435
10436 CASE_FLT_FN (BUILT_IN_ERF):
10437 if (validate_arg (arg0, REAL_TYPE))
10438 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10439 break;
10440
10441 CASE_FLT_FN (BUILT_IN_ERFC):
10442 if (validate_arg (arg0, REAL_TYPE))
10443 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10444 break;
10445
10446 CASE_FLT_FN (BUILT_IN_TGAMMA):
10447 if (validate_arg (arg0, REAL_TYPE))
10448 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10449 break;
10450
10451 CASE_FLT_FN (BUILT_IN_EXP):
10452 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10453
10454 CASE_FLT_FN (BUILT_IN_EXP2):
10455 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10456
10457 CASE_FLT_FN (BUILT_IN_EXP10):
10458 CASE_FLT_FN (BUILT_IN_POW10):
10459 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10460
10461 CASE_FLT_FN (BUILT_IN_EXPM1):
10462 if (validate_arg (arg0, REAL_TYPE))
10463 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10464 break;
10465
10466 CASE_FLT_FN (BUILT_IN_LOG):
10467 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10468
10469 CASE_FLT_FN (BUILT_IN_LOG2):
10470 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10471
10472 CASE_FLT_FN (BUILT_IN_LOG10):
10473 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10474
10475 CASE_FLT_FN (BUILT_IN_LOG1P):
10476 if (validate_arg (arg0, REAL_TYPE))
10477 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10478 &dconstm1, NULL, false);
10479 break;
10480
10481 CASE_FLT_FN (BUILT_IN_J0):
10482 if (validate_arg (arg0, REAL_TYPE))
10483 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10484 NULL, NULL, 0);
10485 break;
10486
10487 CASE_FLT_FN (BUILT_IN_J1):
10488 if (validate_arg (arg0, REAL_TYPE))
10489 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10490 NULL, NULL, 0);
10491 break;
10492
10493 CASE_FLT_FN (BUILT_IN_Y0):
10494 if (validate_arg (arg0, REAL_TYPE))
10495 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10496 &dconst0, NULL, false);
10497 break;
10498
10499 CASE_FLT_FN (BUILT_IN_Y1):
10500 if (validate_arg (arg0, REAL_TYPE))
10501 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10502 &dconst0, NULL, false);
10503 break;
10504
10505 CASE_FLT_FN (BUILT_IN_NAN):
10506 case BUILT_IN_NAND32:
10507 case BUILT_IN_NAND64:
10508 case BUILT_IN_NAND128:
10509 return fold_builtin_nan (arg0, type, true);
10510
10511 CASE_FLT_FN (BUILT_IN_NANS):
10512 return fold_builtin_nan (arg0, type, false);
10513
10514 CASE_FLT_FN (BUILT_IN_FLOOR):
10515 return fold_builtin_floor (loc, fndecl, arg0);
10516
10517 CASE_FLT_FN (BUILT_IN_CEIL):
10518 return fold_builtin_ceil (loc, fndecl, arg0);
10519
10520 CASE_FLT_FN (BUILT_IN_TRUNC):
10521 return fold_builtin_trunc (loc, fndecl, arg0);
10522
10523 CASE_FLT_FN (BUILT_IN_ROUND):
10524 return fold_builtin_round (loc, fndecl, arg0);
10525
10526 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10527 CASE_FLT_FN (BUILT_IN_RINT):
10528 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10529
10530 CASE_FLT_FN (BUILT_IN_ICEIL):
10531 CASE_FLT_FN (BUILT_IN_LCEIL):
10532 CASE_FLT_FN (BUILT_IN_LLCEIL):
10533 CASE_FLT_FN (BUILT_IN_LFLOOR):
10534 CASE_FLT_FN (BUILT_IN_IFLOOR):
10535 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10536 CASE_FLT_FN (BUILT_IN_IROUND):
10537 CASE_FLT_FN (BUILT_IN_LROUND):
10538 CASE_FLT_FN (BUILT_IN_LLROUND):
10539 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10540
10541 CASE_FLT_FN (BUILT_IN_IRINT):
10542 CASE_FLT_FN (BUILT_IN_LRINT):
10543 CASE_FLT_FN (BUILT_IN_LLRINT):
10544 return fold_fixed_mathfn (loc, fndecl, arg0);
10545
10546 case BUILT_IN_BSWAP16:
10547 case BUILT_IN_BSWAP32:
10548 case BUILT_IN_BSWAP64:
10549 return fold_builtin_bswap (fndecl, arg0);
10550
10551 CASE_INT_FN (BUILT_IN_FFS):
10552 CASE_INT_FN (BUILT_IN_CLZ):
10553 CASE_INT_FN (BUILT_IN_CTZ):
10554 CASE_INT_FN (BUILT_IN_CLRSB):
10555 CASE_INT_FN (BUILT_IN_POPCOUNT):
10556 CASE_INT_FN (BUILT_IN_PARITY):
10557 return fold_builtin_bitop (fndecl, arg0);
10558
10559 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10560 return fold_builtin_signbit (loc, arg0, type);
10561
10562 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10563 return fold_builtin_significand (loc, arg0, type);
10564
10565 CASE_FLT_FN (BUILT_IN_ILOGB):
10566 CASE_FLT_FN (BUILT_IN_LOGB):
10567 return fold_builtin_logb (loc, arg0, type);
10568
10569 case BUILT_IN_ISASCII:
10570 return fold_builtin_isascii (loc, arg0);
10571
10572 case BUILT_IN_TOASCII:
10573 return fold_builtin_toascii (loc, arg0);
10574
10575 case BUILT_IN_ISDIGIT:
10576 return fold_builtin_isdigit (loc, arg0);
10577
10578 CASE_FLT_FN (BUILT_IN_FINITE):
10579 case BUILT_IN_FINITED32:
10580 case BUILT_IN_FINITED64:
10581 case BUILT_IN_FINITED128:
10582 case BUILT_IN_ISFINITE:
10583 {
10584 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10585 if (ret)
10586 return ret;
10587 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10588 }
10589
10590 CASE_FLT_FN (BUILT_IN_ISINF):
10591 case BUILT_IN_ISINFD32:
10592 case BUILT_IN_ISINFD64:
10593 case BUILT_IN_ISINFD128:
10594 {
10595 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10596 if (ret)
10597 return ret;
10598 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10599 }
10600
10601 case BUILT_IN_ISNORMAL:
10602 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10603
10604 case BUILT_IN_ISINF_SIGN:
10605 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10606
10607 CASE_FLT_FN (BUILT_IN_ISNAN):
10608 case BUILT_IN_ISNAND32:
10609 case BUILT_IN_ISNAND64:
10610 case BUILT_IN_ISNAND128:
10611 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10612
10613 case BUILT_IN_PRINTF:
10614 case BUILT_IN_PRINTF_UNLOCKED:
10615 case BUILT_IN_VPRINTF:
10616 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10617
10618 case BUILT_IN_FREE:
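/* free (NULL) is a no-op, so such a call can simply be deleted.  */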
10619 if (integer_zerop (arg0))
10620 return build_empty_stmt (loc);
10621 break;
10622
10623 default:
10624 break;
10625 }
10626
10627 return NULL_TREE;
10628
10629 }
10630
10631 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10632 IGNORE is true if the result of the function call is ignored. This
10633 function returns NULL_TREE if no simplification was possible. */
10634
10635 static tree
10636 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10637 {
10638 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10639 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10640
10641 switch (fcode)
10642 {
10643 CASE_FLT_FN (BUILT_IN_JN):
10644 if (validate_arg (arg0, INTEGER_TYPE)
10645 && validate_arg (arg1, REAL_TYPE))
10646 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10647 break;
10648
10649 CASE_FLT_FN (BUILT_IN_YN):
10650 if (validate_arg (arg0, INTEGER_TYPE)
10651 && validate_arg (arg1, REAL_TYPE))
10652 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10653 &dconst0, false);
10654 break;
10655
10656 CASE_FLT_FN (BUILT_IN_DREM):
10657 CASE_FLT_FN (BUILT_IN_REMAINDER):
10658 if (validate_arg (arg0, REAL_TYPE)
10659 && validate_arg (arg1, REAL_TYPE))
10660 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10661 break;
10662
10663 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10664 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10665 if (validate_arg (arg0, REAL_TYPE)
10666 && validate_arg (arg1, POINTER_TYPE))
10667 return do_mpfr_lgamma_r (arg0, arg1, type);
10668 break;
10669
10670 CASE_FLT_FN (BUILT_IN_ATAN2):
10671 if (validate_arg (arg0, REAL_TYPE)
10672 && validate_arg (arg1, REAL_TYPE))
10673 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10674 break;
10675
10676 CASE_FLT_FN (BUILT_IN_FDIM):
10677 if (validate_arg (arg0, REAL_TYPE)
10678 && validate_arg (arg1, REAL_TYPE))
10679 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10680 break;
10681
10682 CASE_FLT_FN (BUILT_IN_HYPOT):
10683 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10684
10685 CASE_FLT_FN (BUILT_IN_CPOW):
10686 if (validate_arg (arg0, COMPLEX_TYPE)
10687 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10688 && validate_arg (arg1, COMPLEX_TYPE)
10689 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10690 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10691 break;
10692
10693 CASE_FLT_FN (BUILT_IN_LDEXP):
10694 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10695 CASE_FLT_FN (BUILT_IN_SCALBN):
10696 CASE_FLT_FN (BUILT_IN_SCALBLN):
10697 return fold_builtin_load_exponent (loc, arg0, arg1,
10698 type, /*ldexp=*/false);
10699
10700 CASE_FLT_FN (BUILT_IN_FREXP):
10701 return fold_builtin_frexp (loc, arg0, arg1, type);
10702
10703 CASE_FLT_FN (BUILT_IN_MODF):
10704 return fold_builtin_modf (loc, arg0, arg1, type);
10705
10706 case BUILT_IN_BZERO:
10707 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10708
10709 case BUILT_IN_FPUTS:
10710 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10711
10712 case BUILT_IN_FPUTS_UNLOCKED:
10713 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10714
10715 case BUILT_IN_STRSTR:
10716 return fold_builtin_strstr (loc, arg0, arg1, type);
10717
10718 case BUILT_IN_STRCAT:
10719 return fold_builtin_strcat (loc, arg0, arg1);
10720
10721 case BUILT_IN_STRSPN:
10722 return fold_builtin_strspn (loc, arg0, arg1);
10723
10724 case BUILT_IN_STRCSPN:
10725 return fold_builtin_strcspn (loc, arg0, arg1);
10726
10727 case BUILT_IN_STRCHR:
10728 case BUILT_IN_INDEX:
10729 return fold_builtin_strchr (loc, arg0, arg1, type);
10730
10731 case BUILT_IN_STRRCHR:
10732 case BUILT_IN_RINDEX:
10733 return fold_builtin_strrchr (loc, arg0, arg1, type);
10734
10735 case BUILT_IN_STRCPY:
10736 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10737
10738 case BUILT_IN_STPCPY:
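/* When the result is ignored, stpcpy (dst, src) can be emitted as the
   cheaper strcpy (dst, src); the two differ only in their return
   value.  */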
10739 if (ignore)
10740 {
10741 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10742 if (!fn)
10743 break;
10744
10745 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10746 }
10747 else
10748 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10749 break;
10750
10751 case BUILT_IN_STRCMP:
10752 return fold_builtin_strcmp (loc, arg0, arg1);
10753
10754 case BUILT_IN_STRPBRK:
10755 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10756
10757 case BUILT_IN_EXPECT:
10758 return fold_builtin_expect (loc, arg0, arg1);
10759
10760 CASE_FLT_FN (BUILT_IN_POW):
10761 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10762
10763 CASE_FLT_FN (BUILT_IN_POWI):
10764 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10765
10766 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10767 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10768
10769 CASE_FLT_FN (BUILT_IN_FMIN):
10770 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10771
10772 CASE_FLT_FN (BUILT_IN_FMAX):
10773 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10774
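/* The foldings below build, in effect, the negated inverse comparison:
   e.g. (illustrative) isgreater (x, y) becomes !(x UNLE y) when NaNs
   may occur and !(x <= y) otherwise, so no spurious floating-point
   exception is raised for unordered operands.  */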
10775 case BUILT_IN_ISGREATER:
10776 return fold_builtin_unordered_cmp (loc, fndecl,
10777 arg0, arg1, UNLE_EXPR, LE_EXPR);
10778 case BUILT_IN_ISGREATEREQUAL:
10779 return fold_builtin_unordered_cmp (loc, fndecl,
10780 arg0, arg1, UNLT_EXPR, LT_EXPR);
10781 case BUILT_IN_ISLESS:
10782 return fold_builtin_unordered_cmp (loc, fndecl,
10783 arg0, arg1, UNGE_EXPR, GE_EXPR);
10784 case BUILT_IN_ISLESSEQUAL:
10785 return fold_builtin_unordered_cmp (loc, fndecl,
10786 arg0, arg1, UNGT_EXPR, GT_EXPR);
10787 case BUILT_IN_ISLESSGREATER:
10788 return fold_builtin_unordered_cmp (loc, fndecl,
10789 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10790 case BUILT_IN_ISUNORDERED:
10791 return fold_builtin_unordered_cmp (loc, fndecl,
10792 arg0, arg1, UNORDERED_EXPR,
10793 NOP_EXPR);
10794
10795 /* We do the folding for va_start in the expander. */
10796 case BUILT_IN_VA_START:
10797 break;
10798
10799 case BUILT_IN_SPRINTF:
10800 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10801
10802 case BUILT_IN_OBJECT_SIZE:
10803 return fold_builtin_object_size (arg0, arg1);
10804
10805 case BUILT_IN_PRINTF:
10806 case BUILT_IN_PRINTF_UNLOCKED:
10807 case BUILT_IN_VPRINTF:
10808 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10809
10810 case BUILT_IN_PRINTF_CHK:
10811 case BUILT_IN_VPRINTF_CHK:
10812 if (!validate_arg (arg0, INTEGER_TYPE)
10813 || TREE_SIDE_EFFECTS (arg0))
10814 return NULL_TREE;
10815 else
10816 return fold_builtin_printf (loc, fndecl,
10817 arg1, NULL_TREE, ignore, fcode);
10818 break;
10819
10820 case BUILT_IN_FPRINTF:
10821 case BUILT_IN_FPRINTF_UNLOCKED:
10822 case BUILT_IN_VFPRINTF:
10823 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10824 ignore, fcode);
10825
10826 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10827 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10828
10829 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10830 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10831
10832 default:
10833 break;
10834 }
10835 return NULL_TREE;
10836 }
10837
10838 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10839 and ARG2. IGNORE is true if the result of the function call is ignored.
10840 This function returns NULL_TREE if no simplification was possible. */
10841
10842 static tree
10843 fold_builtin_3 (location_t loc, tree fndecl,
10844 tree arg0, tree arg1, tree arg2, bool ignore)
10845 {
10846 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10847 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10848 switch (fcode)
10849 {
10850
10851 CASE_FLT_FN (BUILT_IN_SINCOS):
10852 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10853
10854 CASE_FLT_FN (BUILT_IN_FMA):
10855 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10857
10858 CASE_FLT_FN (BUILT_IN_REMQUO):
10859 if (validate_arg (arg0, REAL_TYPE)
10860 && validate_arg (arg1, REAL_TYPE)
10861 && validate_arg (arg2, POINTER_TYPE))
10862 return do_mpfr_remquo (arg0, arg1, arg2);
10863 break;
10864
10865 case BUILT_IN_MEMSET:
10866 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10867
10868 case BUILT_IN_BCOPY:
10869 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10870 void_type_node, true, /*endp=*/3);
10871
10872 case BUILT_IN_MEMCPY:
10873 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10874 type, ignore, /*endp=*/0);
10875
10876 case BUILT_IN_MEMPCPY:
10877 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10878 type, ignore, /*endp=*/1);
10879
10880 case BUILT_IN_MEMMOVE:
10881 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10882 type, ignore, /*endp=*/3);
10883
10884 case BUILT_IN_STRNCAT:
10885 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10886
10887 case BUILT_IN_STRNCPY:
10888 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10889
10890 case BUILT_IN_STRNCMP:
10891 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10892
10893 case BUILT_IN_MEMCHR:
10894 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10895
10896 case BUILT_IN_BCMP:
10897 case BUILT_IN_MEMCMP:
10898 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10899
10900 case BUILT_IN_SPRINTF:
10901 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10902
10903 case BUILT_IN_SNPRINTF:
10904 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10905
10906 case BUILT_IN_STRCPY_CHK:
10907 case BUILT_IN_STPCPY_CHK:
10908 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10909 ignore, fcode);
10910
10911 case BUILT_IN_STRCAT_CHK:
10912 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10913
10914 case BUILT_IN_PRINTF_CHK:
10915 case BUILT_IN_VPRINTF_CHK:
10916 if (!validate_arg (arg0, INTEGER_TYPE)
10917 || TREE_SIDE_EFFECTS (arg0))
10918 return NULL_TREE;
10919 else
10920 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10921 break;
10922
10923 case BUILT_IN_FPRINTF:
10924 case BUILT_IN_FPRINTF_UNLOCKED:
10925 case BUILT_IN_VFPRINTF:
10926 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10927 ignore, fcode);
10928
10929 case BUILT_IN_FPRINTF_CHK:
10930 case BUILT_IN_VFPRINTF_CHK:
10931 if (!validate_arg (arg1, INTEGER_TYPE)
10932 || TREE_SIDE_EFFECTS (arg1))
10933 return NULL_TREE;
10934 else
10935 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10936 ignore, fcode);
10937
10938 default:
10939 break;
10940 }
10941 return NULL_TREE;
10942 }
10943
10944 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10945 ARG2, and ARG3. IGNORE is true if the result of the function call is
10946 ignored. This function returns NULL_TREE if no simplification was
10947 possible. */
10948
10949 static tree
10950 fold_builtin_4 (location_t loc, tree fndecl,
10951 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10952 {
10953 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10954
10955 switch (fcode)
10956 {
10957 case BUILT_IN_MEMCPY_CHK:
10958 case BUILT_IN_MEMPCPY_CHK:
10959 case BUILT_IN_MEMMOVE_CHK:
10960 case BUILT_IN_MEMSET_CHK:
10961 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10962 NULL_TREE, ignore,
10963 DECL_FUNCTION_CODE (fndecl));
10964
10965 case BUILT_IN_STRNCPY_CHK:
10966 case BUILT_IN_STPNCPY_CHK:
10967 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
10968 ignore, fcode);
10969
10970 case BUILT_IN_STRNCAT_CHK:
10971 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10972
10973 case BUILT_IN_SNPRINTF:
10974 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10975
10976 case BUILT_IN_FPRINTF_CHK:
10977 case BUILT_IN_VFPRINTF_CHK:
10978 if (!validate_arg (arg1, INTEGER_TYPE)
10979 || TREE_SIDE_EFFECTS (arg1))
10980 return NULL_TREE;
10981 else
10982 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10983 ignore, fcode);
10984 break;
10985
10986 default:
10987 break;
10988 }
10989 return NULL_TREE;
10990 }
10991
10992 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10993 arguments, where NARGS <= 4. IGNORE is true if the result of the
10994 function call is ignored. This function returns NULL_TREE if no
10995 simplification was possible. Note that this only folds builtins with
10996 fixed argument patterns. Foldings that do varargs-to-varargs
10997 transformations, or that match calls with more than 4 arguments,
10998 need to be handled with fold_builtin_varargs instead. */
10999
11000 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11001
11002 static tree
11003 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11004 {
11005 tree ret = NULL_TREE;
11006
11007 switch (nargs)
11008 {
11009 case 0:
11010 ret = fold_builtin_0 (loc, fndecl, ignore);
11011 break;
11012 case 1:
11013 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11014 break;
11015 case 2:
11016 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11017 break;
11018 case 3:
11019 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11020 break;
11021 case 4:
11022 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11023 ignore);
11024 break;
11025 default:
11026 break;
11027 }
11028 if (ret)
11029 {
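/* Wrap the folded result in a NOP_EXPR and mark it TREE_NO_WARNING so
   that replacing the original call does not trigger spurious
   "statement with no effect" warnings; see fold_call_expr below.  */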
11030 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11031 SET_EXPR_LOCATION (ret, loc);
11032 TREE_NO_WARNING (ret) = 1;
11033 return ret;
11034 }
11035 return NULL_TREE;
11036 }
11037
11038 /* Builtins with folding operations that operate on "..." arguments
11039 need special handling; we need to store the arguments in a convenient
11040 data structure before attempting any folding. Fortunately there are
11041 only a few builtins that fall into this category. FNDECL is the
11042 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11043 result of the function call is ignored. */
11044
11045 static tree
11046 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11047 bool ignore ATTRIBUTE_UNUSED)
11048 {
11049 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11050 tree ret = NULL_TREE;
11051
11052 switch (fcode)
11053 {
11054 case BUILT_IN_SPRINTF_CHK:
11055 case BUILT_IN_VSPRINTF_CHK:
11056 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11057 break;
11058
11059 case BUILT_IN_SNPRINTF_CHK:
11060 case BUILT_IN_VSNPRINTF_CHK:
11061 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11062 break;
11063
11064 case BUILT_IN_FPCLASSIFY:
11065 ret = fold_builtin_fpclassify (loc, exp);
11066 break;
11067
11068 default:
11069 break;
11070 }
11071 if (ret)
11072 {
11073 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11074 SET_EXPR_LOCATION (ret, loc);
11075 TREE_NO_WARNING (ret) = 1;
11076 return ret;
11077 }
11078 return NULL_TREE;
11079 }
11080
11081 /* Return true if FNDECL shouldn't be folded right now.
11082 If a built-in function has an inline attribute always_inline
11083 wrapper, defer folding it until after always_inline functions have
11084 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11085 might not be performed. */
11086
11087 bool
11088 avoid_folding_inline_builtin (tree fndecl)
11089 {
11090 return (DECL_DECLARED_INLINE_P (fndecl)
11091 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11092 && cfun
11093 && !cfun->always_inline_functions_inlined
11094 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11095 }
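/* For example (illustrative): with -D_FORTIFY_SOURCE, glibc defines an
   always_inline wrapper around memcpy that calls __builtin___memcpy_chk;
   folding the builtin before that wrapper has been inlined would bypass
   the object-size checking the wrapper exists to provide.  */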
11096
11097 /* A wrapper function for builtin folding that prevents warnings for
11098 "statement without effect" and the like, caused by removing the
11099 call node earlier than the warning is generated. */
11100
11101 tree
11102 fold_call_expr (location_t loc, tree exp, bool ignore)
11103 {
11104 tree ret = NULL_TREE;
11105 tree fndecl = get_callee_fndecl (exp);
11106 if (fndecl
11107 && TREE_CODE (fndecl) == FUNCTION_DECL
11108 && DECL_BUILT_IN (fndecl)
11109 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11110 yet. Defer folding until we see all the arguments
11111 (after inlining). */
11112 && !CALL_EXPR_VA_ARG_PACK (exp))
11113 {
11114 int nargs = call_expr_nargs (exp);
11115
11116 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11117 instead the last argument is __builtin_va_arg_pack (). Defer folding
11118 even in that case, until arguments are finalized. */
11119 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11120 {
11121 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11122 if (fndecl2
11123 && TREE_CODE (fndecl2) == FUNCTION_DECL
11124 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11125 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11126 return NULL_TREE;
11127 }
11128
11129 if (avoid_folding_inline_builtin (fndecl))
11130 return NULL_TREE;
11131
11132 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11133 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11134 CALL_EXPR_ARGP (exp), ignore);
11135 else
11136 {
11137 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11138 {
11139 tree *args = CALL_EXPR_ARGP (exp);
11140 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11141 }
11142 if (!ret)
11143 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11144 if (ret)
11145 return ret;
11146 }
11147 }
11148 return NULL_TREE;
11149 }
11150
11151 /* Conveniently construct a function call expression. FNDECL names the
11152 function to be called and N arguments are passed in the array
11153 ARGARRAY. */
11154
11155 tree
11156 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11157 {
11158 tree fntype = TREE_TYPE (fndecl);
11159 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11160
11161 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11162 }
11163
11164 /* Conveniently construct a function call expression. FNDECL names the
11165 function to be called and the arguments are passed in the vector
11166 VEC. */
11167
11168 tree
11169 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11170 {
11171 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11172 vec_safe_address (vec));
11173 }
11174
11175
11176 /* Conveniently construct a function call expression. FNDECL names the
11177 function to be called, N is the number of arguments, and the "..."
11178 parameters are the argument expressions. */
11179
11180 tree
11181 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11182 {
11183 va_list ap;
11184 tree *argarray = XALLOCAVEC (tree, n);
11185 int i;
11186
11187 va_start (ap, n);
11188 for (i = 0; i < n; i++)
11189 argarray[i] = va_arg (ap, tree);
11190 va_end (ap);
11191 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11192 }
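/* Usage sketch (illustrative): to build a call to strlen (dst) one
   writes

     tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
     tree call = build_call_expr_loc (loc, fn, 1, dst);

   as done in fold_builtin_strcat below.  */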
11193
11194 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11195 varargs macros aren't supported by all bootstrap compilers. */
11196
11197 tree
11198 build_call_expr (tree fndecl, int n, ...)
11199 {
11200 va_list ap;
11201 tree *argarray = XALLOCAVEC (tree, n);
11202 int i;
11203
11204 va_start (ap, n);
11205 for (i = 0; i < n; i++)
11206 argarray[i] = va_arg (ap, tree);
11207 va_end (ap);
11208 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11209 }
11210
11211 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11212 N arguments are passed in the array ARGARRAY. */
11213
11214 tree
11215 fold_builtin_call_array (location_t loc, tree type,
11216 tree fn,
11217 int n,
11218 tree *argarray)
11219 {
11220 tree ret = NULL_TREE;
11221 tree exp;
11222
11223 if (TREE_CODE (fn) == ADDR_EXPR)
11224 {
11225 tree fndecl = TREE_OPERAND (fn, 0);
11226 if (TREE_CODE (fndecl) == FUNCTION_DECL
11227 && DECL_BUILT_IN (fndecl))
11228 {
11229 /* If last argument is __builtin_va_arg_pack (), arguments to this
11230 function are not finalized yet. Defer folding until they are. */
11231 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11232 {
11233 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11234 if (fndecl2
11235 && TREE_CODE (fndecl2) == FUNCTION_DECL
11236 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11237 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11238 return build_call_array_loc (loc, type, fn, n, argarray);
11239 }
11240 if (avoid_folding_inline_builtin (fndecl))
11241 return build_call_array_loc (loc, type, fn, n, argarray);
11242 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11243 {
11244 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11245 if (ret)
11246 return ret;
11247
11248 return build_call_array_loc (loc, type, fn, n, argarray);
11249 }
11250 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11251 {
11252 /* First try the transformations that don't require consing up
11253 an exp. */
11254 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11255 if (ret)
11256 return ret;
11257 }
11258
11259 /* If we got this far, we need to build an exp. */
11260 exp = build_call_array_loc (loc, type, fn, n, argarray);
11261 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11262 return ret ? ret : exp;
11263 }
11264 }
11265
11266 return build_call_array_loc (loc, type, fn, n, argarray);
11267 }
11268
11269 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11270 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11271 of arguments in ARGS to be omitted. OLDNARGS is the number of
11272 elements in ARGS. */
11273
11274 static tree
11275 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11276 int skip, tree fndecl, int n, va_list newargs)
11277 {
11278 int nargs = oldnargs - skip + n;
11279 tree *buffer;
11280
11281 if (n > 0)
11282 {
11283 int i, j;
11284
11285 buffer = XALLOCAVEC (tree, nargs);
11286 for (i = 0; i < n; i++)
11287 buffer[i] = va_arg (newargs, tree);
11288 for (j = skip; j < oldnargs; j++, i++)
11289 buffer[i] = args[j];
11290 }
11291 else
11292 buffer = args + skip;
11293
11294 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11295 }
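/* For example (illustrative): the __sprintf_chk folding elsewhere in
   this file uses SKIP == 4 and N == 2 to rewrite

     __sprintf_chk (dest, flag, size, fmt, args...)

   as

     sprintf (dest, fmt, args...)

   dropping the first four arguments and supplying DEST and FMT anew.  */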
11296
11297 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11298 list ARGS along with N new arguments specified as the "..."
11299 parameters. SKIP is the number of arguments in ARGS to be omitted.
11300 OLDNARGS is the number of elements in ARGS. */
11301
11302 static tree
11303 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11304 int skip, tree fndecl, int n, ...)
11305 {
11306 va_list ap;
11307 tree t;
11308
11309 va_start (ap, n);
11310 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11311 va_end (ap);
11312
11313 return t;
11314 }
11315
11316 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11317 along with N new arguments specified as the "..." parameters. SKIP
11318 is the number of arguments in EXP to be omitted. This function is used
11319 to do varargs-to-varargs transformations. */
11320
11321 static tree
11322 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11323 {
11324 va_list ap;
11325 tree t;
11326
11327 va_start (ap, n);
11328 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11329 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11330 va_end (ap);
11331
11332 return t;
11333 }
11334
11335 /* Validate a single argument ARG against a tree code CODE representing
11336 a type. */
11337
11338 static bool
11339 validate_arg (const_tree arg, enum tree_code code)
11340 {
11341 if (!arg)
11342 return false;
11343 else if (code == POINTER_TYPE)
11344 return POINTER_TYPE_P (TREE_TYPE (arg));
11345 else if (code == INTEGER_TYPE)
11346 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11347 return code == TREE_CODE (TREE_TYPE (arg));
11348 }
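/* Note that the two special cases above mean, for example, that
   validate_arg (arg, POINTER_TYPE) accepts any pointer type and
   validate_arg (arg, INTEGER_TYPE) accepts any integral type,
   including enumeration and boolean types.  */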
11349
11350 /* This function validates the types of a function call argument list
11351 against a specified list of tree_codes. If the last specifier is a 0,
11352 that represents an ellipsis; otherwise the last specifier must be a
11353 VOID_TYPE.
11354
11355 This is the GIMPLE version of validate_arglist. Eventually we want to
11356 completely convert builtins.c to work from GIMPLEs and the tree based
11357 validate_arglist will then be removed. */
11358
11359 bool
11360 validate_gimple_arglist (const_gimple call, ...)
11361 {
11362 enum tree_code code;
11363 bool res = false;
11364 va_list ap;
11365 const_tree arg;
11366 size_t i;
11367
11368 va_start (ap, call);
11369 i = 0;
11370
11371 do
11372 {
11373 code = (enum tree_code) va_arg (ap, int);
11374 switch (code)
11375 {
11376 case 0:
11377 /* This signifies an ellipsis; any further arguments are all ok. */
11378 res = true;
11379 goto end;
11380 case VOID_TYPE:
11381 /* This signifies an endlink; if no arguments remain, return
11382 true, otherwise return false. */
11383 res = (i == gimple_call_num_args (call));
11384 goto end;
11385 default:
11386 /* If no parameters remain or the parameter's code does not
11387 match the specified code, return false. Otherwise continue
11388 checking any remaining arguments. */
11389 arg = gimple_call_arg (call, i++);
11390 if (!validate_arg (arg, code))
11391 goto end;
11392 break;
11393 }
11394 }
11395 while (1);
11396
11397 /* We need gotos here since we want a single exit point at which
11398 va_end is called. */
11399 end: ;
11400 va_end (ap);
11401
11402 return res;
11403 }
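/* Usage sketch (illustrative): a builtin taking two pointers is checked
   with

     validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   while a varargs builtin terminates the specifier list with 0.  */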
11404
11405 /* This function validates the types of a function call argument list
11406 against a specified list of tree_codes. If the last specifier is a 0,
11407 that represents an ellipsis; otherwise the last specifier must be a
11408 VOID_TYPE. */
11409
11410 bool
11411 validate_arglist (const_tree callexpr, ...)
11412 {
11413 enum tree_code code;
11414 bool res = false;
11415 va_list ap;
11416 const_call_expr_arg_iterator iter;
11417 const_tree arg;
11418
11419 va_start (ap, callexpr);
11420 init_const_call_expr_arg_iterator (callexpr, &iter);
11421
11422 do
11423 {
11424 code = (enum tree_code) va_arg (ap, int);
11425 switch (code)
11426 {
11427 case 0:
11428 /* This signifies an ellipsis; any further arguments are all ok. */
11429 res = true;
11430 goto end;
11431 case VOID_TYPE:
11432 /* This signifies an endlink; if no arguments remain, return
11433 true, otherwise return false. */
11434 res = !more_const_call_expr_args_p (&iter);
11435 goto end;
11436 default:
11437 /* If no parameters remain or the parameter's code does not
11438 match the specified code, return false. Otherwise continue
11439 checking any remaining arguments. */
11440 arg = next_const_call_expr_arg (&iter);
11441 if (!validate_arg (arg, code))
11442 goto end;
11443 break;
11444 }
11445 }
11446 while (1);
11447
11448 /* We need gotos here since we want a single exit point at which
11449 va_end is called. */
11450 end: ;
11451 va_end (ap);
11452
11453 return res;
11454 }
11455
11456 /* Default target-specific builtin expander that does nothing. */
11457
11458 rtx
11459 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11460 rtx target ATTRIBUTE_UNUSED,
11461 rtx subtarget ATTRIBUTE_UNUSED,
11462 enum machine_mode mode ATTRIBUTE_UNUSED,
11463 int ignore ATTRIBUTE_UNUSED)
11464 {
11465 return NULL_RTX;
11466 }
11467
11468 /* Returns true if EXP represents data that would potentially reside
11469 in a readonly section. */
11470
11471 static bool
11472 readonly_data_expr (tree exp)
11473 {
11474 STRIP_NOPS (exp);
11475
11476 if (TREE_CODE (exp) != ADDR_EXPR)
11477 return false;
11478
11479 exp = get_base_address (TREE_OPERAND (exp, 0));
11480 if (!exp)
11481 return false;
11482
11483 /* Make sure we call decl_readonly_section only for trees it
11484 can handle (since it returns true for everything it doesn't
11485 understand). */
11486 if (TREE_CODE (exp) == STRING_CST
11487 || TREE_CODE (exp) == CONSTRUCTOR
11488 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11489 return decl_readonly_section (exp, 0);
11490 else
11491 return false;
11492 }
11493
11494 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11495 to the call, and TYPE is its return type.
11496
11497 Return NULL_TREE if no simplification was possible, otherwise return the
11498 simplified form of the call as a tree.
11499
11500 The simplified form may be a constant or other expression which
11501 computes the same value, but in a more efficient manner (including
11502 calls to other builtin functions).
11503
11504 The call may contain arguments which need to be evaluated, but
11505 which are not useful to determine the result of the call. In
11506 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11507 COMPOUND_EXPR will be an argument which must be evaluated.
11508 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11509 COMPOUND_EXPR in the chain will contain the tree for the simplified
11510 form of the builtin function call. */
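/* For example (illustrative): strstr (s, "") folds to (char *) s,
   strstr (s, "c") folds to strchr (s, 'c'), and a call where both
   arguments are string literals folds to a constant offset into S1 or
   to a null pointer.  */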
11511
11512 static tree
11513 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11514 {
11515 if (!validate_arg (s1, POINTER_TYPE)
11516 || !validate_arg (s2, POINTER_TYPE))
11517 return NULL_TREE;
11518 else
11519 {
11520 tree fn;
11521 const char *p1, *p2;
11522
11523 p2 = c_getstr (s2);
11524 if (p2 == NULL)
11525 return NULL_TREE;
11526
11527 p1 = c_getstr (s1);
11528 if (p1 != NULL)
11529 {
11530 const char *r = strstr (p1, p2);
11531 tree tem;
11532
11533 if (r == NULL)
11534 return build_int_cst (TREE_TYPE (s1), 0);
11535
11536 /* Return an offset into the constant string argument. */
11537 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11538 return fold_convert_loc (loc, type, tem);
11539 }
11540
11541 /* The argument is const char *, and the result is char *, so we need
11542 a type conversion here to avoid a warning. */
11543 if (p2[0] == '\0')
11544 return fold_convert_loc (loc, type, s1);
11545
11546 if (p2[1] != '\0')
11547 return NULL_TREE;
11548
11549 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11550 if (!fn)
11551 return NULL_TREE;
11552
11553 /* New argument list transforming strstr(s1, s2) to
11554 strchr(s1, s2[0]). */
11555 return build_call_expr_loc (loc, fn, 2, s1,
11556 build_int_cst (integer_type_node, p2[0]));
11557 }
11558 }
11559
11560 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11561 the call, and TYPE is its return type.
11562
11563 Return NULL_TREE if no simplification was possible, otherwise return the
11564 simplified form of the call as a tree.
11565
11566 The simplified form may be a constant or other expression which
11567 computes the same value, but in a more efficient manner (including
11568 calls to other builtin functions).
11569
11570 The call may contain arguments which need to be evaluated, but
11571 which are not useful to determine the result of the call. In
11572 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11573 COMPOUND_EXPR will be an argument which must be evaluated.
11574 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11575 COMPOUND_EXPR in the chain will contain the tree for the simplified
11576 form of the builtin function call. */
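/* For example (illustrative): strchr ("hello", 'l') folds to a pointer
   constant two bytes past the start of the literal, and a character not
   present folds to a null pointer; nothing is done for a non-constant
   S1.  */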
11577
11578 static tree
11579 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11580 {
11581 if (!validate_arg (s1, POINTER_TYPE)
11582 || !validate_arg (s2, INTEGER_TYPE))
11583 return NULL_TREE;
11584 else
11585 {
11586 const char *p1;
11587
11588 if (TREE_CODE (s2) != INTEGER_CST)
11589 return NULL_TREE;
11590
11591 p1 = c_getstr (s1);
11592 if (p1 != NULL)
11593 {
11594 char c;
11595 const char *r;
11596 tree tem;
11597
11598 if (target_char_cast (s2, &c))
11599 return NULL_TREE;
11600
11601 r = strchr (p1, c);
11602
11603 if (r == NULL)
11604 return build_int_cst (TREE_TYPE (s1), 0);
11605
11606 /* Return an offset into the constant string argument. */
11607 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11608 return fold_convert_loc (loc, type, tem);
11609 }
11610 return NULL_TREE;
11611 }
11612 }
11613
11614 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11615 the call, and TYPE is its return type.
11616
11617 Return NULL_TREE if no simplification was possible, otherwise return the
11618 simplified form of the call as a tree.
11619
11620 The simplified form may be a constant or other expression which
11621 computes the same value, but in a more efficient manner (including
11622 calls to other builtin functions).
11623
11624 The call may contain arguments which need to be evaluated, but
11625 which are not useful to determine the result of the call. In
11626 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11627 COMPOUND_EXPR will be an argument which must be evaluated.
11628 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11629 COMPOUND_EXPR in the chain will contain the tree for the simplified
11630 form of the builtin function call. */
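/* For example (illustrative): strrchr (s, 0) becomes strchr (s, 0),
   since both locate the terminating NUL, and fully constant calls fold
   to a pointer constant or a null pointer.  */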
11631
11632 static tree
11633 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11634 {
11635 if (!validate_arg (s1, POINTER_TYPE)
11636 || !validate_arg (s2, INTEGER_TYPE))
11637 return NULL_TREE;
11638 else
11639 {
11640 tree fn;
11641 const char *p1;
11642
11643 if (TREE_CODE (s2) != INTEGER_CST)
11644 return NULL_TREE;
11645
11646 p1 = c_getstr (s1);
11647 if (p1 != NULL)
11648 {
11649 char c;
11650 const char *r;
11651 tree tem;
11652
11653 if (target_char_cast (s2, &c))
11654 return NULL_TREE;
11655
11656 r = strrchr (p1, c);
11657
11658 if (r == NULL)
11659 return build_int_cst (TREE_TYPE (s1), 0);
11660
11661 /* Return an offset into the constant string argument. */
11662 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11663 return fold_convert_loc (loc, type, tem);
11664 }
11665
11666 if (! integer_zerop (s2))
11667 return NULL_TREE;
11668
11669 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11670 if (!fn)
11671 return NULL_TREE;
11672
11673 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11674 return build_call_expr_loc (loc, fn, 2, s1, s2);
11675 }
11676 }
11677
11678 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11679 to the call, and TYPE is its return type.
11680
11681 Return NULL_TREE if no simplification was possible, otherwise return the
11682 simplified form of the call as a tree.
11683
11684 The simplified form may be a constant or other expression which
11685 computes the same value, but in a more efficient manner (including
11686 calls to other builtin functions).
11687
11688 The call may contain arguments which need to be evaluated, but
11689 which are not useful to determine the result of the call. In
11690 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11691 COMPOUND_EXPR will be an argument which must be evaluated.
11692 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11693 COMPOUND_EXPR in the chain will contain the tree for the simplified
11694 form of the builtin function call. */
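/* For example (illustrative): strpbrk (s, "") folds to a null pointer
   while still evaluating S for side effects, and strpbrk (s, "c")
   folds to strchr (s, 'c').  */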
11695
11696 static tree
11697 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11698 {
11699 if (!validate_arg (s1, POINTER_TYPE)
11700 || !validate_arg (s2, POINTER_TYPE))
11701 return NULL_TREE;
11702 else
11703 {
11704 tree fn;
11705 const char *p1, *p2;
11706
11707 p2 = c_getstr (s2);
11708 if (p2 == NULL)
11709 return NULL_TREE;
11710
11711 p1 = c_getstr (s1);
11712 if (p1 != NULL)
11713 {
11714 const char *r = strpbrk (p1, p2);
11715 tree tem;
11716
11717 if (r == NULL)
11718 return build_int_cst (TREE_TYPE (s1), 0);
11719
11720 /* Return an offset into the constant string argument. */
11721 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11722 return fold_convert_loc (loc, type, tem);
11723 }
11724
11725 if (p2[0] == '\0')
11726 /* strpbrk(x, "") == NULL.
11727 Evaluate and ignore s1 in case it had side-effects. */
11728 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11729
11730 if (p2[1] != '\0')
11731 return NULL_TREE; /* Really call strpbrk. */
11732
11733 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11734 if (!fn)
11735 return NULL_TREE;
11736
11737 /* New argument list transforming strpbrk(s1, s2) to
11738 strchr(s1, s2[0]). */
11739 return build_call_expr_loc (loc, fn, 2, s1,
11740 build_int_cst (integer_type_node, p2[0]));
11741 }
11742 }
11743
11744 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11745 to the call.
11746
11747 Return NULL_TREE if no simplification was possible, otherwise return the
11748 simplified form of the call as a tree.
11749
11750 The simplified form may be a constant or other expression which
11751 computes the same value, but in a more efficient manner (including
11752 calls to other builtin functions).
11753
11754 The call may contain arguments which need to be evaluated, but
11755 which are not useful to determine the result of the call. In
11756 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11757 COMPOUND_EXPR will be an argument which must be evaluated.
11758 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11759 COMPOUND_EXPR in the chain will contain the tree for the simplified
11760 form of the builtin function call. */
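/* For speed, the folding below effectively rewrites strcat (dst, src)
   as (illustrative)

     strcpy (dst + strlen (dst), src), dst

   chaining a COMPOUND_EXPR so the whole expression still yields DST.  */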
11761
11762 static tree
11763 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11764 {
11765 if (!validate_arg (dst, POINTER_TYPE)
11766 || !validate_arg (src, POINTER_TYPE))
11767 return NULL_TREE;
11768 else
11769 {
11770 const char *p = c_getstr (src);
11771
11772 /* If the string length is zero, return the dst parameter. */
11773 if (p && *p == '\0')
11774 return dst;
11775
11776 if (optimize_insn_for_speed_p ())
11777 {
11778 /* See if we can store by pieces into (dst + strlen(dst)). */
11779 tree newdst, call;
11780 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11781 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11782
11783 if (!strlen_fn || !strcpy_fn)
11784 return NULL_TREE;
11785
11786 /* If we don't have a movstr pattern, only emit a strcpy call
11787 when the length of the source string is computable (in that
11788 case we can probably use memcpy, later expanding to a
11789 sequence of mov instructions). If we have movstr
11790 instructions we can always emit strcpy calls. */
11791 if (!HAVE_movstr)
11792 {
11793 tree len = c_strlen (src, 1);
11794 if (! len || TREE_SIDE_EFFECTS (len))
11795 return NULL_TREE;
11796 }
11797
11798 /* Stabilize the argument list. */
11799 dst = builtin_save_expr (dst);
11800
11801 /* Create strlen (dst). */
11802 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11803 /* Create (dst p+ strlen (dst)). */
11804
11805 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11806 newdst = builtin_save_expr (newdst);
11807
11808 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11809 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11810 }
11811 return NULL_TREE;
11812 }
11813 }
11814
11815 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11816 arguments to the call.
11817
11818 Return NULL_TREE if no simplification was possible, otherwise return the
11819 simplified form of the call as a tree.
11820
11821 The simplified form may be a constant or other expression which
11822 computes the same value, but in a more efficient manner (including
11823 calls to other builtin functions).
11824
11825 The call may contain arguments which need to be evaluated, but
11826 which are not useful to determine the result of the call. In
11827 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11828 COMPOUND_EXPR will be an argument which must be evaluated.
11829 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11830 COMPOUND_EXPR in the chain will contain the tree for the simplified
11831 form of the builtin function call. */
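/* For example (illustrative): strncat (dst, src, 0) folds to DST, and
   strncat (dst, "ab", 5) becomes strcat (dst, "ab") since the bound is
   at least the length of the source string.  */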
11832
11833 static tree
11834 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11835 {
11836 if (!validate_arg (dst, POINTER_TYPE)
11837 || !validate_arg (src, POINTER_TYPE)
11838 || !validate_arg (len, INTEGER_TYPE))
11839 return NULL_TREE;
11840 else
11841 {
11842 const char *p = c_getstr (src);
11843
11844 /* If the requested length is zero, or the src parameter string
11845 length is zero, return the dst parameter. */
11846 if (integer_zerop (len) || (p && *p == '\0'))
11847 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11848
11849 /* If the requested len is greater than or equal to the string
11850 length, call strcat. */
11851 if (TREE_CODE (len) == INTEGER_CST && p
11852 && compare_tree_int (len, strlen (p)) >= 0)
11853 {
11854 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11855
11856 /* If the replacement _DECL isn't initialized, don't do the
11857 transformation. */
11858 if (!fn)
11859 return NULL_TREE;
11860
11861 return build_call_expr_loc (loc, fn, 2, dst, src);
11862 }
11863 return NULL_TREE;
11864 }
11865 }
11866
11867 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11868 to the call.
11869
11870 Return NULL_TREE if no simplification was possible, otherwise return the
11871 simplified form of the call as a tree.
11872
11873 The simplified form may be a constant or other expression which
11874 computes the same value, but in a more efficient manner (including
11875 calls to other builtin functions).
11876
11877 The call may contain arguments which need to be evaluated, but
11878 which are not useful to determine the result of the call. In
11879 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11880 COMPOUND_EXPR will be an argument which must be evaluated.
11881 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11882 COMPOUND_EXPR in the chain will contain the tree for the simplified
11883 form of the builtin function call. */
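/* For example (illustrative): strspn ("abcba", "ab") folds to the
   constant 2, and strspn (s, "") folds to 0 while still evaluating
   both arguments.  */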
11884
11885 static tree
11886 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11887 {
11888 if (!validate_arg (s1, POINTER_TYPE)
11889 || !validate_arg (s2, POINTER_TYPE))
11890 return NULL_TREE;
11891 else
11892 {
11893 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11894
11895 /* If both arguments are constants, evaluate at compile-time. */
11896 if (p1 && p2)
11897 {
11898 const size_t r = strspn (p1, p2);
11899 return build_int_cst (size_type_node, r);
11900 }
11901
11902 /* If either argument is "", the result is zero. */
11903 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11904 /* Evaluate and ignore both arguments in case either one has
11905 side-effects. */
11906 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11907 s1, s2);
11908 return NULL_TREE;
11909 }
11910 }
11911
11912 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11913 to the call.
11914
11915 Return NULL_TREE if no simplification was possible, otherwise return the
11916 simplified form of the call as a tree.
11917
11918 The simplified form may be a constant or other expression which
11919 computes the same value, but in a more efficient manner (including
11920 calls to other builtin functions).
11921
11922 The call may contain arguments which need to be evaluated, but
11923 which are not useful to determine the result of the call. In
11924 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11925 COMPOUND_EXPR will be an argument which must be evaluated.
11926 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11927 COMPOUND_EXPR in the chain will contain the tree for the simplified
11928 form of the builtin function call. */
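/* For example (illustrative): strcspn ("abc", "c") folds to the
   constant 2, strcspn ("", s) folds to 0, and strcspn (s, "") becomes
   strlen (s).  */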
11929
11930 static tree
11931 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11932 {
11933 if (!validate_arg (s1, POINTER_TYPE)
11934 || !validate_arg (s2, POINTER_TYPE))
11935 return NULL_TREE;
11936 else
11937 {
11938 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11939
11940 /* If both arguments are constants, evaluate at compile-time. */
11941 if (p1 && p2)
11942 {
11943 const size_t r = strcspn (p1, p2);
11944 return build_int_cst (size_type_node, r);
11945 }
11946
11947 /* If the first argument is "", the result is zero. */
11948 if (p1 && *p1 == '\0')
11949 {
11950 /* Evaluate and ignore argument s2 in case it has
11951 side-effects. */
11952 return omit_one_operand_loc (loc, size_type_node,
11953 size_zero_node, s2);
11954 }
11955
11956 /* If the second argument is "", return __builtin_strlen(s1). */
11957 if (p2 && *p2 == '\0')
11958 {
11959 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11960
11961 /* If the replacement _DECL isn't initialized, don't do the
11962 transformation. */
11963 if (!fn)
11964 return NULL_TREE;
11965
11966 return build_call_expr_loc (loc, fn, 1, s1);
11967 }
11968 return NULL_TREE;
11969 }
11970 }
11971
11972 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11973 to the call. IGNORE is true if the value returned
11974 by the builtin will be ignored. UNLOCKED is true if this is
11975 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11976 the known length of the string. Return NULL_TREE if no simplification
11977 was possible. */
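/* For example (illustrative): when the result is unused, fputs ("", f)
   is deleted, fputs ("x", f) becomes fputc ('x', f), and a longer
   known-length string becomes fwrite (s, 1, len, f) unless optimizing
   for size.  */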
11978
11979 tree
11980 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11981 bool ignore, bool unlocked, tree len)
11982 {
11983 /* If we're using an unlocked function, assume the other unlocked
11984 functions exist explicitly. */
11985 tree const fn_fputc = (unlocked
11986 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11987 : builtin_decl_implicit (BUILT_IN_FPUTC));
11988 tree const fn_fwrite = (unlocked
11989 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11990 : builtin_decl_implicit (BUILT_IN_FWRITE));
11991
11992 /* If the return value is used, don't do the transformation. */
11993 if (!ignore)
11994 return NULL_TREE;
11995
11996 /* Verify the arguments in the original call. */
11997 if (!validate_arg (arg0, POINTER_TYPE)
11998 || !validate_arg (arg1, POINTER_TYPE))
11999 return NULL_TREE;
12000
12001 if (! len)
12002 len = c_strlen (arg0, 0);
12003
12004 /* Get the length of the string passed to fputs. If the length
12005 can't be determined, punt. */
12006 if (!len
12007 || TREE_CODE (len) != INTEGER_CST)
12008 return NULL_TREE;
12009
12010 switch (compare_tree_int (len, 1))
12011 {
12012 case -1: /* length is 0, delete the call entirely. */
12013 return omit_one_operand_loc (loc, integer_type_node,
12014 integer_zero_node, arg1);
12015
12016 case 0: /* length is 1, call fputc. */
12017 {
12018 const char *p = c_getstr (arg0);
12019
12020 if (p != NULL)
12021 {
12022 if (fn_fputc)
12023 return build_call_expr_loc (loc, fn_fputc, 2,
12024 build_int_cst
12025 (integer_type_node, p[0]), arg1);
12026 else
12027 return NULL_TREE;
12028 }
12029 }
12030 /* FALLTHROUGH */
12031 case 1: /* length is greater than 1, call fwrite. */
12032 {
12033 /* If optimizing for size keep fputs. */
12034 if (optimize_function_for_size_p (cfun))
12035 return NULL_TREE;
12036 /* New argument list transforming fputs(string, stream) to
12037 fwrite(string, 1, len, stream). */
12038 if (fn_fwrite)
12039 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12040 size_one_node, len, arg1);
12041 else
12042 return NULL_TREE;
12043 }
12044 default:
12045 gcc_unreachable ();
12046 }
12047 return NULL_TREE;
12048 }
12049
12050 /* Fold the next_arg or va_start call EXP. Returns true if an error
12051 was produced, false otherwise. This is done so that we don't output
12052 the error or warning two or three times. */
12053
12054 bool
12055 fold_builtin_next_arg (tree exp, bool va_start_p)
12056 {
12057 tree fntype = TREE_TYPE (current_function_decl);
12058 int nargs = call_expr_nargs (exp);
12059 tree arg;
12060 /* There is a good chance the current input_location points inside the
12061 definition of the va_start macro (perhaps on the token for
12062 builtin) in a system header, so warnings will not be emitted.
12063 Use the location in real source code. */
12064 source_location current_location =
12065 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12066 NULL);
12067
12068 if (!stdarg_p (fntype))
12069 {
12070 error ("%<va_start%> used in function with fixed args");
12071 return true;
12072 }
12073
12074 if (va_start_p)
12075 {
12076 if (nargs != 2)
12077 {
12078 error ("wrong number of arguments to function %<va_start%>");
12079 return true;
12080 }
12081 arg = CALL_EXPR_ARG (exp, 1);
12082 }
12083 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0) once
12084 we have checked the arguments and, if needed, issued a warning. */
12085 else
12086 {
12087 if (nargs == 0)
12088 {
12089 /* Evidently an out of date version of <stdarg.h>; can't validate
12090 va_start's second argument, but can still work as intended. */
12091 warning_at (current_location,
12092 OPT_Wvarargs,
12093 "%<__builtin_next_arg%> called without an argument");
12094 return true;
12095 }
12096 else if (nargs > 1)
12097 {
12098 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12099 return true;
12100 }
12101 arg = CALL_EXPR_ARG (exp, 0);
12102 }
12103
12104 if (TREE_CODE (arg) == SSA_NAME)
12105 arg = SSA_NAME_VAR (arg);
12106
12107 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12108 or __builtin_next_arg (0) the first time we see it, after checking
12109 the arguments and if needed issuing a warning. */
12110 if (!integer_zerop (arg))
12111 {
12112 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12113
12114 /* Strip off all nops for the sake of the comparison. This
12115 is not quite the same as STRIP_NOPS. It does more.
12116 We must also strip off INDIRECT_REF for C++ reference
12117 parameters. */
12118 while (CONVERT_EXPR_P (arg)
12119 || TREE_CODE (arg) == INDIRECT_REF)
12120 arg = TREE_OPERAND (arg, 0);
12121 if (arg != last_parm)
12122 {
12123 /* FIXME: Sometimes the tree optimizers hand us something other
12124 than the last argument even though the user used the last
12125 argument. We just warn and treat the arg as the last
12126 argument, so that we do not generate wrong code because
12127 of it. */
12128 warning_at (current_location,
12129 OPT_Wvarargs,
12130 "second parameter of %<va_start%> not last named argument");
12131 }
12132
12133 /* Undefined by C99 7.15.1.4p4 (va_start):
12134 "If the parameter parmN is declared with the register storage
12135 class, with a function or array type, or with a type that is
12136 not compatible with the type that results after application of
12137 the default argument promotions, the behavior is undefined."
12138 */
12139 else if (DECL_REGISTER (arg))
12140 {
12141 warning_at (current_location,
12142 OPT_Wvarargs,
12143 "undefined behaviour when second parameter of "
12144 "%<va_start%> is declared with %<register%> storage");
12145 }
12146
12147 /* We want to verify the second parameter just once before the tree
12148 optimizers are run and then avoid keeping it in the tree,
12149 as otherwise we could warn even for correct code like:
12150 void foo (int i, ...)
12151 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12152 if (va_start_p)
12153 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12154 else
12155 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12156 }
12157 return false;
12158 }
12159
12160
12161 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12162 ORIG may be null if this is a 2-argument call. We don't attempt to
12163 simplify calls with more than 3 arguments.
12164
12165 Return NULL_TREE if no simplification was possible, otherwise return the
12166 simplified form of the call as a tree. If IGNORED is true, it means that
12167 the caller does not use the returned value of the function. */
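/* Illustrative sketches of the two folds performed below:

     sprintf (buf, "hello")     -->  strcpy (buf, "hello")   value 5
     sprintf (buf, "%s", str)   -->  strcpy (buf, str)       value strlen (str),
                                                             when constant  */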
12168
12169 static tree
12170 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12171 tree orig, int ignored)
12172 {
12173 tree call, retval;
12174 const char *fmt_str = NULL;
12175
12176 /* Verify the required arguments in the original call. We deal with two
12177 types of sprintf() calls: 'sprintf (dest, fmt)' and
12178 'sprintf (dest, "%s", orig)'. */
12179 if (!validate_arg (dest, POINTER_TYPE)
12180 || !validate_arg (fmt, POINTER_TYPE))
12181 return NULL_TREE;
12182 if (orig && !validate_arg (orig, POINTER_TYPE))
12183 return NULL_TREE;
12184
12185 /* Check whether the format is a literal string constant. */
12186 fmt_str = c_getstr (fmt);
12187 if (fmt_str == NULL)
12188 return NULL_TREE;
12189
12190 call = NULL_TREE;
12191 retval = NULL_TREE;
12192
12193 if (!init_target_chars ())
12194 return NULL_TREE;
12195
12196 /* If the format doesn't contain % args or %%, use strcpy. */
12197 if (strchr (fmt_str, target_percent) == NULL)
12198 {
12199 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12200
12201 if (!fn)
12202 return NULL_TREE;
12203
12204 /* Don't optimize sprintf (buf, "abc", ptr++). */
12205 if (orig)
12206 return NULL_TREE;
12207
12208 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12209 'format' is known to contain no % formats. */
12210 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12211 if (!ignored)
12212 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12213 }
12214
12215 /* If the format is "%s", use strcpy if the result is unused or ORIG's length is a known constant. */
12216 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12217 {
12218 tree fn;
12219 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12220
12221 if (!fn)
12222 return NULL_TREE;
12223
12224 /* Don't crash on sprintf (str1, "%s"). */
12225 if (!orig)
12226 return NULL_TREE;
12227
12228 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12229 if (!ignored)
12230 {
12231 retval = c_strlen (orig, 1);
12232 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12233 return NULL_TREE;
12234 }
12235 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12236 }
12237
12238 if (call && retval)
12239 {
12240 retval = fold_convert_loc
12241 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12242 retval);
12243 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12244 }
12245 else
12246 return call;
12247 }
12248
12249 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12250 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12251 attempt to simplify calls with more than 4 arguments.
12252
12253 Return NULL_TREE if no simplification was possible, otherwise return the
12254 simplified form of the call as a tree. If IGNORED is true, it means that
12255 the caller does not use the returned value of the function. */
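/* Illustrative sketches of the folds performed below (the relevant
   lengths must be known constants that fit in DESTSIZE):

     snprintf (buf, 16, "hello")     -->  strcpy (buf, "hello")
     snprintf (buf, 16, "%s", str)   -->  strcpy (buf, str)  */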
12256
12257 static tree
12258 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12259 tree orig, int ignored)
12260 {
12261 tree call, retval;
12262 const char *fmt_str = NULL;
12263 unsigned HOST_WIDE_INT destlen;
12264
12265 /* Verify the required arguments in the original call. We deal with two
12266 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12267 'snprintf (dest, cst, "%s", orig)'. */
12268 if (!validate_arg (dest, POINTER_TYPE)
12269 || !validate_arg (destsize, INTEGER_TYPE)
12270 || !validate_arg (fmt, POINTER_TYPE))
12271 return NULL_TREE;
12272 if (orig && !validate_arg (orig, POINTER_TYPE))
12273 return NULL_TREE;
12274
12275 if (!host_integerp (destsize, 1))
12276 return NULL_TREE;
12277
12278 /* Check whether the format is a literal string constant. */
12279 fmt_str = c_getstr (fmt);
12280 if (fmt_str == NULL)
12281 return NULL_TREE;
12282
12283 call = NULL_TREE;
12284 retval = NULL_TREE;
12285
12286 if (!init_target_chars ())
12287 return NULL_TREE;
12288
12289 destlen = tree_low_cst (destsize, 1);
12290
12291 /* If the format doesn't contain % args or %%, use strcpy. */
12292 if (strchr (fmt_str, target_percent) == NULL)
12293 {
12294 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12295 size_t len = strlen (fmt_str);
12296
12297 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12298 if (orig)
12299 return NULL_TREE;
12300
12301 /* We could expand this as
12302 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12303 or to
12304 memcpy (str, fmt_with_nul_at_cstm1, cst);
12305 but in the former case that might increase code size
12306 and in the latter case grow .rodata section too much.
12307 So punt for now. */
12308 if (len >= destlen)
12309 return NULL_TREE;
12310
12311 if (!fn)
12312 return NULL_TREE;
12313
12314 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12315 'format' is known to contain no % formats and
12316 strlen (fmt) < cst. */
12317 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12318
12319 if (!ignored)
12320 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12321 }
12322
12323 /* If the format is "%s", use strcpy when strlen (ORIG) is a known constant that fits in DESTLEN. */
12324 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12325 {
12326 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12327 unsigned HOST_WIDE_INT origlen;
12328
12329 /* Don't crash on snprintf (str1, cst, "%s"). */
12330 if (!orig)
12331 return NULL_TREE;
12332
12333 retval = c_strlen (orig, 1);
12334 if (!retval || !host_integerp (retval, 1))
12335 return NULL_TREE;
12336
12337 origlen = tree_low_cst (retval, 1);
12338 /* We could expand this as
12339 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12340 or to
12341 memcpy (str1, str2_with_nul_at_cstm1, cst);
12342 but in the former case that might increase code size
12343 and in the latter case grow .rodata section too much.
12344 So punt for now. */
12345 if (origlen >= destlen)
12346 return NULL_TREE;
12347
12348 /* Convert snprintf (str1, cst, "%s", str2) into
12349 strcpy (str1, str2) if strlen (str2) < cst. */
12350 if (!fn)
12351 return NULL_TREE;
12352
12353 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12354
12355 if (ignored)
12356 retval = NULL_TREE;
12357 }
12358
12359 if (call && retval)
12360 {
12361 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12362 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12363 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12364 }
12365 else
12366 return call;
12367 }
12368
12369 /* Expand a call EXP to __builtin_object_size. */
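/* If the size was not folded to a constant earlier, all that can be done
   here is to emit the "unknown" answer: (size_t) -1 for types 0 and 1,
   and (size_t) 0 for types 2 and 3.  */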
12370
12371 rtx
12372 expand_builtin_object_size (tree exp)
12373 {
12374 tree ost;
12375 int object_size_type;
12376 tree fndecl = get_callee_fndecl (exp);
12377
12378 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12379 {
12380 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12381 exp, fndecl);
12382 expand_builtin_trap ();
12383 return const0_rtx;
12384 }
12385
12386 ost = CALL_EXPR_ARG (exp, 1);
12387 STRIP_NOPS (ost);
12388
12389 if (TREE_CODE (ost) != INTEGER_CST
12390 || tree_int_cst_sgn (ost) < 0
12391 || compare_tree_int (ost, 3) > 0)
12392 {
12393 error ("%Klast argument of %D is not integer constant between 0 and 3",
12394 exp, fndecl);
12395 expand_builtin_trap ();
12396 return const0_rtx;
12397 }
12398
12399 object_size_type = tree_low_cst (ost, 0);
12400
12401 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12402 }
12403
12404 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12405 FCODE is the BUILT_IN_* to use.
12406 Return NULL_RTX if we failed; the caller should emit a normal call,
12407 otherwise try to get the result in TARGET, if convenient (and in
12408 mode MODE if that's convenient). */
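/* For example (illustrative): when LEN is a known constant that fits,
     __memcpy_chk (d, s, 8, 16)  -->  memcpy (d, s, 8)
   whereas a provably overflowing call, say LEN 32 against SIZE 16, keeps
   the checking call and triggers the warning below.  */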
12409
12410 static rtx
12411 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12412 enum built_in_function fcode)
12413 {
12414 tree dest, src, len, size;
12415
12416 if (!validate_arglist (exp,
12417 POINTER_TYPE,
12418 fcode == BUILT_IN_MEMSET_CHK
12419 ? INTEGER_TYPE : POINTER_TYPE,
12420 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12421 return NULL_RTX;
12422
12423 dest = CALL_EXPR_ARG (exp, 0);
12424 src = CALL_EXPR_ARG (exp, 1);
12425 len = CALL_EXPR_ARG (exp, 2);
12426 size = CALL_EXPR_ARG (exp, 3);
12427
12428 if (! host_integerp (size, 1))
12429 return NULL_RTX;
12430
12431 if (host_integerp (len, 1) || integer_all_onesp (size))
12432 {
12433 tree fn;
12434
12435 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12436 {
12437 warning_at (tree_nonartificial_location (exp),
12438 0, "%Kcall to %D will always overflow destination buffer",
12439 exp, get_callee_fndecl (exp));
12440 return NULL_RTX;
12441 }
12442
12443 fn = NULL_TREE;
12444 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12445 mem{cpy,pcpy,move,set} is available. */
12446 switch (fcode)
12447 {
12448 case BUILT_IN_MEMCPY_CHK:
12449 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12450 break;
12451 case BUILT_IN_MEMPCPY_CHK:
12452 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12453 break;
12454 case BUILT_IN_MEMMOVE_CHK:
12455 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12456 break;
12457 case BUILT_IN_MEMSET_CHK:
12458 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12459 break;
12460 default:
12461 break;
12462 }
12463
12464 if (! fn)
12465 return NULL_RTX;
12466
12467 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12468 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12469 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12470 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12471 }
12472 else if (fcode == BUILT_IN_MEMSET_CHK)
12473 return NULL_RTX;
12474 else
12475 {
12476 unsigned int dest_align = get_pointer_alignment (dest);
12477
12478 /* If DEST is not a pointer type, call the normal function. */
12479 if (dest_align == 0)
12480 return NULL_RTX;
12481
12482 /* If SRC and DEST are the same (and not volatile), do nothing. */
12483 if (operand_equal_p (src, dest, 0))
12484 {
12485 tree expr;
12486
12487 if (fcode != BUILT_IN_MEMPCPY_CHK)
12488 {
12489 /* Evaluate and ignore LEN in case it has side-effects. */
12490 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12491 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12492 }
12493
12494 expr = fold_build_pointer_plus (dest, len);
12495 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12496 }
12497
12498 /* __memmove_chk special case. */
12499 if (fcode == BUILT_IN_MEMMOVE_CHK)
12500 {
12501 unsigned int src_align = get_pointer_alignment (src);
12502
12503 if (src_align == 0)
12504 return NULL_RTX;
12505
12506 /* If src is categorized for a readonly section we can use
12507 normal __memcpy_chk. */
12508 if (readonly_data_expr (src))
12509 {
12510 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12511 if (!fn)
12512 return NULL_RTX;
12513 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12514 dest, src, len, size);
12515 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12516 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12517 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12518 }
12519 }
12520 return NULL_RTX;
12521 }
12522 }
12523
12524 /* Emit warning if a buffer overflow is detected at compile time. */
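/* For instance (illustrative):

     char buf[4];
     __strcpy_chk (buf, "overflow", sizeof buf);

   is diagnosed here because strlen ("overflow") + 1 > 4.  */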
12525
12526 static void
12527 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12528 {
12529 int is_strlen = 0;
12530 tree len, size;
12531 location_t loc = tree_nonartificial_location (exp);
12532
12533 switch (fcode)
12534 {
12535 case BUILT_IN_STRCPY_CHK:
12536 case BUILT_IN_STPCPY_CHK:
12537 /* For __strcat_chk the warning will be emitted only if overflowing
12538 by at least strlen (dest) + 1 bytes. */
12539 case BUILT_IN_STRCAT_CHK:
12540 len = CALL_EXPR_ARG (exp, 1);
12541 size = CALL_EXPR_ARG (exp, 2);
12542 is_strlen = 1;
12543 break;
12544 case BUILT_IN_STRNCAT_CHK:
12545 case BUILT_IN_STRNCPY_CHK:
12546 case BUILT_IN_STPNCPY_CHK:
12547 len = CALL_EXPR_ARG (exp, 2);
12548 size = CALL_EXPR_ARG (exp, 3);
12549 break;
12550 case BUILT_IN_SNPRINTF_CHK:
12551 case BUILT_IN_VSNPRINTF_CHK:
12552 len = CALL_EXPR_ARG (exp, 1);
12553 size = CALL_EXPR_ARG (exp, 3);
12554 break;
12555 default:
12556 gcc_unreachable ();
12557 }
12558
12559 if (!len || !size)
12560 return;
12561
12562 if (! host_integerp (size, 1) || integer_all_onesp (size))
12563 return;
12564
12565 if (is_strlen)
12566 {
12567 len = c_strlen (len, 1);
12568 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12569 return;
12570 }
12571 else if (fcode == BUILT_IN_STRNCAT_CHK)
12572 {
12573 tree src = CALL_EXPR_ARG (exp, 1);
12574 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12575 return;
12576 src = c_strlen (src, 1);
12577 if (! src || ! host_integerp (src, 1))
12578 {
12579 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12580 exp, get_callee_fndecl (exp));
12581 return;
12582 }
12583 else if (tree_int_cst_lt (src, size))
12584 return;
12585 }
12586 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12587 return;
12588
12589 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12590 exp, get_callee_fndecl (exp));
12591 }
12592
12593 /* Emit warning if a buffer overflow is detected at compile time
12594 in __sprintf_chk/__vsprintf_chk calls. */
12595
12596 static void
12597 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12598 {
12599 tree size, len, fmt;
12600 const char *fmt_str;
12601 int nargs = call_expr_nargs (exp);
12602
12603 /* Verify the required arguments in the original call. */
12604
12605 if (nargs < 4)
12606 return;
12607 size = CALL_EXPR_ARG (exp, 2);
12608 fmt = CALL_EXPR_ARG (exp, 3);
12609
12610 if (! host_integerp (size, 1) || integer_all_onesp (size))
12611 return;
12612
12613 /* Check whether the format is a literal string constant. */
12614 fmt_str = c_getstr (fmt);
12615 if (fmt_str == NULL)
12616 return;
12617
12618 if (!init_target_chars ())
12619 return;
12620
12621 /* If the format doesn't contain % args or %%, we know its size. */
12622 if (strchr (fmt_str, target_percent) == 0)
12623 len = build_int_cstu (size_type_node, strlen (fmt_str));
12624 /* If the format is "%s" and the first variadic argument is a string
12625 literal, we know the length too. */
12626 else if (fcode == BUILT_IN_SPRINTF_CHK
12627 && strcmp (fmt_str, target_percent_s) == 0)
12628 {
12629 tree arg;
12630
12631 if (nargs < 5)
12632 return;
12633 arg = CALL_EXPR_ARG (exp, 4);
12634 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12635 return;
12636
12637 len = c_strlen (arg, 1);
12638 if (!len || ! host_integerp (len, 1))
12639 return;
12640 }
12641 else
12642 return;
12643
12644 if (! tree_int_cst_lt (len, size))
12645 warning_at (tree_nonartificial_location (exp),
12646 0, "%Kcall to %D will always overflow destination buffer",
12647 exp, get_callee_fndecl (exp));
12648 }
12649
12650 /* Emit warning if a free is called with address of a variable. */
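/* For example (illustrative), the following triggers
   -Wfree-nonheap-object below:

     int i;
     free (&i);  */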
12651
12652 static void
12653 maybe_emit_free_warning (tree exp)
12654 {
12655 tree arg = CALL_EXPR_ARG (exp, 0);
12656
12657 STRIP_NOPS (arg);
12658 if (TREE_CODE (arg) != ADDR_EXPR)
12659 return;
12660
12661 arg = get_base_address (TREE_OPERAND (arg, 0));
12662 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12663 return;
12664
12665 if (SSA_VAR_P (arg))
12666 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12667 "%Kattempt to free a non-heap object %qD", exp, arg);
12668 else
12669 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12670 "%Kattempt to free a non-heap object", exp);
12671 }
12672
12673 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12674 if possible. */
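/* For example (illustrative):

     char buf[64];
     __builtin_object_size (&buf[16], 0)

   folds to 48 here, the space remaining in BUF.  */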
12675
12676 tree
12677 fold_builtin_object_size (tree ptr, tree ost)
12678 {
12679 unsigned HOST_WIDE_INT bytes;
12680 int object_size_type;
12681
12682 if (!validate_arg (ptr, POINTER_TYPE)
12683 || !validate_arg (ost, INTEGER_TYPE))
12684 return NULL_TREE;
12685
12686 STRIP_NOPS (ost);
12687
12688 if (TREE_CODE (ost) != INTEGER_CST
12689 || tree_int_cst_sgn (ost) < 0
12690 || compare_tree_int (ost, 3) > 0)
12691 return NULL_TREE;
12692
12693 object_size_type = tree_low_cst (ost, 0);
12694
12695 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12696 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12697 and (size_t) 0 for types 2 and 3. */
12698 if (TREE_SIDE_EFFECTS (ptr))
12699 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12700
12701 if (TREE_CODE (ptr) == ADDR_EXPR)
12702 {
12703 bytes = compute_builtin_object_size (ptr, object_size_type);
12704 if (double_int_fits_to_tree_p (size_type_node,
12705 double_int::from_uhwi (bytes)))
12706 return build_int_cstu (size_type_node, bytes);
12707 }
12708 else if (TREE_CODE (ptr) == SSA_NAME)
12709 {
12710 /* If object size is not known yet, delay folding until
12711 later. Maybe subsequent passes will help determining
12712 it. */
12713 bytes = compute_builtin_object_size (ptr, object_size_type);
12714 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12715 && double_int_fits_to_tree_p (size_type_node,
12716 double_int::from_uhwi (bytes)))
12717 return build_int_cstu (size_type_node, bytes);
12718 }
12719
12720 return NULL_TREE;
12721 }
12722
12723 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12724 DEST, SRC, LEN, and SIZE are the arguments to the call.
12725 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12726 code of the builtin. If MAXLEN is not NULL, it is maximum length
12727 passed as third argument. */
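/* A sketch of the central fold (illustrative): when SIZE is all-ones
   (object size unknown) or LEN/MAXLEN is known not to exceed SIZE,
     __memcpy_chk (d, s, len, size)  -->  memcpy (d, s, len)
   and similarly for the other three _chk variants.  */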
12728
12729 tree
12730 fold_builtin_memory_chk (location_t loc, tree fndecl,
12731 tree dest, tree src, tree len, tree size,
12732 tree maxlen, bool ignore,
12733 enum built_in_function fcode)
12734 {
12735 tree fn;
12736
12737 if (!validate_arg (dest, POINTER_TYPE)
12738 || !validate_arg (src,
12739 (fcode == BUILT_IN_MEMSET_CHK
12740 ? INTEGER_TYPE : POINTER_TYPE))
12741 || !validate_arg (len, INTEGER_TYPE)
12742 || !validate_arg (size, INTEGER_TYPE))
12743 return NULL_TREE;
12744
12745 /* If SRC and DEST are the same (and not volatile), return DEST
12746 (resp. DEST+LEN for __mempcpy_chk). */
12747 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12748 {
12749 if (fcode != BUILT_IN_MEMPCPY_CHK)
12750 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12751 dest, len);
12752 else
12753 {
12754 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12755 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12756 }
12757 }
12758
12759 if (! host_integerp (size, 1))
12760 return NULL_TREE;
12761
12762 if (! integer_all_onesp (size))
12763 {
12764 if (! host_integerp (len, 1))
12765 {
12766 /* If LEN is not constant, try MAXLEN too.
12767 For MAXLEN only allow optimizing into non-_ocs function
12768 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12769 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12770 {
12771 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12772 {
12773 /* (void) __mempcpy_chk () can be optimized into
12774 (void) __memcpy_chk (). */
12775 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12776 if (!fn)
12777 return NULL_TREE;
12778
12779 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12780 }
12781 return NULL_TREE;
12782 }
12783 }
12784 else
12785 maxlen = len;
12786
12787 if (tree_int_cst_lt (size, maxlen))
12788 return NULL_TREE;
12789 }
12790
12791 fn = NULL_TREE;
12792 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12793 mem{cpy,pcpy,move,set} is available. */
12794 switch (fcode)
12795 {
12796 case BUILT_IN_MEMCPY_CHK:
12797 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12798 break;
12799 case BUILT_IN_MEMPCPY_CHK:
12800 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12801 break;
12802 case BUILT_IN_MEMMOVE_CHK:
12803 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12804 break;
12805 case BUILT_IN_MEMSET_CHK:
12806 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12807 break;
12808 default:
12809 break;
12810 }
12811
12812 if (!fn)
12813 return NULL_TREE;
12814
12815 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12816 }
12817
12818 /* Fold a call to the __st[rp]cpy_chk builtin.
12819 DEST, SRC, and SIZE are the arguments to the call.
12820 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12821 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12822 strings passed as second argument. */
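/* For example (illustrative):
     __strcpy_chk (d, "abc", 8)  -->  strcpy (d, "abc")
   since strlen ("abc") < 8; with a non-constant but side-effect-free
   source length the call becomes a __memcpy_chk of strlen + 1 bytes
   instead, as coded below.  */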
12823
12824 tree
12825 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12826 tree src, tree size,
12827 tree maxlen, bool ignore,
12828 enum built_in_function fcode)
12829 {
12830 tree len, fn;
12831
12832 if (!validate_arg (dest, POINTER_TYPE)
12833 || !validate_arg (src, POINTER_TYPE)
12834 || !validate_arg (size, INTEGER_TYPE))
12835 return NULL_TREE;
12836
12837 /* If SRC and DEST are the same (and not volatile), return DEST. */
12838 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12839 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12840
12841 if (! host_integerp (size, 1))
12842 return NULL_TREE;
12843
12844 if (! integer_all_onesp (size))
12845 {
12846 len = c_strlen (src, 1);
12847 if (! len || ! host_integerp (len, 1))
12848 {
12849 /* If LEN is not constant, try MAXLEN too.
12850 For MAXLEN only allow optimizing into non-_ocs function
12851 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12852 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12853 {
12854 if (fcode == BUILT_IN_STPCPY_CHK)
12855 {
12856 if (! ignore)
12857 return NULL_TREE;
12858
12859 /* If return value of __stpcpy_chk is ignored,
12860 optimize into __strcpy_chk. */
12861 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12862 if (!fn)
12863 return NULL_TREE;
12864
12865 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12866 }
12867
12868 if (! len || TREE_SIDE_EFFECTS (len))
12869 return NULL_TREE;
12870
12871 /* If c_strlen returned something, but not a constant,
12872 transform __strcpy_chk into __memcpy_chk. */
12873 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12874 if (!fn)
12875 return NULL_TREE;
12876
12877 len = fold_convert_loc (loc, size_type_node, len);
12878 len = size_binop_loc (loc, PLUS_EXPR, len,
12879 build_int_cst (size_type_node, 1));
12880 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12881 build_call_expr_loc (loc, fn, 4,
12882 dest, src, len, size));
12883 }
12884 }
12885 else
12886 maxlen = len;
12887
12888 if (! tree_int_cst_lt (maxlen, size))
12889 return NULL_TREE;
12890 }
12891
12892 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12893 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12894 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12895 if (!fn)
12896 return NULL_TREE;
12897
12898 return build_call_expr_loc (loc, fn, 2, dest, src);
12899 }
12900
12901 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12902 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12903 length passed as third argument. IGNORE is true if return value can be
12904 ignored. FCODE is the BUILT_IN_* code of the builtin. */
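/* E.g. (illustrative): __strncpy_chk (d, s, 4, 8) --> strncpy (d, s, 4),
   since LEN 4 cannot exceed the object size 8.  */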
12905
12906 tree
12907 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12908 tree len, tree size, tree maxlen, bool ignore,
12909 enum built_in_function fcode)
12910 {
12911 tree fn;
12912
12913 if (!validate_arg (dest, POINTER_TYPE)
12914 || !validate_arg (src, POINTER_TYPE)
12915 || !validate_arg (len, INTEGER_TYPE)
12916 || !validate_arg (size, INTEGER_TYPE))
12917 return NULL_TREE;
12918
12919 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12920 {
12921 /* If return value of __stpncpy_chk is ignored,
12922 optimize into __strncpy_chk. */
12923 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12924 if (fn)
12925 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12926 }
12927
12928 if (! host_integerp (size, 1))
12929 return NULL_TREE;
12930
12931 if (! integer_all_onesp (size))
12932 {
12933 if (! host_integerp (len, 1))
12934 {
12935 /* If LEN is not constant, try MAXLEN too.
12936 For MAXLEN only allow optimizing into non-_ocs function
12937 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12938 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12939 return NULL_TREE;
12940 }
12941 else
12942 maxlen = len;
12943
12944 if (tree_int_cst_lt (size, maxlen))
12945 return NULL_TREE;
12946 }
12947
12948 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12949 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12950 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12951 if (!fn)
12952 return NULL_TREE;
12953
12954 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12955 }
12956
12957 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12958 are the arguments to the call. */
12959
12960 static tree
12961 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12962 tree src, tree size)
12963 {
12964 tree fn;
12965 const char *p;
12966
12967 if (!validate_arg (dest, POINTER_TYPE)
12968 || !validate_arg (src, POINTER_TYPE)
12969 || !validate_arg (size, INTEGER_TYPE))
12970 return NULL_TREE;
12971
12972 p = c_getstr (src);
12973 /* If the SRC parameter is "", return DEST. */
12974 if (p && *p == '\0')
12975 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12976
12977 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12978 return NULL_TREE;
12979
12980 /* If __builtin_strcat_chk is used, assume strcat is available. */
12981 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12982 if (!fn)
12983 return NULL_TREE;
12984
12985 return build_call_expr_loc (loc, fn, 2, dest, src);
12986 }
12987
12988 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12989 LEN, and SIZE. */
12990
12991 static tree
12992 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12993 tree dest, tree src, tree len, tree size)
12994 {
12995 tree fn;
12996 const char *p;
12997
12998 if (!validate_arg (dest, POINTER_TYPE)
12999 || !validate_arg (src, POINTER_TYPE)
13000 || !validate_arg (len, INTEGER_TYPE)
13001 || !validate_arg (size, INTEGER_TYPE))
13002 return NULL_TREE;
13003
13004 p = c_getstr (src);
13005 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13006 if (p && *p == '\0')
13007 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13008 else if (integer_zerop (len))
13009 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13010
13011 if (! host_integerp (size, 1))
13012 return NULL_TREE;
13013
13014 if (! integer_all_onesp (size))
13015 {
13016 tree src_len = c_strlen (src, 1);
13017 if (src_len
13018 && host_integerp (src_len, 1)
13019 && host_integerp (len, 1)
13020 && ! tree_int_cst_lt (len, src_len))
13021 {
13022 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13023 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13024 if (!fn)
13025 return NULL_TREE;
13026
13027 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13028 }
13029 return NULL_TREE;
13030 }
13031
13032 /* If __builtin_strncat_chk is used, assume strncat is available. */
13033 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13034 if (!fn)
13035 return NULL_TREE;
13036
13037 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13038 }
13039
13040 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13041 Return NULL_TREE if a normal call should be emitted rather than
13042 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13043 or BUILT_IN_VSPRINTF_CHK. */
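/* A sketch (illustrative): with FLAG 0 and a %-free format,
     __sprintf_chk (d, 0, 16, "hi")  -->  sprintf (d, "hi")
   because the output length (2) is then known to fit in SIZE.  */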
13044
13045 static tree
13046 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13047 enum built_in_function fcode)
13048 {
13049 tree dest, size, len, fn, fmt, flag;
13050 const char *fmt_str;
13051
13052 /* Verify the required arguments in the original call. */
13053 if (nargs < 4)
13054 return NULL_TREE;
13055 dest = args[0];
13056 if (!validate_arg (dest, POINTER_TYPE))
13057 return NULL_TREE;
13058 flag = args[1];
13059 if (!validate_arg (flag, INTEGER_TYPE))
13060 return NULL_TREE;
13061 size = args[2];
13062 if (!validate_arg (size, INTEGER_TYPE))
13063 return NULL_TREE;
13064 fmt = args[3];
13065 if (!validate_arg (fmt, POINTER_TYPE))
13066 return NULL_TREE;
13067
13068 if (! host_integerp (size, 1))
13069 return NULL_TREE;
13070
13071 len = NULL_TREE;
13072
13073 if (!init_target_chars ())
13074 return NULL_TREE;
13075
13076 /* Check whether the format is a literal string constant. */
13077 fmt_str = c_getstr (fmt);
13078 if (fmt_str != NULL)
13079 {
13080 /* If the format doesn't contain % args or %%, we know the size. */
13081 if (strchr (fmt_str, target_percent) == 0)
13082 {
13083 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13084 len = build_int_cstu (size_type_node, strlen (fmt_str));
13085 }
13086 /* If the format is "%s" and the first variadic argument is a string
13087 literal, we know the size too. */
13088 else if (fcode == BUILT_IN_SPRINTF_CHK
13089 && strcmp (fmt_str, target_percent_s) == 0)
13090 {
13091 tree arg;
13092
13093 if (nargs == 5)
13094 {
13095 arg = args[4];
13096 if (validate_arg (arg, POINTER_TYPE))
13097 {
13098 len = c_strlen (arg, 1);
13099 if (! len || ! host_integerp (len, 1))
13100 len = NULL_TREE;
13101 }
13102 }
13103 }
13104 }
13105
13106 if (! integer_all_onesp (size))
13107 {
13108 if (! len || ! tree_int_cst_lt (len, size))
13109 return NULL_TREE;
13110 }
13111
13112 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13113 or if format doesn't contain % chars or is "%s". */
13114 if (! integer_zerop (flag))
13115 {
13116 if (fmt_str == NULL)
13117 return NULL_TREE;
13118 if (strchr (fmt_str, target_percent) != NULL
13119 && strcmp (fmt_str, target_percent_s))
13120 return NULL_TREE;
13121 }
13122
13123 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13124 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13125 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13126 if (!fn)
13127 return NULL_TREE;
13128
13129 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13130 }
13131
13132 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13133 a normal call should be emitted rather than expanding the function
13134 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13135
13136 static tree
13137 fold_builtin_sprintf_chk (location_t loc, tree exp,
13138 enum built_in_function fcode)
13139 {
13140 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13141 CALL_EXPR_ARGP (exp), fcode);
13142 }
13143
13144 /* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS. Return
13145 NULL_TREE if a normal call should be emitted rather than expanding
13146 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13147 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13148 passed as second argument. */
13149
13150 static tree
13151 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13152 tree maxlen, enum built_in_function fcode)
13153 {
13154 tree dest, size, len, fn, fmt, flag;
13155 const char *fmt_str;
13156
13157 /* Verify the required arguments in the original call. */
13158 if (nargs < 5)
13159 return NULL_TREE;
13160 dest = args[0];
13161 if (!validate_arg (dest, POINTER_TYPE))
13162 return NULL_TREE;
13163 len = args[1];
13164 if (!validate_arg (len, INTEGER_TYPE))
13165 return NULL_TREE;
13166 flag = args[2];
13167 if (!validate_arg (flag, INTEGER_TYPE))
13168 return NULL_TREE;
13169 size = args[3];
13170 if (!validate_arg (size, INTEGER_TYPE))
13171 return NULL_TREE;
13172 fmt = args[4];
13173 if (!validate_arg (fmt, POINTER_TYPE))
13174 return NULL_TREE;
13175
13176 if (! host_integerp (size, 1))
13177 return NULL_TREE;
13178
13179 if (! integer_all_onesp (size))
13180 {
13181 if (! host_integerp (len, 1))
13182 {
13183 /* If LEN is not constant, try MAXLEN too.
13184 For MAXLEN only allow optimizing into non-_ocs function
13185 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13186 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13187 return NULL_TREE;
13188 }
13189 else
13190 maxlen = len;
13191
13192 if (tree_int_cst_lt (size, maxlen))
13193 return NULL_TREE;
13194 }
13195
13196 if (!init_target_chars ())
13197 return NULL_TREE;
13198
13199 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13200 or if format doesn't contain % chars or is "%s". */
13201 if (! integer_zerop (flag))
13202 {
13203 fmt_str = c_getstr (fmt);
13204 if (fmt_str == NULL)
13205 return NULL_TREE;
13206 if (strchr (fmt_str, target_percent) != NULL
13207 && strcmp (fmt_str, target_percent_s))
13208 return NULL_TREE;
13209 }
13210
13211 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13212 available. */
13213 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13214 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13215 if (!fn)
13216 return NULL_TREE;
13217
13218 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13219 }
13220
13221 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
13222 a normal call should be emitted rather than expanding the function
13223 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13224 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13225 passed as second argument. */
13226
13227 tree
13228 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13229 enum built_in_function fcode)
13230 {
13231 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13232 CALL_EXPR_ARGP (exp), maxlen, fcode);
13233 }
13234
13235 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13236 FMT and ARG are the arguments to the call; we don't fold cases with
13237 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13238
13239 Return NULL_TREE if no simplification was possible, otherwise return the
13240 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13241 code of the function to be simplified. */
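/* The classic folds handled below (illustrative, return value unused):

     printf ("x")         -->  putchar ('x')
     printf ("str\n")     -->  puts ("str")
     printf ("%s\n", s)   -->  puts (s)
     printf ("%c", c)     -->  putchar (c)  */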
13242
13243 static tree
13244 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13245 tree arg, bool ignore,
13246 enum built_in_function fcode)
13247 {
13248 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13249 const char *fmt_str = NULL;
13250
13251 /* If the return value is used, don't do the transformation. */
13252 if (! ignore)
13253 return NULL_TREE;
13254
13255 /* Verify the required arguments in the original call. */
13256 if (!validate_arg (fmt, POINTER_TYPE))
13257 return NULL_TREE;
13258
13259 /* Check whether the format is a literal string constant. */
13260 fmt_str = c_getstr (fmt);
13261 if (fmt_str == NULL)
13262 return NULL_TREE;
13263
13264 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13265 {
13266 /* If we're using an unlocked function, assume the other
13267 unlocked functions exist explicitly. */
13268 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13269 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13270 }
13271 else
13272 {
13273 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13274 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13275 }
13276
13277 if (!init_target_chars ())
13278 return NULL_TREE;
13279
13280 if (strcmp (fmt_str, target_percent_s) == 0
13281 || strchr (fmt_str, target_percent) == NULL)
13282 {
13283 const char *str;
13284
13285 if (strcmp (fmt_str, target_percent_s) == 0)
13286 {
13287 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13288 return NULL_TREE;
13289
13290 if (!arg || !validate_arg (arg, POINTER_TYPE))
13291 return NULL_TREE;
13292
13293 str = c_getstr (arg);
13294 if (str == NULL)
13295 return NULL_TREE;
13296 }
13297 else
13298 {
13299 /* The format specifier doesn't contain any '%' characters. */
13300 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13301 && arg)
13302 return NULL_TREE;
13303 str = fmt_str;
13304 }
13305
13306 /* If the string was "", printf does nothing. */
13307 if (str[0] == '\0')
13308 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13309
13310 /* If the string has length of 1, call putchar. */
13311 if (str[1] == '\0')
13312 {
13313 /* Given printf("c"), (where c is any one character,)
13314 convert "c"[0] to an int and pass that to the replacement
13315 function. */
13316 newarg = build_int_cst (integer_type_node, str[0]);
13317 if (fn_putchar)
13318 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13319 }
13320 else
13321 {
13322 /* If the string was "string\n", call puts("string"). */
13323 size_t len = strlen (str);
13324 if ((unsigned char)str[len - 1] == target_newline
13325 && (size_t) (int) len == len
13326 && (int) len > 0)
13327 {
13328 char *newstr;
13329 tree offset_node, string_cst;
13330
13331 /* Create a NUL-terminated string that's one char shorter
13332 than the original, stripping off the trailing '\n'. */
13333 newarg = build_string_literal (len, str);
13334 string_cst = string_constant (newarg, &offset_node);
13335 gcc_checking_assert (string_cst
13336 && (TREE_STRING_LENGTH (string_cst)
13337 == (int) len)
13338 && integer_zerop (offset_node)
13339 && (unsigned char)
13340 TREE_STRING_POINTER (string_cst)[len - 1]
13341 == target_newline);
13342 /* build_string_literal creates a new STRING_CST,
13343 modify it in place to avoid double copying. */
13344 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13345 newstr[len - 1] = '\0';
13346 if (fn_puts)
13347 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13348 }
13349 else
13350 /* We'd like to arrange to call fputs(string,stdout) here,
13351 but we need stdout and don't have a way to get it yet. */
13352 return NULL_TREE;
13353 }
13354 }
13355
13356 /* The other optimizations can be done only on the non-va_list variants. */
13357 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13358 return NULL_TREE;
13359
13360 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13361 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13362 {
13363 if (!arg || !validate_arg (arg, POINTER_TYPE))
13364 return NULL_TREE;
13365 if (fn_puts)
13366 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13367 }
13368
13369 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13370 else if (strcmp (fmt_str, target_percent_c) == 0)
13371 {
13372 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13373 return NULL_TREE;
13374 if (fn_putchar)
13375 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13376 }
13377
13378 if (!call)
13379 return NULL_TREE;
13380
13381 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13382 }
13383
13384 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13385 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13386 more than 3 arguments, and ARG may be null in the 2-argument case.
13387
13388 Return NULL_TREE if no simplification was possible, otherwise return the
13389 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13390 code of the function to be simplified. */
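/* The fprintf counterparts of the printf folds above (illustrative,
   return value unused):

     fprintf (fp, "str")     -->  fputs ("str", fp)
     fprintf (fp, "%s", s)   -->  fputs (s, fp)
     fprintf (fp, "%c", c)   -->  fputc (c, fp)  */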
13391
13392 static tree
13393 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13394 tree fmt, tree arg, bool ignore,
13395 enum built_in_function fcode)
13396 {
13397 tree fn_fputc, fn_fputs, call = NULL_TREE;
13398 const char *fmt_str = NULL;
13399
13400 /* If the return value is used, don't do the transformation. */
13401 if (! ignore)
13402 return NULL_TREE;
13403
13404 /* Verify the required arguments in the original call. */
13405 if (!validate_arg (fp, POINTER_TYPE))
13406 return NULL_TREE;
13407 if (!validate_arg (fmt, POINTER_TYPE))
13408 return NULL_TREE;
13409
13410 /* Check whether the format is a literal string constant. */
13411 fmt_str = c_getstr (fmt);
13412 if (fmt_str == NULL)
13413 return NULL_TREE;
13414
13415 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13416 {
13417 /* If we're using an unlocked function, assume the other
13418 unlocked functions exist explicitly. */
13419 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13420 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13421 }
13422 else
13423 {
13424 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13425 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13426 }
13427
13428 if (!init_target_chars ())
13429 return NULL_TREE;
13430
13431 /* If the format doesn't contain % args or %%, use fputs. */
13432 if (strchr (fmt_str, target_percent) == NULL)
13433 {
13434 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13435 && arg)
13436 return NULL_TREE;
13437
13438 /* If the format specifier was "", fprintf does nothing. */
13439 if (fmt_str[0] == '\0')
13440 {
13441 /* If FP has side-effects, just wait until gimplification is
13442 done. */
13443 if (TREE_SIDE_EFFECTS (fp))
13444 return NULL_TREE;
13445
13446 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13447 }
13448
13449 /* When "string" doesn't contain %, replace all cases of
13450 fprintf (fp, string) with fputs (string, fp). The fputs
13451 builtin will take care of special cases like length == 1. */
13452 if (fn_fputs)
13453 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13454 }
13455
13456 /* The other optimizations can be done only on the non-va_list variants. */
13457 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13458 return NULL_TREE;
13459
13460 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13461 else if (strcmp (fmt_str, target_percent_s) == 0)
13462 {
13463 if (!arg || !validate_arg (arg, POINTER_TYPE))
13464 return NULL_TREE;
13465 if (fn_fputs)
13466 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13467 }
13468
13469 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13470 else if (strcmp (fmt_str, target_percent_c) == 0)
13471 {
13472 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13473 return NULL_TREE;
13474 if (fn_fputc)
13475 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13476 }
13477
13478 if (!call)
13479 return NULL_TREE;
13480 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13481 }
13482
13483 /* Initialize format string characters in the target charset. */
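/* In a cross compiler the execution character set may differ from the
   host's (e.g. an EBCDIC target), so '%', 'c', 's' and '\n' are
   translated through lang_hooks.to_target_charset and cached once.  */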
13484
13485 static bool
13486 init_target_chars (void)
13487 {
13488 static bool init;
13489 if (!init)
13490 {
13491 target_newline = lang_hooks.to_target_charset ('\n');
13492 target_percent = lang_hooks.to_target_charset ('%');
13493 target_c = lang_hooks.to_target_charset ('c');
13494 target_s = lang_hooks.to_target_charset ('s');
13495 if (target_newline == 0 || target_percent == 0 || target_c == 0
13496 || target_s == 0)
13497 return false;
13498
13499 target_percent_c[0] = target_percent;
13500 target_percent_c[1] = target_c;
13501 target_percent_c[2] = '\0';
13502
13503 target_percent_s[0] = target_percent;
13504 target_percent_s[1] = target_s;
13505 target_percent_s[2] = '\0';
13506
13507 target_percent_s_newline[0] = target_percent;
13508 target_percent_s_newline[1] = target_s;
13509 target_percent_s_newline[2] = target_newline;
13510 target_percent_s_newline[3] = '\0';
13511
13512 init = true;
13513 }
13514 return true;
13515 }
13516
13517 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13518 and no overflow/underflow occurred. INEXACT is true if M was not
13519 exactly calculated. TYPE is the tree type for the result. This
13520 function assumes that you cleared the MPFR flags and then
13521 calculated M to see if anything subsequently set a flag prior to
13522 entering this function. Return NULL_TREE if any checks fail. */
13523
13524 static tree
13525 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13526 {
13527 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13528 overflow/underflow occurred. If -frounding-math, proceed iff the
13529 result of calling FUNC was exact. */
13530 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13531 && (!flag_rounding_math || !inexact))
13532 {
13533 REAL_VALUE_TYPE rr;
13534
13535 real_from_mpfr (&rr, m, type, GMP_RNDN);
13536 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
13537 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13538 but the mpfr_t is not, then we underflowed in the
13539 conversion. */
13540 if (real_isfinite (&rr)
13541 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13542 {
13543 REAL_VALUE_TYPE rmode;
13544
13545 real_convert (&rmode, TYPE_MODE (type), &rr);
13546 /* Proceed iff the specified mode can hold the value. */
13547 if (real_identical (&rmode, &rr))
13548 return build_real (type, rmode);
13549 }
13550 }
13551 return NULL_TREE;
13552 }
13553
13554 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13555 number and no overflow/underflow occurred. INEXACT is true if M
13556 was not exactly calculated. TYPE is the tree type for the result.
13557 This function assumes that you cleared the MPFR flags and then
13558 calculated M to see if anything subsequently set a flag prior to
13559 entering this function. Return NULL_TREE if any checks fail; if
13560 FORCE_CONVERT is true, the checks are bypassed. */
13561
13562 static tree
13563 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13564 {
13565 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13566 overflow/underflow occurred. If -frounding-math, proceed iff the
13567 result of calling FUNC was exact. */
13568 if (force_convert
13569 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13570 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13571 && (!flag_rounding_math || !inexact)))
13572 {
13573 REAL_VALUE_TYPE re, im;
13574
13575 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13576 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13577 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
13578 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13579 but the mpfr_t is not, then we underflowed in the
13580 conversion. */
13581 if (force_convert
13582 || (real_isfinite (&re) && real_isfinite (&im)
13583 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13584 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13585 {
13586 REAL_VALUE_TYPE re_mode, im_mode;
13587
13588 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13589 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13590 /* Proceed iff the specified mode can hold the value. */
13591 if (force_convert
13592 || (real_identical (&re_mode, &re)
13593 && real_identical (&im_mode, &im)))
13594 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13595 build_real (TREE_TYPE (type), im_mode));
13596 }
13597 }
13598 return NULL_TREE;
13599 }
13600
13601 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13602 FUNC on it and return the resulting value as a tree with type TYPE.
13603 If MIN and/or MAX are not NULL, then the supplied ARG must be
13604 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13605 acceptable values, otherwise they are not. The mpfr precision is
13606 set to the precision of TYPE. We assume that function FUNC returns
13607 zero if the result could be calculated exactly within the requested
13608 precision. */
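/* For example (illustrative), folding sin (1.0) at compile time ends up
   here roughly as
     do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, /*inclusive=*/ false);
   with TYPE the double type node.  */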
13609
13610 static tree
13611 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13612 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13613 bool inclusive)
13614 {
13615 tree result = NULL_TREE;
13616
13617 STRIP_NOPS (arg);
13618
13619 /* To proceed, MPFR must exactly represent the target floating point
13620 format, which only happens when the target base equals two. */
13621 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13622 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13623 {
13624 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13625
13626 if (real_isfinite (ra)
13627 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13628 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13629 {
13630 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13631 const int prec = fmt->p;
13632 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13633 int inexact;
13634 mpfr_t m;
13635
13636 mpfr_init2 (m, prec);
13637 mpfr_from_real (m, ra, GMP_RNDN);
13638 mpfr_clear_flags ();
13639 inexact = func (m, m, rnd);
13640 result = do_mpfr_ckconv (m, type, inexact);
13641 mpfr_clear (m);
13642 }
13643 }
13644
13645 return result;
13646 }
13647
13648 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13649 FUNC on it and return the resulting value as a tree with type TYPE.
13650 The mpfr precision is set to the precision of TYPE. We assume that
13651 function FUNC returns zero if the result could be calculated
13652 exactly within the requested precision. */
13653
13654 static tree
13655 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13656 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13657 {
13658 tree result = NULL_TREE;
13659
13660 STRIP_NOPS (arg1);
13661 STRIP_NOPS (arg2);
13662
13663 /* To proceed, MPFR must exactly represent the target floating point
13664 format, which only happens when the target base equals two. */
13665 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13666 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13667 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13668 {
13669 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13670 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13671
13672 if (real_isfinite (ra1) && real_isfinite (ra2))
13673 {
13674 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13675 const int prec = fmt->p;
13676 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13677 int inexact;
13678 mpfr_t m1, m2;
13679
13680 mpfr_inits2 (prec, m1, m2, NULL);
13681 mpfr_from_real (m1, ra1, GMP_RNDN);
13682 mpfr_from_real (m2, ra2, GMP_RNDN);
13683 mpfr_clear_flags ();
13684 inexact = func (m1, m1, m2, rnd);
13685 result = do_mpfr_ckconv (m1, type, inexact);
13686 mpfr_clears (m1, m2, NULL);
13687 }
13688 }
13689
13690 return result;
13691 }
13692
13693 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13694 FUNC on it and return the resulting value as a tree with type TYPE.
13695 The mpfr precision is set to the precision of TYPE. We assume that
13696 function FUNC returns zero if the result could be calculated
13697 exactly within the requested precision. */
13698
13699 static tree
13700 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13701 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13702 {
13703 tree result = NULL_TREE;
13704
13705 STRIP_NOPS (arg1);
13706 STRIP_NOPS (arg2);
13707 STRIP_NOPS (arg3);
13708
13709 /* To proceed, MPFR must exactly represent the target floating point
13710 format, which only happens when the target base equals two. */
13711 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13712 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13713 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13714 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13715 {
13716 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13717 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13718 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13719
13720 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13721 {
13722 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13723 const int prec = fmt->p;
13724 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13725 int inexact;
13726 mpfr_t m1, m2, m3;
13727
13728 mpfr_inits2 (prec, m1, m2, m3, NULL);
13729 mpfr_from_real (m1, ra1, GMP_RNDN);
13730 mpfr_from_real (m2, ra2, GMP_RNDN);
13731 mpfr_from_real (m3, ra3, GMP_RNDN);
13732 mpfr_clear_flags ();
13733 inexact = func (m1, m1, m2, m3, rnd);
13734 result = do_mpfr_ckconv (m1, type, inexact);
13735 mpfr_clears (m1, m2, m3, NULL);
13736 }
13737 }
13738
13739 return result;
13740 }
13741
13742 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13743 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13744 If ARG_SINP and ARG_COSP are NULL then the result is returned
13745 as a complex value.
13746 The type is taken from the type of ARG and is used for setting the
13747 precision of the calculation and results. */
13748
13749 static tree
13750 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13751 {
13752 tree const type = TREE_TYPE (arg);
13753 tree result = NULL_TREE;
13754
13755 STRIP_NOPS (arg);
13756
13757 /* To proceed, MPFR must exactly represent the target floating point
13758 format, which only happens when the target base equals two. */
13759 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13760 && TREE_CODE (arg) == REAL_CST
13761 && !TREE_OVERFLOW (arg))
13762 {
13763 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13764
13765 if (real_isfinite (ra))
13766 {
13767 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13768 const int prec = fmt->p;
13769 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13770 tree result_s, result_c;
13771 int inexact;
13772 mpfr_t m, ms, mc;
13773
13774 mpfr_inits2 (prec, m, ms, mc, NULL);
13775 mpfr_from_real (m, ra, GMP_RNDN);
13776 mpfr_clear_flags ();
13777 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13778 result_s = do_mpfr_ckconv (ms, type, inexact);
13779 result_c = do_mpfr_ckconv (mc, type, inexact);
13780 mpfr_clears (m, ms, mc, NULL);
13781 if (result_s && result_c)
13782 {
13783 /* If we are to return in a complex value do so. */
13784 if (!arg_sinp && !arg_cosp)
13785 return build_complex (build_complex_type (type),
13786 result_c, result_s);
13787
13788 /* Dereference the sin/cos pointer arguments. */
13789 arg_sinp = build_fold_indirect_ref (arg_sinp);
13790 arg_cosp = build_fold_indirect_ref (arg_cosp);
13791 /* Proceed iff valid pointer types were passed in. */
13792 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13793 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13794 {
13795 /* Set the values. */
13796 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13797 result_s);
13798 TREE_SIDE_EFFECTS (result_s) = 1;
13799 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13800 result_c);
13801 TREE_SIDE_EFFECTS (result_c) = 1;
13802 /* Combine the assignments into a compound expr. */
13803 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13804 result_s, result_c));
13805 }
13806 }
13807 }
13808 }
13809 return result;
13810 }

/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */

static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long) n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
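
/* An illustrative example (hypothetical): for `jn (0, 0.0)', mpfr's
   Bessel routine computes j0(0) == 1.0 exactly, so the call folds to
   the constant 1.0.  The MIN/INCLUSIVE arguments let callers such as
   the yn family reject out-of-domain constant arguments (y0 and
   friends require a positive argument) instead of folding them.  */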

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
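
/* An illustrative example (hypothetical): for `remquo (3.5, 2.0, &q)',
   the quotient 3.5/2.0 == 1.75 rounds to 2 and the remainder is
   3.5 - 2*2.0 == -0.5, both exact, so the call folds to the compound
   expression `*q = 2, -0.5'.  */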

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
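
/* An illustrative example (hypothetical): for `lgamma_r (2.0, &sg)',
   gamma(2) == 1 so lgamma(2.0) == 0.0 exactly and the sign is
   positive; the call folds to the compound expression
   `*sg = 1, 0.0'.  */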

/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}
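
/* An illustrative example (hypothetical): for `ccos (0.0 + 0.0i)'
   with FUNC == mpc_cos, MPC computes 1.0 + 0.0i exactly, so the call
   folds to that complex constant.  */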

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
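
/* An illustrative example (hypothetical): for
   `cpow (2.0 + 0.0i, 2.0 + 0.0i)' with FUNC == mpc_pow, MPC computes
   4.0 + 0.0i exactly, so the call folds to that complex constant.
   With DO_NONFINITE set (e.g. under -funsafe-math-optimizations),
   arguments containing Inf or NaN are folded as well.  */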

/* Fold a call STMT to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
                                     (nargs > 0
                                      ? gimple_call_arg_ptr (stmt, 0)
                                      : &error_mark_node), fcode);
}
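
/* An illustrative example (hypothetical): given
   `__builtin___sprintf_chk (buf, 0, 16, "hello")', the format string
   contains no '%' and its length 5 fits the known object size 16, so
   fold_builtin_sprintf_chk_1 can rewrite the call into the unchecked
   `sprintf (buf, "hello")', which later folding may simplify
   further.  */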

/* Fold a call STMT to __{,v}snprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is the maximum
   length passed as second argument.  */

tree
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
                                  enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
                                      (nargs > 0
                                       ? gimple_call_arg_ptr (stmt, 0)
                                       : &error_mark_node), maxlen, fcode);
}
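
/* An illustrative example (hypothetical): given
   `__builtin___snprintf_chk (buf, 6, 0, 16, "hello")', the format
   contains no '%' and the length 5 is known to fit both the maximum
   length 6 and the object size 16, so fold_builtin_snprintf_chk_1 can
   rewrite the call into the unchecked `snprintf (buf, 6, "hello")'.  */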

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, STMT is the GIMPLE_CALL statement for the call, and IGNORE
   is true if the result of the function call is ignored.  */

static tree
gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
                             bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
        {
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (!ret)
            ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
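
/* An illustrative example (hypothetical): for the GIMPLE statement
   `x = __builtin_sqrt (4.0);', fold_call_stmt dispatches to
   fold_builtin_n, which computes sqrt(4.0) == 2.0 exactly via MPFR
   and returns the REAL_CST 2.0 with the call's location attached, so
   the caller can replace the call with a simple assignment.  */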

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}
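
/* An illustrative example (hypothetical) of the source-level trigger:

     void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("__my_memcpy");

   After the front end routes this declaration here, both explicit
   memcpy calls and the compiler's own block-move expansions emit
   references to __my_memcpy rather than memcpy.  */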

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}