/* Expand builtin functions.
   Copyright (C) 1988-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "realmpfr.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-mudflap.h"
#include "tree-ssa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "ubsan.h"


#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees, making sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx,
                                       enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree,
                                    bool, int);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
                                        enum tree_code, enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strcat (location_t, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool,
                                 enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
                                  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *,
                          bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr,
                                  mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
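
/* Illustrative note (added; not part of the original source): under
   the prefix checks above, is_builtin_name ("__builtin_memcpy"),
   is_builtin_name ("__sync_fetch_and_add") and
   is_builtin_name ("__atomic_load_n") all return true, while a plain
   "memcpy" matches no prefix and returns false.  */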


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* By default we assume that C99 functions are present at run time,
   but sincos is not.  */
bool
default_libc_has_function (enum function_class fn_class)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and
   such that N < M.  If these numbers can be determined, store M in
   *ALIGNP and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int inner, align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  inner = ~0U;
  while (offset)
    {
      tree next_offset;

      if (TREE_CODE (offset) == PLUS_EXPR)
        {
          next_offset = TREE_OPERAND (offset, 0);
          offset = TREE_OPERAND (offset, 1);
        }
      else
        next_offset = NULL;
      if (host_integerp (offset, 1))
        {
          /* Any overflow in calculating offset_bits won't change
             the alignment.  */
          unsigned offset_bits
            = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

          if (offset_bits)
            inner = MIN (inner, (offset_bits & -offset_bits));
        }
      else if (TREE_CODE (offset) == MULT_EXPR
               && host_integerp (TREE_OPERAND (offset, 1), 1))
        {
          /* Any overflow in calculating offset_factor won't change
             the alignment.  */
          unsigned offset_factor
            = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
               * BITS_PER_UNIT);

          if (offset_factor)
            inner = MIN (inner, (offset_factor & -offset_factor));
        }
      else
        {
          inner = MIN (inner, BITS_PER_UNIT);
          break;
        }
      offset = next_offset;
    }
  /* Alignment is innermost object alignment adjusted by the constant
     and non-constant offset parts.  */
  align = MIN (align, inner);

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
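
/* Worked example (added for illustration): suppose
   get_object_alignment_1 reports align == 256 and bitpos == 16, i.e.
   the address is known to be 16 bits past a 256-bit boundary.  The
   strongest guarantee that holds for the address itself is then the
   lowest set bit of that offset: 16 & -16 == 16 bits, i.e. 2-byte
   alignment, which is what the (bitpos & -bitpos) adjustment above
   computes.  */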

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an
             approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
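
/* Usage sketch (added; a hypothetical caller, not code from this
   file): a folder that wants the compile-time length of a literal
   would do something like

     tree len = c_strlen (CALL_EXPR_ARG (exp, 0), 0);
     if (len)
       ... use the ssizetype constant LEN ...

   passing ONLY_VALUE == 0 because the result may be emitted into the
   instruction stream.  */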

/* Return a char pointer for a C string if it is a string constant
   or a sum of a string constant and an integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}

/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);

      if (ch)
        ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
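
/* Worked example (added for illustration): on a target where
   BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN are both false, reading
   "abcd" in a 32-bit integer mode puts byte 0 at bit 0, giving the
   CONST_INT 0x64636261 ('a' == 0x61 in the low byte).  Once the
   terminating NUL has been read, CH stays zero, so the remaining
   bytes of a wider mode are zero-filled.  */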

/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = TREE_INT_CST_LOW (cst);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
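
/* Illustrative example (added): with 8-bit target chars, the
   INTEGER_CST 65 passes both masks unchanged, so 'A' is stored
   through *P and zero is returned.  The interesting failure is a
   target whose chars are wider than the host's: with 16-bit target
   chars, the constant 0x1234 survives the CHAR_TYPE_SIZE mask but
   not the HOST_BITS_PER_CHAR mask, so VAL != HOSTVAL and the
   function returns 1.  */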

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
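
/* Usage note (added for illustration): a source-level call such as
   __builtin_return_address (0) reaches this function with COUNT == 0
   and skips the loop entirely, while __builtin_frame_address (2)
   walks the dynamic chain twice; each extra level costs one load of
   a saved frame pointer through gen_frame_mem above.  */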

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode,
                                                       receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
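
/* Buffer layout recap (added; it restates the stores above): word 0
   of the setjmp buffer receives targetm.builtin_setjmp_frame_value (),
   word 1 the address of RECEIVER_LABEL, and the area from word 2 on
   is the machine-dependent stack save area filled by emit_stack_save.
   expand_builtin_longjmp below reads the same three pieces back from
   exactly these offsets.  */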

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be decrementing fp by
       STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[]
        = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  Similarly, we must block
     (frame-related) register values to be used across this code.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}
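
/* Usage note (added for illustration): the only valid source form is
   __builtin_longjmp (buf, 1); the gcc_assert above enforces the
   constant 1 because that is the value __builtin_setjmp yields on
   the abnormal return path, keeping the two builtins consistent.  */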

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>;"
               " using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
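
/* Usage sketch (added for illustration): a call such as

     __builtin_prefetch (p + stride, 0, 3);

   arrives here with ARG1 == 0 (a read) and ARG2 == 3 (maximum
   temporal locality); those are also the defaults substituted above
   when the optional arguments are omitted.  */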

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is a SAVE_EXPR that has not been resolved yet, MEM_ATTRS
     can still be derived from its expression; for expr->a.b only
     <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
\f
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}
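
/* Layout recap (added; it restates the loop above): the block starts
   with a pointer-sized slot for the incoming arg pointer, then an
   optional pointer-sized slot for the structure value address, and
   finally one slot per argument register, each aligned to its mode
   and laid out in register-number order.  apply_args_mode[] records
   the mode chosen for each register, or VOIDmode if it is unused.  */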
1370
1371 /* Return the size required for the block returned by __builtin_apply,
1372 and initialize apply_result_mode. */
1373
1374 static int
1375 apply_result_size (void)
1376 {
1377 static int size = -1;
1378 int align, regno;
1379 enum machine_mode mode;
1380
1381 /* The values computed by this function never change. */
1382 if (size < 0)
1383 {
1384 size = 0;
1385
1386 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1387 if (targetm.calls.function_value_regno_p (regno))
1388 {
1389 mode = targetm.calls.get_raw_result_mode (regno);
1390
1391 gcc_assert (mode != VOIDmode);
1392
1393 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1394 if (size % align != 0)
1395 size = CEIL (size, align) * align;
1396 size += GET_MODE_SIZE (mode);
1397 apply_result_mode[regno] = mode;
1398 }
1399 else
1400 apply_result_mode[regno] = VOIDmode;
1401
1402 /* Allow targets that use untyped_call and untyped_return to override
1403 the size so that machine-specific information can be stored here. */
1404 #ifdef APPLY_RESULT_SIZE
1405 size = APPLY_RESULT_SIZE;
1406 #endif
1407 }
1408 return size;
1409 }
1410
1411 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1412 /* Create a vector describing the result block RESULT. If SAVEP is true,
1413 the result block is used to save the values; otherwise it is used to
1414 restore the values. */
1415
1416 static rtx
1417 result_vector (int savep, rtx result)
1418 {
1419 int regno, size, align, nelts;
1420 enum machine_mode mode;
1421 rtx reg, mem;
1422 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1423
1424 size = nelts = 0;
1425 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1426 if ((mode = apply_result_mode[regno]) != VOIDmode)
1427 {
1428 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1429 if (size % align != 0)
1430 size = CEIL (size, align) * align;
1431 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1432 mem = adjust_address (result, mode, size);
1433 savevec[nelts++] = (savep
1434 ? gen_rtx_SET (VOIDmode, mem, reg)
1435 : gen_rtx_SET (VOIDmode, reg, mem));
1436 size += GET_MODE_SIZE (mode);
1437 }
1438 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1439 }
1440 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1441
1442 /* Save the state required to perform an untyped call with the same
1443 arguments as were passed to the current function. */
1444
1445 static rtx
1446 expand_builtin_apply_args_1 (void)
1447 {
1448 rtx registers, tem;
1449 int size, align, regno;
1450 enum machine_mode mode;
1451 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1452
1453 /* Create a block where the arg-pointer, structure value address,
1454 and argument registers can be saved. */
1455 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1456
1457 /* Walk past the arg-pointer and structure value address. */
1458 size = GET_MODE_SIZE (Pmode);
1459 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1460 size += GET_MODE_SIZE (Pmode);
1461
1462 /* Save each register used in calling a function to the block. */
1463 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1464 if ((mode = apply_args_mode[regno]) != VOIDmode)
1465 {
1466 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1467 if (size % align != 0)
1468 size = CEIL (size, align) * align;
1469
1470 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1471
1472 emit_move_insn (adjust_address (registers, mode, size), tem);
1473 size += GET_MODE_SIZE (mode);
1474 }
1475
1476 /* Save the arg pointer to the block. */
1477 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1478 #ifdef STACK_GROWS_DOWNWARD
1479 /* We need the pointer as the caller actually passed them to us, not
1480 as we might have pretended they were passed. Make sure it's a valid
1481 operand, as emit_move_insn isn't expected to handle a PLUS. */
1482 tem
1483 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1484 NULL_RTX);
1485 #endif
1486 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1487
1488 size = GET_MODE_SIZE (Pmode);
1489
1490 /* Save the structure value address unless this is passed as an
1491 "invisible" first argument. */
1492 if (struct_incoming_value)
1493 {
1494 emit_move_insn (adjust_address (registers, Pmode, size),
1495 copy_to_reg (struct_incoming_value));
1496 size += GET_MODE_SIZE (Pmode);
1497 }
1498
1499 /* Return the address of the block. */
1500 return copy_addr_to_reg (XEXP (registers, 0));
1501 }
1502
1503 /* __builtin_apply_args returns block of memory allocated on
1504 the stack into which is stored the arg pointer, structure
1505 value address, static chain, and all the registers that might
1506 possibly be used in performing a function call. The code is
1507 moved to the start of the function so the incoming values are
1508 saved. */
1509
1510 static rtx
1511 expand_builtin_apply_args (void)
1512 {
1513 /* Don't do __builtin_apply_args more than once in a function.
1514 Save the result of the first call and reuse it. */
1515 if (apply_args_value != 0)
1516 return apply_args_value;
1517 {
1518 /* When this function is called, it means that registers must be
1519 saved on entry to this function. So we migrate the
1520 call to the first insn of this function. */
1521 rtx temp;
1522 rtx seq;
1523
1524 start_sequence ();
1525 temp = expand_builtin_apply_args_1 ();
1526 seq = get_insns ();
1527 end_sequence ();
1528
1529 apply_args_value = temp;
1530
1531 /* Put the insns after the NOTE that starts the function.
1532 If this is inside a start_sequence, make the outer-level insn
1533 chain current, so the code is placed at the start of the
1534 function. If internal_arg_pointer is a non-virtual pseudo,
1535 it needs to be placed after the function that initializes
1536 that pseudo. */
1537 push_topmost_sequence ();
1538 if (REG_P (crtl->args.internal_arg_pointer)
1539 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1540 emit_insn_before (seq, parm_birth_insn);
1541 else
1542 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1543 pop_topmost_sequence ();
1544 return temp;
1545 }
1546 }
1547
1548 /* Perform an untyped call and save the state required to perform an
1549 untyped return of whatever value was returned by the given function. */
1550
1551 static rtx
1552 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1553 {
1554 int size, align, regno;
1555 enum machine_mode mode;
1556 rtx incoming_args, result, reg, dest, src, call_insn;
1557 rtx old_stack_level = 0;
1558 rtx call_fusage = 0;
1559 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1560
1561 arguments = convert_memory_address (Pmode, arguments);
1562
1563 /* Create a block where the return registers can be saved. */
1564 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1565
1566 /* Fetch the arg pointer from the ARGUMENTS block. */
1567 incoming_args = gen_reg_rtx (Pmode);
1568 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1569 #ifndef STACK_GROWS_DOWNWARD
1570 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1571 incoming_args, 0, OPTAB_LIB_WIDEN);
1572 #endif
1573
1574 /* Push a new argument block and copy the arguments. Do not allow
1575 the (potential) memcpy call below to interfere with our stack
1576 manipulations. */
1577 do_pending_stack_adjust ();
1578 NO_DEFER_POP;
1579
1580 /* Save the stack with nonlocal if available. */
1581 #ifdef HAVE_save_stack_nonlocal
1582 if (HAVE_save_stack_nonlocal)
1583 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1584 else
1585 #endif
1586 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1587
1588 /* Allocate a block of memory onto the stack and copy the memory
1589 arguments to the outgoing arguments address. We can pass TRUE
1590 as the 4th argument because we just saved the stack pointer
1591 and will restore it right after the call. */
1592 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1593
1594 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1595 may have already set current_function_calls_alloca to true.
1596 current_function_calls_alloca won't be set if argsize is zero,
1597 so we have to guarantee need_drap is true here. */
1598 if (SUPPORTS_STACK_ALIGNMENT)
1599 crtl->need_drap = true;
1600
1601 dest = virtual_outgoing_args_rtx;
1602 #ifndef STACK_GROWS_DOWNWARD
1603 if (CONST_INT_P (argsize))
1604 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1605 else
1606 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1607 #endif
1608 dest = gen_rtx_MEM (BLKmode, dest);
1609 set_mem_align (dest, PARM_BOUNDARY);
1610 src = gen_rtx_MEM (BLKmode, incoming_args);
1611 set_mem_align (src, PARM_BOUNDARY);
1612 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1613
1614 /* Refer to the argument block. */
1615 apply_args_size ();
1616 arguments = gen_rtx_MEM (BLKmode, arguments);
1617 set_mem_align (arguments, PARM_BOUNDARY);
1618
1619 /* Walk past the arg-pointer and structure value address. */
1620 size = GET_MODE_SIZE (Pmode);
1621 if (struct_value)
1622 size += GET_MODE_SIZE (Pmode);
1623
1624 /* Restore each of the registers previously saved. Make USE insns
1625 for each of these registers for use in making the call. */
1626 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1627 if ((mode = apply_args_mode[regno]) != VOIDmode)
1628 {
1629 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1630 if (size % align != 0)
1631 size = CEIL (size, align) * align;
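          /* For example, size == 6 with align == 4 rounds size up to 8,
             so the register load below reads from an aligned slot.  */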
1632 reg = gen_rtx_REG (mode, regno);
1633 emit_move_insn (reg, adjust_address (arguments, mode, size));
1634 use_reg (&call_fusage, reg);
1635 size += GET_MODE_SIZE (mode);
1636 }
1637
1638 /* Restore the structure value address unless this is passed as an
1639 "invisible" first argument. */
1640 size = GET_MODE_SIZE (Pmode);
1641 if (struct_value)
1642 {
1643 rtx value = gen_reg_rtx (Pmode);
1644 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1645 emit_move_insn (struct_value, value);
1646 if (REG_P (struct_value))
1647 use_reg (&call_fusage, struct_value);
1648 size += GET_MODE_SIZE (Pmode);
1649 }
1650
1651 /* All arguments and registers used for the call are set up by now! */
1652 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1653
1654 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1655 and we don't want to load it into a register as an optimization,
1656 because prepare_call_address already did it if it should be done. */
1657 if (GET_CODE (function) != SYMBOL_REF)
1658 function = memory_address (FUNCTION_MODE, function);
1659
1660 /* Generate the actual call instruction and save the return value. */
1661 #ifdef HAVE_untyped_call
1662 if (HAVE_untyped_call)
1663 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1664 result, result_vector (1, result)));
1665 else
1666 #endif
1667 #ifdef HAVE_call_value
1668 if (HAVE_call_value)
1669 {
1670 rtx valreg = 0;
1671
1672 /* Locate the unique return register. It is not possible to
1673 express a call that sets more than one return register using
1674 call_value; use untyped_call for that. In fact, untyped_call
1675 only needs to save the return registers in the given block. */
1676 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1677 if ((mode = apply_result_mode[regno]) != VOIDmode)
1678 {
1679 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1680
1681 valreg = gen_rtx_REG (mode, regno);
1682 }
1683
1684 emit_call_insn (GEN_CALL_VALUE (valreg,
1685 gen_rtx_MEM (FUNCTION_MODE, function),
1686 const0_rtx, NULL_RTX, const0_rtx));
1687
1688 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1689 }
1690 else
1691 #endif
1692 gcc_unreachable ();
1693
1694 /* Find the CALL insn we just emitted, and attach the register usage
1695 information. */
1696 call_insn = last_call_insn ();
1697 add_function_usage_to (call_insn, call_fusage);
1698
1699 /* Restore the stack. */
1700 #ifdef HAVE_save_stack_nonlocal
1701 if (HAVE_save_stack_nonlocal)
1702 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1703 else
1704 #endif
1705 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1706 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1707
1708 OK_DEFER_POP;
1709
1710 /* Return the address of the result block. */
1711 result = copy_addr_to_reg (XEXP (result, 0));
1712 return convert_memory_address (ptr_mode, result);
1713 }
1714
1715 /* Perform an untyped return. */
1716
1717 static void
1718 expand_builtin_return (rtx result)
1719 {
1720 int size, align, regno;
1721 enum machine_mode mode;
1722 rtx reg;
1723 rtx call_fusage = 0;
1724
1725 result = convert_memory_address (Pmode, result);
1726
1727 apply_result_size ();
1728 result = gen_rtx_MEM (BLKmode, result);
1729
1730 #ifdef HAVE_untyped_return
1731 if (HAVE_untyped_return)
1732 {
1733 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1734 emit_barrier ();
1735 return;
1736 }
1737 #endif
1738
1739 /* Restore the return value and note that each value is used. */
1740 size = 0;
1741 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1742 if ((mode = apply_result_mode[regno]) != VOIDmode)
1743 {
1744 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1745 if (size % align != 0)
1746 size = CEIL (size, align) * align;
1747 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1748 emit_move_insn (reg, adjust_address (result, mode, size));
1749
1750 push_to_sequence (call_fusage);
1751 emit_use (reg);
1752 call_fusage = get_insns ();
1753 end_sequence ();
1754 size += GET_MODE_SIZE (mode);
1755 }
1756
1757 /* Put the USE insns before the return. */
1758 emit_insn (call_fusage);
1759
1760 /* Return whatever value was restored by jumping directly to the end
1761 of the function. */
1762 expand_naked_return ();
1763 }
1764
1765 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1766
1767 static enum type_class
1768 type_to_class (tree type)
1769 {
1770 switch (TREE_CODE (type))
1771 {
1772 case VOID_TYPE: return void_type_class;
1773 case INTEGER_TYPE: return integer_type_class;
1774 case ENUMERAL_TYPE: return enumeral_type_class;
1775 case BOOLEAN_TYPE: return boolean_type_class;
1776 case POINTER_TYPE: return pointer_type_class;
1777 case REFERENCE_TYPE: return reference_type_class;
1778 case OFFSET_TYPE: return offset_type_class;
1779 case REAL_TYPE: return real_type_class;
1780 case COMPLEX_TYPE: return complex_type_class;
1781 case FUNCTION_TYPE: return function_type_class;
1782 case METHOD_TYPE: return method_type_class;
1783 case RECORD_TYPE: return record_type_class;
1784 case UNION_TYPE:
1785 case QUAL_UNION_TYPE: return union_type_class;
1786 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1787 ? string_type_class : array_type_class);
1788 case LANG_TYPE: return lang_type_class;
1789 default: return no_type_class;
1790 }
1791 }
1792
1793 /* Expand a call EXP to __builtin_classify_type. */
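
/* For instance (illustrative only), __builtin_classify_type (42)
   expands to the constant integer_type_class and
   __builtin_classify_type (3.14) to real_type_class, per the mapping
   in type_to_class above.  */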
1794
1795 static rtx
1796 expand_builtin_classify_type (tree exp)
1797 {
1798 if (call_expr_nargs (exp))
1799 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1800 return GEN_INT (no_type_class);
1801 }
1802
1803 /* This helper macro, meant to be used in mathfn_built_in below,
1804 determines which among a set of three builtin math functions is
1805 appropriate for a given type mode. The `F' and `L' cases are
1806 automatically generated from the `double' case. */
1807 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1808 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1809 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1810 fcodel = BUILT_IN_MATHFN##L ; break;
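
/* For example, CASE_MATHFN (BUILT_IN_SQRT) expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   covering the double, float and long double variants in one case.  */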
1811 /* Similar to above, but appends _R after any F/L suffix. */
1812 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1813 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1814 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1815 fcodel = BUILT_IN_MATHFN##L_R ; break;
1816
1817 /* Return the mathematical function equivalent to FN but operating directly on
1818 TYPE, if available. If IMPLICIT_P is true use the implicit builtin declaration,
1819 otherwise use the explicit declaration. If we can't do the conversion,
1820 return zero. */
1821
1822 static tree
1823 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1824 {
1825 enum built_in_function fcode, fcodef, fcodel, fcode2;
1826
1827 switch (fn)
1828 {
1829 CASE_MATHFN (BUILT_IN_ACOS)
1830 CASE_MATHFN (BUILT_IN_ACOSH)
1831 CASE_MATHFN (BUILT_IN_ASIN)
1832 CASE_MATHFN (BUILT_IN_ASINH)
1833 CASE_MATHFN (BUILT_IN_ATAN)
1834 CASE_MATHFN (BUILT_IN_ATAN2)
1835 CASE_MATHFN (BUILT_IN_ATANH)
1836 CASE_MATHFN (BUILT_IN_CBRT)
1837 CASE_MATHFN (BUILT_IN_CEIL)
1838 CASE_MATHFN (BUILT_IN_CEXPI)
1839 CASE_MATHFN (BUILT_IN_COPYSIGN)
1840 CASE_MATHFN (BUILT_IN_COS)
1841 CASE_MATHFN (BUILT_IN_COSH)
1842 CASE_MATHFN (BUILT_IN_DREM)
1843 CASE_MATHFN (BUILT_IN_ERF)
1844 CASE_MATHFN (BUILT_IN_ERFC)
1845 CASE_MATHFN (BUILT_IN_EXP)
1846 CASE_MATHFN (BUILT_IN_EXP10)
1847 CASE_MATHFN (BUILT_IN_EXP2)
1848 CASE_MATHFN (BUILT_IN_EXPM1)
1849 CASE_MATHFN (BUILT_IN_FABS)
1850 CASE_MATHFN (BUILT_IN_FDIM)
1851 CASE_MATHFN (BUILT_IN_FLOOR)
1852 CASE_MATHFN (BUILT_IN_FMA)
1853 CASE_MATHFN (BUILT_IN_FMAX)
1854 CASE_MATHFN (BUILT_IN_FMIN)
1855 CASE_MATHFN (BUILT_IN_FMOD)
1856 CASE_MATHFN (BUILT_IN_FREXP)
1857 CASE_MATHFN (BUILT_IN_GAMMA)
1858 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1859 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1860 CASE_MATHFN (BUILT_IN_HYPOT)
1861 CASE_MATHFN (BUILT_IN_ILOGB)
1862 CASE_MATHFN (BUILT_IN_ICEIL)
1863 CASE_MATHFN (BUILT_IN_IFLOOR)
1864 CASE_MATHFN (BUILT_IN_INF)
1865 CASE_MATHFN (BUILT_IN_IRINT)
1866 CASE_MATHFN (BUILT_IN_IROUND)
1867 CASE_MATHFN (BUILT_IN_ISINF)
1868 CASE_MATHFN (BUILT_IN_J0)
1869 CASE_MATHFN (BUILT_IN_J1)
1870 CASE_MATHFN (BUILT_IN_JN)
1871 CASE_MATHFN (BUILT_IN_LCEIL)
1872 CASE_MATHFN (BUILT_IN_LDEXP)
1873 CASE_MATHFN (BUILT_IN_LFLOOR)
1874 CASE_MATHFN (BUILT_IN_LGAMMA)
1875 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1876 CASE_MATHFN (BUILT_IN_LLCEIL)
1877 CASE_MATHFN (BUILT_IN_LLFLOOR)
1878 CASE_MATHFN (BUILT_IN_LLRINT)
1879 CASE_MATHFN (BUILT_IN_LLROUND)
1880 CASE_MATHFN (BUILT_IN_LOG)
1881 CASE_MATHFN (BUILT_IN_LOG10)
1882 CASE_MATHFN (BUILT_IN_LOG1P)
1883 CASE_MATHFN (BUILT_IN_LOG2)
1884 CASE_MATHFN (BUILT_IN_LOGB)
1885 CASE_MATHFN (BUILT_IN_LRINT)
1886 CASE_MATHFN (BUILT_IN_LROUND)
1887 CASE_MATHFN (BUILT_IN_MODF)
1888 CASE_MATHFN (BUILT_IN_NAN)
1889 CASE_MATHFN (BUILT_IN_NANS)
1890 CASE_MATHFN (BUILT_IN_NEARBYINT)
1891 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1892 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1893 CASE_MATHFN (BUILT_IN_POW)
1894 CASE_MATHFN (BUILT_IN_POWI)
1895 CASE_MATHFN (BUILT_IN_POW10)
1896 CASE_MATHFN (BUILT_IN_REMAINDER)
1897 CASE_MATHFN (BUILT_IN_REMQUO)
1898 CASE_MATHFN (BUILT_IN_RINT)
1899 CASE_MATHFN (BUILT_IN_ROUND)
1900 CASE_MATHFN (BUILT_IN_SCALB)
1901 CASE_MATHFN (BUILT_IN_SCALBLN)
1902 CASE_MATHFN (BUILT_IN_SCALBN)
1903 CASE_MATHFN (BUILT_IN_SIGNBIT)
1904 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1905 CASE_MATHFN (BUILT_IN_SIN)
1906 CASE_MATHFN (BUILT_IN_SINCOS)
1907 CASE_MATHFN (BUILT_IN_SINH)
1908 CASE_MATHFN (BUILT_IN_SQRT)
1909 CASE_MATHFN (BUILT_IN_TAN)
1910 CASE_MATHFN (BUILT_IN_TANH)
1911 CASE_MATHFN (BUILT_IN_TGAMMA)
1912 CASE_MATHFN (BUILT_IN_TRUNC)
1913 CASE_MATHFN (BUILT_IN_Y0)
1914 CASE_MATHFN (BUILT_IN_Y1)
1915 CASE_MATHFN (BUILT_IN_YN)
1916
1917 default:
1918 return NULL_TREE;
1919 }
1920
1921 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1922 fcode2 = fcode;
1923 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1924 fcode2 = fcodef;
1925 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1926 fcode2 = fcodel;
1927 else
1928 return NULL_TREE;
1929
1930 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1931 return NULL_TREE;
1932
1933 return builtin_decl_explicit (fcode2);
1934 }
1935
1936 /* Like mathfn_built_in_1(), but always use the implicit array. */
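
/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) yields
   the implicit declaration of sinf, and NULL_TREE when TYPE is not
   float, double or long double.  */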
1937
1938 tree
1939 mathfn_built_in (tree type, enum built_in_function fn)
1940 {
1941 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1942 }
1943
1944 /* If errno must be maintained, expand the RTL to check if the result,
1945 TARGET, of a built-in function call, EXP, is NaN, and if so set
1946 errno to EDOM. */
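
/* Schematically (an illustrative sketch, not the emitted RTL), the
   check below expands to

     if (target == target)
       goto lab;
     errno = EDOM;
   lab:;

   relying on a NaN comparing unequal to itself; when the EDOM store
   cannot be emitted directly, the library call is re-issued instead.  */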
1947
1948 static void
1949 expand_errno_check (tree exp, rtx target)
1950 {
1951 rtx lab = gen_label_rtx ();
1952
1953 /* Test the result; if it is NaN, set errno=EDOM because
1954 the argument was not in the domain. */
1955 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1956 NULL_RTX, NULL_RTX, lab,
1957 /* The jump is very likely. */
1958 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1959
1960 #ifdef TARGET_EDOM
1961 /* If this built-in doesn't throw an exception, set errno directly. */
1962 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1963 {
1964 #ifdef GEN_ERRNO_RTX
1965 rtx errno_rtx = GEN_ERRNO_RTX;
1966 #else
1967 rtx errno_rtx
1968 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1969 #endif
1970 emit_move_insn (errno_rtx,
1971 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1972 emit_label (lab);
1973 return;
1974 }
1975 #endif
1976
1977 /* Make sure the library call isn't expanded as a tail call. */
1978 CALL_EXPR_TAILCALL (exp) = 0;
1979
1980 /* We can't set errno=EDOM directly; let the library call do it.
1981 Pop the arguments right away in case the call gets deleted. */
1982 NO_DEFER_POP;
1983 expand_call (exp, target, 0);
1984 OK_DEFER_POP;
1985 emit_label (lab);
1986 }
1987
1988 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1989 Return NULL_RTX if a normal call should be emitted rather than expanding
1990 the function in-line. EXP is the expression that is a call to the builtin
1991 function; if convenient, the result should be placed in TARGET.
1992 SUBTARGET may be used as the target for computing one of EXP's operands. */
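
/* For example (illustrative), __builtin_sqrt (x) compiled with
   -fno-math-errno can expand directly through sqrt_optab to a single
   machine instruction where one exists; with errno handling enabled
   the expansion also emits the NaN test from expand_errno_check.  */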
1993
1994 static rtx
1995 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1996 {
1997 optab builtin_optab;
1998 rtx op0, insns;
1999 tree fndecl = get_callee_fndecl (exp);
2000 enum machine_mode mode;
2001 bool errno_set = false;
2002 bool try_widening = false;
2003 tree arg;
2004
2005 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2006 return NULL_RTX;
2007
2008 arg = CALL_EXPR_ARG (exp, 0);
2009
2010 switch (DECL_FUNCTION_CODE (fndecl))
2011 {
2012 CASE_FLT_FN (BUILT_IN_SQRT):
2013 errno_set = ! tree_expr_nonnegative_p (arg);
2014 try_widening = true;
2015 builtin_optab = sqrt_optab;
2016 break;
2017 CASE_FLT_FN (BUILT_IN_EXP):
2018 errno_set = true; builtin_optab = exp_optab; break;
2019 CASE_FLT_FN (BUILT_IN_EXP10):
2020 CASE_FLT_FN (BUILT_IN_POW10):
2021 errno_set = true; builtin_optab = exp10_optab; break;
2022 CASE_FLT_FN (BUILT_IN_EXP2):
2023 errno_set = true; builtin_optab = exp2_optab; break;
2024 CASE_FLT_FN (BUILT_IN_EXPM1):
2025 errno_set = true; builtin_optab = expm1_optab; break;
2026 CASE_FLT_FN (BUILT_IN_LOGB):
2027 errno_set = true; builtin_optab = logb_optab; break;
2028 CASE_FLT_FN (BUILT_IN_LOG):
2029 errno_set = true; builtin_optab = log_optab; break;
2030 CASE_FLT_FN (BUILT_IN_LOG10):
2031 errno_set = true; builtin_optab = log10_optab; break;
2032 CASE_FLT_FN (BUILT_IN_LOG2):
2033 errno_set = true; builtin_optab = log2_optab; break;
2034 CASE_FLT_FN (BUILT_IN_LOG1P):
2035 errno_set = true; builtin_optab = log1p_optab; break;
2036 CASE_FLT_FN (BUILT_IN_ASIN):
2037 builtin_optab = asin_optab; break;
2038 CASE_FLT_FN (BUILT_IN_ACOS):
2039 builtin_optab = acos_optab; break;
2040 CASE_FLT_FN (BUILT_IN_TAN):
2041 builtin_optab = tan_optab; break;
2042 CASE_FLT_FN (BUILT_IN_ATAN):
2043 builtin_optab = atan_optab; break;
2044 CASE_FLT_FN (BUILT_IN_FLOOR):
2045 builtin_optab = floor_optab; break;
2046 CASE_FLT_FN (BUILT_IN_CEIL):
2047 builtin_optab = ceil_optab; break;
2048 CASE_FLT_FN (BUILT_IN_TRUNC):
2049 builtin_optab = btrunc_optab; break;
2050 CASE_FLT_FN (BUILT_IN_ROUND):
2051 builtin_optab = round_optab; break;
2052 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2053 builtin_optab = nearbyint_optab;
2054 if (flag_trapping_math)
2055 break;
2056 /* Else fall through and expand as rint. */
2057 CASE_FLT_FN (BUILT_IN_RINT):
2058 builtin_optab = rint_optab; break;
2059 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2060 builtin_optab = significand_optab; break;
2061 default:
2062 gcc_unreachable ();
2063 }
2064
2065 /* Make a suitable register to place result in. */
2066 mode = TYPE_MODE (TREE_TYPE (exp));
2067
2068 if (! flag_errno_math || ! HONOR_NANS (mode))
2069 errno_set = false;
2070
2071 /* Before working hard, check whether the instruction is available, but try
2072 to widen the mode for specific operations. */
2073 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2074 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2075 && (!errno_set || !optimize_insn_for_size_p ()))
2076 {
2077 rtx result = gen_reg_rtx (mode);
2078
2079 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2080 need to expand the argument again. This way, we will not perform
2081 side-effects more than once. */
2082 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2083
2084 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2085
2086 start_sequence ();
2087
2088 /* Compute into RESULT.
2089 Set RESULT to wherever the result comes back. */
2090 result = expand_unop (mode, builtin_optab, op0, result, 0);
2091
2092 if (result != 0)
2093 {
2094 if (errno_set)
2095 expand_errno_check (exp, result);
2096
2097 /* Output the entire sequence. */
2098 insns = get_insns ();
2099 end_sequence ();
2100 emit_insn (insns);
2101 return result;
2102 }
2103
2104 /* If we were unable to expand via the builtin, stop the sequence
2105 (without outputting the insns) and call to the library function
2106 with the stabilized argument list. */
2107 end_sequence ();
2108 }
2109
2110 return expand_call (exp, target, target == const0_rtx);
2111 }
2112
2113 /* Expand a call to one of the builtin binary math functions (pow, atan2, fmod, etc.).
2114 Return NULL_RTX if a normal call should be emitted rather than expanding the
2115 function in-line. EXP is the expression that is a call to the builtin
2116 function; if convenient, the result should be placed in TARGET.
2117 SUBTARGET may be used as the target for computing one of EXP's
2118 operands. */
2119
2120 static rtx
2121 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2122 {
2123 optab builtin_optab;
2124 rtx op0, op1, insns, result;
2125 int op1_type = REAL_TYPE;
2126 tree fndecl = get_callee_fndecl (exp);
2127 tree arg0, arg1;
2128 enum machine_mode mode;
2129 bool errno_set = true;
2130
2131 switch (DECL_FUNCTION_CODE (fndecl))
2132 {
2133 CASE_FLT_FN (BUILT_IN_SCALBN):
2134 CASE_FLT_FN (BUILT_IN_SCALBLN):
2135 CASE_FLT_FN (BUILT_IN_LDEXP):
2136 op1_type = INTEGER_TYPE;
2137 default:
2138 break;
2139 }
2140
2141 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2142 return NULL_RTX;
2143
2144 arg0 = CALL_EXPR_ARG (exp, 0);
2145 arg1 = CALL_EXPR_ARG (exp, 1);
2146
2147 switch (DECL_FUNCTION_CODE (fndecl))
2148 {
2149 CASE_FLT_FN (BUILT_IN_POW):
2150 builtin_optab = pow_optab; break;
2151 CASE_FLT_FN (BUILT_IN_ATAN2):
2152 builtin_optab = atan2_optab; break;
2153 CASE_FLT_FN (BUILT_IN_SCALB):
2154 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2155 return 0;
2156 builtin_optab = scalb_optab; break;
2157 CASE_FLT_FN (BUILT_IN_SCALBN):
2158 CASE_FLT_FN (BUILT_IN_SCALBLN):
2159 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2160 return 0;
2161 /* Fall through... */
2162 CASE_FLT_FN (BUILT_IN_LDEXP):
2163 builtin_optab = ldexp_optab; break;
2164 CASE_FLT_FN (BUILT_IN_FMOD):
2165 builtin_optab = fmod_optab; break;
2166 CASE_FLT_FN (BUILT_IN_REMAINDER):
2167 CASE_FLT_FN (BUILT_IN_DREM):
2168 builtin_optab = remainder_optab; break;
2169 default:
2170 gcc_unreachable ();
2171 }
2172
2173 /* Make a suitable register to place result in. */
2174 mode = TYPE_MODE (TREE_TYPE (exp));
2175
2176 /* Before working hard, check whether the instruction is available. */
2177 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2178 return NULL_RTX;
2179
2180 result = gen_reg_rtx (mode);
2181
2182 if (! flag_errno_math || ! HONOR_NANS (mode))
2183 errno_set = false;
2184
2185 if (errno_set && optimize_insn_for_size_p ())
2186 return 0;
2187
2188 /* Always stabilize the argument list. */
2189 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2190 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2191
2192 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2193 op1 = expand_normal (arg1);
2194
2195 start_sequence ();
2196
2197 /* Compute into RESULT.
2198 Set RESULT to wherever the result comes back. */
2199 result = expand_binop (mode, builtin_optab, op0, op1,
2200 result, 0, OPTAB_DIRECT);
2201
2202 /* If we were unable to expand via the builtin, stop the sequence
2203 (without outputting the insns) and call to the library function
2204 with the stabilized argument list. */
2205 if (result == 0)
2206 {
2207 end_sequence ();
2208 return expand_call (exp, target, target == const0_rtx);
2209 }
2210
2211 if (errno_set)
2212 expand_errno_check (exp, result);
2213
2214 /* Output the entire sequence. */
2215 insns = get_insns ();
2216 end_sequence ();
2217 emit_insn (insns);
2218
2219 return result;
2220 }
2221
2222 /* Expand a call to one of the builtin ternary math functions (fma).
2223 Return NULL_RTX if a normal call should be emitted rather than expanding the
2224 function in-line. EXP is the expression that is a call to the builtin
2225 function; if convenient, the result should be placed in TARGET.
2226 SUBTARGET may be used as the target for computing one of EXP's
2227 operands. */
2228
2229 static rtx
2230 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2231 {
2232 optab builtin_optab;
2233 rtx op0, op1, op2, insns, result;
2234 tree fndecl = get_callee_fndecl (exp);
2235 tree arg0, arg1, arg2;
2236 enum machine_mode mode;
2237
2238 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2239 return NULL_RTX;
2240
2241 arg0 = CALL_EXPR_ARG (exp, 0);
2242 arg1 = CALL_EXPR_ARG (exp, 1);
2243 arg2 = CALL_EXPR_ARG (exp, 2);
2244
2245 switch (DECL_FUNCTION_CODE (fndecl))
2246 {
2247 CASE_FLT_FN (BUILT_IN_FMA):
2248 builtin_optab = fma_optab; break;
2249 default:
2250 gcc_unreachable ();
2251 }
2252
2253 /* Make a suitable register to place result in. */
2254 mode = TYPE_MODE (TREE_TYPE (exp));
2255
2256 /* Before working hard, check whether the instruction is available. */
2257 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2258 return NULL_RTX;
2259
2260 result = gen_reg_rtx (mode);
2261
2262 /* Always stabilize the argument list. */
2263 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2264 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2265 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2266
2267 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2268 op1 = expand_normal (arg1);
2269 op2 = expand_normal (arg2);
2270
2271 start_sequence ();
2272
2273 /* Compute into RESULT.
2274 Set RESULT to wherever the result comes back. */
2275 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2276 result, 0);
2277
2278 /* If we were unable to expand via the builtin, stop the sequence
2279 (without outputting the insns) and call to the library function
2280 with the stabilized argument list. */
2281 if (result == 0)
2282 {
2283 end_sequence ();
2284 return expand_call (exp, target, target == const0_rtx);
2285 }
2286
2287 /* Output the entire sequence. */
2288 insns = get_insns ();
2289 end_sequence ();
2290 emit_insn (insns);
2291
2292 return result;
2293 }
2294
2295 /* Expand a call to the builtin sin and cos math functions.
2296 Return NULL_RTX if a normal call should be emitted rather than expanding the
2297 function in-line. EXP is the expression that is a call to the builtin
2298 function; if convenient, the result should be placed in TARGET.
2299 SUBTARGET may be used as the target for computing one of EXP's
2300 operands. */
2301
2302 static rtx
2303 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2304 {
2305 optab builtin_optab;
2306 rtx op0, insns;
2307 tree fndecl = get_callee_fndecl (exp);
2308 enum machine_mode mode;
2309 tree arg;
2310
2311 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2312 return NULL_RTX;
2313
2314 arg = CALL_EXPR_ARG (exp, 0);
2315
2316 switch (DECL_FUNCTION_CODE (fndecl))
2317 {
2318 CASE_FLT_FN (BUILT_IN_SIN):
2319 CASE_FLT_FN (BUILT_IN_COS):
2320 builtin_optab = sincos_optab; break;
2321 default:
2322 gcc_unreachable ();
2323 }
2324
2325 /* Make a suitable register to place result in. */
2326 mode = TYPE_MODE (TREE_TYPE (exp));
2327
2328 /* Check if the sincos insn is available; if not, fall back
2329 to the sin or cos insn. */
2330 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2331 switch (DECL_FUNCTION_CODE (fndecl))
2332 {
2333 CASE_FLT_FN (BUILT_IN_SIN):
2334 builtin_optab = sin_optab; break;
2335 CASE_FLT_FN (BUILT_IN_COS):
2336 builtin_optab = cos_optab; break;
2337 default:
2338 gcc_unreachable ();
2339 }
2340
2341 /* Before working hard, check whether the instruction is available. */
2342 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2343 {
2344 rtx result = gen_reg_rtx (mode);
2345
2346 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2347 need to expand the argument again. This way, we will not perform
2348 side-effects more than once. */
2349 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2350
2351 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2352
2353 start_sequence ();
2354
2355 /* Compute into RESULT.
2356 Set RESULT to wherever the result comes back. */
2357 if (builtin_optab == sincos_optab)
2358 {
2359 int ok;
2360
2361 switch (DECL_FUNCTION_CODE (fndecl))
2362 {
2363 CASE_FLT_FN (BUILT_IN_SIN):
2364 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2365 break;
2366 CASE_FLT_FN (BUILT_IN_COS):
2367 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2368 break;
2369 default:
2370 gcc_unreachable ();
2371 }
2372 gcc_assert (ok);
2373 }
2374 else
2375 result = expand_unop (mode, builtin_optab, op0, result, 0);
2376
2377 if (result != 0)
2378 {
2379 /* Output the entire sequence. */
2380 insns = get_insns ();
2381 end_sequence ();
2382 emit_insn (insns);
2383 return result;
2384 }
2385
2386 /* If we were unable to expand via the builtin, stop the sequence
2387 (without outputting the insns) and call to the library function
2388 with the stabilized argument list. */
2389 end_sequence ();
2390 }
2391
2392 return expand_call (exp, target, target == const0_rtx);
2393 }
2394
2395 /* Given an interclass math builtin decl FNDECL and its argument ARG
2396 return an RTL instruction code that implements the functionality.
2397 If that isn't possible or available return CODE_FOR_nothing. */
2398
2399 static enum insn_code
2400 interclass_mathfn_icode (tree arg, tree fndecl)
2401 {
2402 bool errno_set = false;
2403 optab builtin_optab = unknown_optab;
2404 enum machine_mode mode;
2405
2406 switch (DECL_FUNCTION_CODE (fndecl))
2407 {
2408 CASE_FLT_FN (BUILT_IN_ILOGB):
2409 errno_set = true; builtin_optab = ilogb_optab; break;
2410 CASE_FLT_FN (BUILT_IN_ISINF):
2411 builtin_optab = isinf_optab; break;
2412 case BUILT_IN_ISNORMAL:
2413 case BUILT_IN_ISFINITE:
2414 CASE_FLT_FN (BUILT_IN_FINITE):
2415 case BUILT_IN_FINITED32:
2416 case BUILT_IN_FINITED64:
2417 case BUILT_IN_FINITED128:
2418 case BUILT_IN_ISINFD32:
2419 case BUILT_IN_ISINFD64:
2420 case BUILT_IN_ISINFD128:
2421 /* These builtins have no optabs (yet). */
2422 break;
2423 default:
2424 gcc_unreachable ();
2425 }
2426
2427 /* There's no easy way to detect the case we need to set EDOM. */
2428 if (flag_errno_math && errno_set)
2429 return CODE_FOR_nothing;
2430
2431 /* Optab mode depends on the mode of the input argument. */
2432 mode = TYPE_MODE (TREE_TYPE (arg));
2433
2434 if (builtin_optab)
2435 return optab_handler (builtin_optab, mode);
2436 return CODE_FOR_nothing;
2437 }
2438
2439 /* Expand a call to one of the builtin math functions that operate on
2440 a floating point argument and output an integer result (ilogb, isinf,
2441 isnan, etc.).
2442 Return 0 if a normal call should be emitted rather than expanding the
2443 function in-line. EXP is the expression that is a call to the builtin
2444 function; if convenient, the result should be placed in TARGET. */
2445
2446 static rtx
2447 expand_builtin_interclass_mathfn (tree exp, rtx target)
2448 {
2449 enum insn_code icode = CODE_FOR_nothing;
2450 rtx op0;
2451 tree fndecl = get_callee_fndecl (exp);
2452 enum machine_mode mode;
2453 tree arg;
2454
2455 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2456 return NULL_RTX;
2457
2458 arg = CALL_EXPR_ARG (exp, 0);
2459 icode = interclass_mathfn_icode (arg, fndecl);
2460 mode = TYPE_MODE (TREE_TYPE (arg));
2461
2462 if (icode != CODE_FOR_nothing)
2463 {
2464 struct expand_operand ops[1];
2465 rtx last = get_last_insn ();
2466 tree orig_arg = arg;
2467
2468 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2469 need to expand the argument again. This way, we will not perform
2470 side-effects more than once. */
2471 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2472
2473 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2474
2475 if (mode != GET_MODE (op0))
2476 op0 = convert_to_mode (mode, op0, 0);
2477
2478 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2479 if (maybe_legitimize_operands (icode, 0, 1, ops)
2480 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2481 return ops[0].value;
2482
2483 delete_insns_since (last);
2484 CALL_EXPR_ARG (exp, 0) = orig_arg;
2485 }
2486
2487 return NULL_RTX;
2488 }
2489
2490 /* Expand a call to the builtin sincos math function.
2491 Return NULL_RTX if a normal call should be emitted rather than expanding the
2492 function in-line. EXP is the expression that is a call to the builtin
2493 function. */
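
/* Illustrative use at the source level: sincos (x, &s, &c) computes
   both values with one sincos_optab insn when available, storing
   sin (x) through the second argument and cos (x) through the third.  */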
2494
2495 static rtx
2496 expand_builtin_sincos (tree exp)
2497 {
2498 rtx op0, op1, op2, target1, target2;
2499 enum machine_mode mode;
2500 tree arg, sinp, cosp;
2501 int result;
2502 location_t loc = EXPR_LOCATION (exp);
2503 tree alias_type, alias_off;
2504
2505 if (!validate_arglist (exp, REAL_TYPE,
2506 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2507 return NULL_RTX;
2508
2509 arg = CALL_EXPR_ARG (exp, 0);
2510 sinp = CALL_EXPR_ARG (exp, 1);
2511 cosp = CALL_EXPR_ARG (exp, 2);
2512
2513 /* Make a suitable register to place result in. */
2514 mode = TYPE_MODE (TREE_TYPE (arg));
2515
2516 /* Check if sincos insn is available, otherwise emit the call. */
2517 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2518 return NULL_RTX;
2519
2520 target1 = gen_reg_rtx (mode);
2521 target2 = gen_reg_rtx (mode);
2522
2523 op0 = expand_normal (arg);
2524 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2525 alias_off = build_int_cst (alias_type, 0);
2526 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2527 sinp, alias_off));
2528 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2529 cosp, alias_off));
2530
2531 /* Compute into target1 and target2.
2532 Set TARGET to wherever the result comes back. */
2533 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2534 gcc_assert (result);
2535
2536 /* Move target1 and target2 to the memory locations indicated
2537 by op1 and op2. */
2538 emit_move_insn (op1, target1);
2539 emit_move_insn (op2, target2);
2540
2541 return const0_rtx;
2542 }
2543
2544 /* Expand a call to the internal cexpi builtin to the sincos math function.
2545 EXP is the expression that is a call to the builtin function; if convenient,
2546 the result should be placed in TARGET. */
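
/* Mathematically (for orientation; not taken from this file),
   __builtin_cexpi (x) equals cexp (I * x), i.e. cos (x) + I * sin (x).
   The code below tries, in order, a sincos optab insn, a sincos
   libcall, and finally a cexp libcall on the complex argument
   0.0 + I * x.  */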
2547
2548 static rtx
2549 expand_builtin_cexpi (tree exp, rtx target)
2550 {
2551 tree fndecl = get_callee_fndecl (exp);
2552 tree arg, type;
2553 enum machine_mode mode;
2554 rtx op0, op1, op2;
2555 location_t loc = EXPR_LOCATION (exp);
2556
2557 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2558 return NULL_RTX;
2559
2560 arg = CALL_EXPR_ARG (exp, 0);
2561 type = TREE_TYPE (arg);
2562 mode = TYPE_MODE (TREE_TYPE (arg));
2563
2564 /* Try expanding via a sincos optab, fall back to emitting a libcall
2565 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2566 is only generated from sincos or cexp, or when either of them is available. */
2567 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2568 {
2569 op1 = gen_reg_rtx (mode);
2570 op2 = gen_reg_rtx (mode);
2571
2572 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2573
2574 /* Compute into op1 and op2. */
2575 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2576 }
2577 else if (targetm.libc_has_function (function_sincos))
2578 {
2579 tree call, fn = NULL_TREE;
2580 tree top1, top2;
2581 rtx op1a, op2a;
2582
2583 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2584 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2585 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2586 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2587 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2588 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2589 else
2590 gcc_unreachable ();
2591
2592 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2593 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2594 op1a = copy_addr_to_reg (XEXP (op1, 0));
2595 op2a = copy_addr_to_reg (XEXP (op2, 0));
2596 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2597 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2598
2599 /* Make sure not to fold the sincos call again. */
2600 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2601 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2602 call, 3, arg, top1, top2));
2603 }
2604 else
2605 {
2606 tree call, fn = NULL_TREE, narg;
2607 tree ctype = build_complex_type (type);
2608
2609 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2610 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2611 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2612 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2613 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2614 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2615 else
2616 gcc_unreachable ();
2617
2618 /* If we don't have a decl for cexp create one. This is the
2619 friendliest fallback if the user calls __builtin_cexpi
2620 on a target without full C99 function support. */
2621 if (fn == NULL_TREE)
2622 {
2623 tree fntype;
2624 const char *name = NULL;
2625
2626 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2627 name = "cexpf";
2628 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2629 name = "cexp";
2630 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2631 name = "cexpl";
2632
2633 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2634 fn = build_fn_decl (name, fntype);
2635 }
2636
2637 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2638 build_real (type, dconst0), arg);
2639
2640 /* Make sure not to fold the cexp call again. */
2641 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2642 return expand_expr (build_call_nary (ctype, call, 1, narg),
2643 target, VOIDmode, EXPAND_NORMAL);
2644 }
2645
2646 /* Now build the proper return type. */
2647 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2648 make_tree (TREE_TYPE (arg), op2),
2649 make_tree (TREE_TYPE (arg), op1)),
2650 target, VOIDmode, EXPAND_NORMAL);
2651 }
2652
2653 /* Conveniently construct a function call expression. FNDECL names the
2654 function to be called, N is the number of arguments, and the "..."
2655 parameters are the argument expressions. Unlike build_call_expr
2656 this doesn't fold the call, hence it will always return a CALL_EXPR. */
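
/* Typical use (as later in this file; a sketch, with made-up operands):

     tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
     tree call = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                        dest, src, len);

   which yields a bare CALL_EXPR ready to hand to expand_expr.  */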
2657
2658 static tree
2659 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2660 {
2661 va_list ap;
2662 tree fntype = TREE_TYPE (fndecl);
2663 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2664
2665 va_start (ap, n);
2666 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2667 va_end (ap);
2668 SET_EXPR_LOCATION (fn, loc);
2669 return fn;
2670 }
2671
2672 /* Expand a call to one of the builtin rounding functions gcc defines
2673 as an extension (lfloor and lceil). As these are gcc extensions we
2674 do not need to worry about setting errno to EDOM.
2675 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2676 EXP is the expression that is a call to the builtin function;
2677 if convenient, the result should be placed in TARGET. */
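
/* For example (illustrative), when no lceil_optab insn is available,

     long l = __builtin_lceil (x);

   is lowered to roughly l = (long) ceil (x): a call to the ceil
   fallback followed by expand_fix on its result.  */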
2678
2679 static rtx
2680 expand_builtin_int_roundingfn (tree exp, rtx target)
2681 {
2682 convert_optab builtin_optab;
2683 rtx op0, insns, tmp;
2684 tree fndecl = get_callee_fndecl (exp);
2685 enum built_in_function fallback_fn;
2686 tree fallback_fndecl;
2687 enum machine_mode mode;
2688 tree arg;
2689
2690 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2691 gcc_unreachable ();
2692
2693 arg = CALL_EXPR_ARG (exp, 0);
2694
2695 switch (DECL_FUNCTION_CODE (fndecl))
2696 {
2697 CASE_FLT_FN (BUILT_IN_ICEIL):
2698 CASE_FLT_FN (BUILT_IN_LCEIL):
2699 CASE_FLT_FN (BUILT_IN_LLCEIL):
2700 builtin_optab = lceil_optab;
2701 fallback_fn = BUILT_IN_CEIL;
2702 break;
2703
2704 CASE_FLT_FN (BUILT_IN_IFLOOR):
2705 CASE_FLT_FN (BUILT_IN_LFLOOR):
2706 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2707 builtin_optab = lfloor_optab;
2708 fallback_fn = BUILT_IN_FLOOR;
2709 break;
2710
2711 default:
2712 gcc_unreachable ();
2713 }
2714
2715 /* Make a suitable register to place result in. */
2716 mode = TYPE_MODE (TREE_TYPE (exp));
2717
2718 target = gen_reg_rtx (mode);
2719
2720 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2721 need to expand the argument again. This way, we will not perform
2722 side-effects more than once. */
2723 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2724
2725 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2726
2727 start_sequence ();
2728
2729 /* Compute into TARGET. */
2730 if (expand_sfix_optab (target, op0, builtin_optab))
2731 {
2732 /* Output the entire sequence. */
2733 insns = get_insns ();
2734 end_sequence ();
2735 emit_insn (insns);
2736 return target;
2737 }
2738
2739 /* If we were unable to expand via the builtin, stop the sequence
2740 (without outputting the insns). */
2741 end_sequence ();
2742
2743 /* Fall back to a call to the floating point rounding function. */
2744 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2745
2746 /* For non-C99 targets we may end up without a fallback fndecl here
2747 if the user called __builtin_lfloor directly. In this case emit
2748 a call to the floor/ceil variants nevertheless. This should give
2749 the best user experience for targets lacking full C99 support. */
2750 if (fallback_fndecl == NULL_TREE)
2751 {
2752 tree fntype;
2753 const char *name = NULL;
2754
2755 switch (DECL_FUNCTION_CODE (fndecl))
2756 {
2757 case BUILT_IN_ICEIL:
2758 case BUILT_IN_LCEIL:
2759 case BUILT_IN_LLCEIL:
2760 name = "ceil";
2761 break;
2762 case BUILT_IN_ICEILF:
2763 case BUILT_IN_LCEILF:
2764 case BUILT_IN_LLCEILF:
2765 name = "ceilf";
2766 break;
2767 case BUILT_IN_ICEILL:
2768 case BUILT_IN_LCEILL:
2769 case BUILT_IN_LLCEILL:
2770 name = "ceill";
2771 break;
2772 case BUILT_IN_IFLOOR:
2773 case BUILT_IN_LFLOOR:
2774 case BUILT_IN_LLFLOOR:
2775 name = "floor";
2776 break;
2777 case BUILT_IN_IFLOORF:
2778 case BUILT_IN_LFLOORF:
2779 case BUILT_IN_LLFLOORF:
2780 name = "floorf";
2781 break;
2782 case BUILT_IN_IFLOORL:
2783 case BUILT_IN_LFLOORL:
2784 case BUILT_IN_LLFLOORL:
2785 name = "floorl";
2786 break;
2787 default:
2788 gcc_unreachable ();
2789 }
2790
2791 fntype = build_function_type_list (TREE_TYPE (arg),
2792 TREE_TYPE (arg), NULL_TREE);
2793 fallback_fndecl = build_fn_decl (name, fntype);
2794 }
2795
2796 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2797
2798 tmp = expand_normal (exp);
2799 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2800
2801 /* Truncate the result of floating point optab to integer
2802 via expand_fix (). */
2803 target = gen_reg_rtx (mode);
2804 expand_fix (target, tmp, 0);
2805
2806 return target;
2807 }
2808
2809 /* Expand a call to one of the builtin math functions doing integer
2810 conversion (lrint).
2811 Return 0 if a normal call should be emitted rather than expanding the
2812 function in-line. EXP is the expression that is a call to the builtin
2813 function; if convenient, the result should be placed in TARGET. */
2814
2815 static rtx
2816 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2817 {
2818 convert_optab builtin_optab;
2819 rtx op0, insns;
2820 tree fndecl = get_callee_fndecl (exp);
2821 tree arg;
2822 enum machine_mode mode;
2823 enum built_in_function fallback_fn = BUILT_IN_NONE;
2824
2825 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2826 gcc_unreachable ();
2827
2828 arg = CALL_EXPR_ARG (exp, 0);
2829
2830 switch (DECL_FUNCTION_CODE (fndecl))
2831 {
2832 CASE_FLT_FN (BUILT_IN_IRINT):
2833 fallback_fn = BUILT_IN_LRINT;
2834 /* FALLTHRU */
2835 CASE_FLT_FN (BUILT_IN_LRINT):
2836 CASE_FLT_FN (BUILT_IN_LLRINT):
2837 builtin_optab = lrint_optab;
2838 break;
2839
2840 CASE_FLT_FN (BUILT_IN_IROUND):
2841 fallback_fn = BUILT_IN_LROUND;
2842 /* FALLTHRU */
2843 CASE_FLT_FN (BUILT_IN_LROUND):
2844 CASE_FLT_FN (BUILT_IN_LLROUND):
2845 builtin_optab = lround_optab;
2846 break;
2847
2848 default:
2849 gcc_unreachable ();
2850 }
2851
2852 /* There's no easy way to detect the case we need to set EDOM. */
2853 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2854 return NULL_RTX;
2855
2856 /* Make a suitable register to place result in. */
2857 mode = TYPE_MODE (TREE_TYPE (exp));
2858
2859 /* When errno handling is not required, try the inline expansion. */
2860 if (!flag_errno_math)
2861 {
2862 rtx result = gen_reg_rtx (mode);
2863
2864 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2865 need to expand the argument again. This way, we will not perform
2866 side-effects more than once. */
2867 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2868
2869 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2870
2871 start_sequence ();
2872
2873 if (expand_sfix_optab (result, op0, builtin_optab))
2874 {
2875 /* Output the entire sequence. */
2876 insns = get_insns ();
2877 end_sequence ();
2878 emit_insn (insns);
2879 return result;
2880 }
2881
2882 /* If we were unable to expand via the builtin, stop the sequence
2883 (without outputting the insns) and call to the library function
2884 with the stabilized argument list. */
2885 end_sequence ();
2886 }
2887
2888 if (fallback_fn != BUILT_IN_NONE)
2889 {
2890 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2891 targets, (int) round (x) should never be transformed into
2892 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2893 a call to lround in the hope that the target provides at least some
2894 C99 functions. This should give the best user experience for
2895 targets lacking full C99 support. */
2896 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2897 fallback_fn, 0);
2898
2899 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2900 fallback_fndecl, 1, arg);
2901
2902 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2903 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2904 return convert_to_mode (mode, target, 0);
2905 }
2906
2907 return expand_call (exp, target, target == const0_rtx);
2908 }
2909
2910 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2911 a normal call should be emitted rather than expanding the function
2912 in-line. EXP is the expression that is a call to the builtin
2913 function; if convenient, the result should be placed in TARGET. */
2914
2915 static rtx
2916 expand_builtin_powi (tree exp, rtx target)
2917 {
2918 tree arg0, arg1;
2919 rtx op0, op1;
2920 enum machine_mode mode;
2921 enum machine_mode mode2;
2922
2923 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2924 return NULL_RTX;
2925
2926 arg0 = CALL_EXPR_ARG (exp, 0);
2927 arg1 = CALL_EXPR_ARG (exp, 1);
2928 mode = TYPE_MODE (TREE_TYPE (exp));
2929
2930 /* Emit a libcall to libgcc. */
2931
2932 /* Mode of the 2nd argument must match that of an int. */
2933 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2934
2935 if (target == NULL_RTX)
2936 target = gen_reg_rtx (mode);
2937
2938 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2939 if (GET_MODE (op0) != mode)
2940 op0 = convert_to_mode (mode, op0, 0);
2941 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2942 if (GET_MODE (op1) != mode2)
2943 op1 = convert_to_mode (mode2, op1, 0);
2944
2945 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2946 target, LCT_CONST, mode, 2,
2947 op0, mode, op1, mode2);
2948
2949 return target;
2950 }
2951
2952 /* Expand expression EXP which is a call to the strlen builtin. Return
2953 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2954 try to get the result in TARGET, if convenient. */
2955
2956 static rtx
2957 expand_builtin_strlen (tree exp, rtx target,
2958 enum machine_mode target_mode)
2959 {
2960 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2961 return NULL_RTX;
2962 else
2963 {
2964 struct expand_operand ops[4];
2965 rtx pat;
2966 tree len;
2967 tree src = CALL_EXPR_ARG (exp, 0);
2968 rtx src_reg, before_strlen;
2969 enum machine_mode insn_mode = target_mode;
2970 enum insn_code icode = CODE_FOR_nothing;
2971 unsigned int align;
2972
2973 /* If the length can be computed at compile-time, return it. */
2974 len = c_strlen (src, 0);
2975 if (len)
2976 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2977
2978 /* If the length can be computed at compile-time and is a constant
2979 integer, but there are side-effects in src, evaluate
2980 src for side-effects, then return len.
2981 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2982 can be optimized into: i++; x = 3; */
2983 len = c_strlen (src, 1);
2984 if (len && TREE_CODE (len) == INTEGER_CST)
2985 {
2986 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2987 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2988 }
2989
2990 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2991
2992 /* If SRC is not a pointer type, don't do this operation inline. */
2993 if (align == 0)
2994 return NULL_RTX;
2995
2996 /* Bail out if we can't compute strlen in the right mode. */
2997 while (insn_mode != VOIDmode)
2998 {
2999 icode = optab_handler (strlen_optab, insn_mode);
3000 if (icode != CODE_FOR_nothing)
3001 break;
3002
3003 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3004 }
3005 if (insn_mode == VOIDmode)
3006 return NULL_RTX;
3007
3008 /* Make a place to hold the source address. We will not expand
3009 the actual source until we are sure that the expansion will
3010 not fail -- there are trees that cannot be expanded twice. */
3011 src_reg = gen_reg_rtx (Pmode);
3012
3013 /* Mark the beginning of the strlen sequence so we can emit the
3014 source operand later. */
3015 before_strlen = get_last_insn ();
3016
3017 create_output_operand (&ops[0], target, insn_mode);
3018 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3019 create_integer_operand (&ops[2], 0);
3020 create_integer_operand (&ops[3], align);
3021 if (!maybe_expand_insn (icode, 4, ops))
3022 return NULL_RTX;
3023
3024 /* Now that we are assured of success, expand the source. */
3025 start_sequence ();
3026 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3027 if (pat != src_reg)
3028 {
3029 #ifdef POINTERS_EXTEND_UNSIGNED
3030 if (GET_MODE (pat) != Pmode)
3031 pat = convert_to_mode (Pmode, pat,
3032 POINTERS_EXTEND_UNSIGNED);
3033 #endif
3034 emit_move_insn (src_reg, pat);
3035 }
3036 pat = get_insns ();
3037 end_sequence ();
3038
3039 if (before_strlen)
3040 emit_insn_after (pat, before_strlen);
3041 else
3042 emit_insn_before (pat, get_insns ());
3043
3044 /* Return the value in the proper mode for this function. */
3045 if (GET_MODE (ops[0].value) == target_mode)
3046 target = ops[0].value;
3047 else if (target != 0)
3048 convert_move (target, ops[0].value, 0);
3049 else
3050 target = convert_to_mode (target_mode, ops[0].value, 0);
3051
3052 return target;
3053 }
3054 }
3055
3056 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3057 bytes from constant string DATA + OFFSET and return it as target
3058 constant. */
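
/* For instance (illustrative), with DATA pointing at "hello" and a
   4-byte integer MODE, OFFSET 0 packs the bytes 'h','e','l','l' into
   an immediate and OFFSET 2 packs 'l','l','o','\0'; the assertion
   below guarantees the read stays within the string plus its NUL.  */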
3059
3060 static rtx
3061 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3062 enum machine_mode mode)
3063 {
3064 const char *str = (const char *) data;
3065
3066 gcc_assert (offset >= 0
3067 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3068 <= strlen (str) + 1));
3069
3070 return c_readstr (str + offset, mode);
3071 }
3072
3073 /* Expand a call EXP to the memcpy builtin.
3074 Return NULL_RTX if we failed; the caller should emit a normal call,
3075 otherwise try to get the result in TARGET, if convenient (and in
3076 mode MODE if that's convenient). */
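
/* As an illustrative example, memcpy (buf, "abc", 4) with a constant
   length and sufficiently aligned BUF avoids a block move entirely:
   the four source bytes, NUL included, are emitted as immediate
   stores by store_by_pieces.  */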
3077
3078 static rtx
3079 expand_builtin_memcpy (tree exp, rtx target)
3080 {
3081 if (!validate_arglist (exp,
3082 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3083 return NULL_RTX;
3084 else
3085 {
3086 tree dest = CALL_EXPR_ARG (exp, 0);
3087 tree src = CALL_EXPR_ARG (exp, 1);
3088 tree len = CALL_EXPR_ARG (exp, 2);
3089 const char *src_str;
3090 unsigned int src_align = get_pointer_alignment (src);
3091 unsigned int dest_align = get_pointer_alignment (dest);
3092 rtx dest_mem, src_mem, dest_addr, len_rtx;
3093 HOST_WIDE_INT expected_size = -1;
3094 unsigned int expected_align = 0;
3095
3096 /* If DEST is not a pointer type, call the normal function. */
3097 if (dest_align == 0)
3098 return NULL_RTX;
3099
3100 /* If SRC is not a pointer type, don't do this
3101 operation in-line. */
3102 if (src_align == 0)
3103 return NULL_RTX;
3104
3105 if (currently_expanding_gimple_stmt)
3106 stringop_block_profile (currently_expanding_gimple_stmt,
3107 &expected_align, &expected_size);
3108
3109 if (expected_align < dest_align)
3110 expected_align = dest_align;
3111 dest_mem = get_memory_rtx (dest, len);
3112 set_mem_align (dest_mem, dest_align);
3113 len_rtx = expand_normal (len);
3114 src_str = c_getstr (src);
3115
3116 /* If SRC is a string constant and block move would be done
3117 by pieces, we can avoid loading the string from memory
3118 and instead store the computed constants. */
3119 if (src_str
3120 && CONST_INT_P (len_rtx)
3121 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3122 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3123 CONST_CAST (char *, src_str),
3124 dest_align, false))
3125 {
3126 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3127 builtin_memcpy_read_str,
3128 CONST_CAST (char *, src_str),
3129 dest_align, false, 0);
3130 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3131 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3132 return dest_mem;
3133 }
3134
3135 src_mem = get_memory_rtx (src, len);
3136 set_mem_align (src_mem, src_align);
3137
3138 /* Copy the block in the most expedient way available. */
3139 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3140 CALL_EXPR_TAILCALL (exp)
3141 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3142 expected_align, expected_size);
3143
3144 if (dest_addr == 0)
3145 {
3146 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3147 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3148 }
3149 return dest_addr;
3150 }
3151 }
3152
3153 /* Expand a call EXP to the mempcpy builtin.
3154 Return NULL_RTX if we failed; the caller should emit a normal call,
3155 otherwise try to get the result in TARGET, if convenient (and in
3156 mode MODE if that's convenient). If ENDP is 0 return the
3157 destination pointer, if ENDP is 1 return the end pointer ala
3158 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3159 stpcpy. */
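
/* Illustrative ENDP semantics for an N-byte copy from SRC to DEST:
   ENDP == 0 returns DEST (memcpy), ENDP == 1 returns DEST + N
   (mempcpy), and ENDP == 2 returns DEST + N - 1 (stpcpy, pointing at
   the copied NUL).  */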
3160
3161 static rtx
3162 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3163 {
3164 if (!validate_arglist (exp,
3165 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3166 return NULL_RTX;
3167 else
3168 {
3169 tree dest = CALL_EXPR_ARG (exp, 0);
3170 tree src = CALL_EXPR_ARG (exp, 1);
3171 tree len = CALL_EXPR_ARG (exp, 2);
3172 return expand_builtin_mempcpy_args (dest, src, len,
3173 target, mode, /*endp=*/ 1);
3174 }
3175 }
3176
3177 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3178 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3179 so that this can also be called without constructing an actual CALL_EXPR.
3180 The other arguments and return value are the same as for
3181 expand_builtin_mempcpy. */
3182
3183 static rtx
3184 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3185 rtx target, enum machine_mode mode, int endp)
3186 {
3187 /* If return value is ignored, transform mempcpy into memcpy. */
3188 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3189 {
3190 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3191 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3192 dest, src, len);
3193 return expand_expr (result, target, mode, EXPAND_NORMAL);
3194 }
3195 else
3196 {
3197 const char *src_str;
3198 unsigned int src_align = get_pointer_alignment (src);
3199 unsigned int dest_align = get_pointer_alignment (dest);
3200 rtx dest_mem, src_mem, len_rtx;
3201
3202 /* If either SRC or DEST is not a pointer type, don't do this
3203 operation in-line. */
3204 if (dest_align == 0 || src_align == 0)
3205 return NULL_RTX;
3206
3207 /* If LEN is not constant, call the normal function. */
3208 if (! host_integerp (len, 1))
3209 return NULL_RTX;
3210
3211 len_rtx = expand_normal (len);
3212 src_str = c_getstr (src);
3213
3214 /* If SRC is a string constant and block move would be done
3215 by pieces, we can avoid loading the string from memory
3216 and instead store the computed constants. */
3217 if (src_str
3218 && CONST_INT_P (len_rtx)
3219 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3220 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3221 CONST_CAST (char *, src_str),
3222 dest_align, false))
3223 {
3224 dest_mem = get_memory_rtx (dest, len);
3225 set_mem_align (dest_mem, dest_align);
3226 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3227 builtin_memcpy_read_str,
3228 CONST_CAST (char *, src_str),
3229 dest_align, false, endp);
3230 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3231 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3232 return dest_mem;
3233 }
3234
3235 if (CONST_INT_P (len_rtx)
3236 && can_move_by_pieces (INTVAL (len_rtx),
3237 MIN (dest_align, src_align)))
3238 {
3239 dest_mem = get_memory_rtx (dest, len);
3240 set_mem_align (dest_mem, dest_align);
3241 src_mem = get_memory_rtx (src, len);
3242 set_mem_align (src_mem, src_align);
3243 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3244 MIN (dest_align, src_align), endp);
3245 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3246 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3247 return dest_mem;
3248 }
3249
3250 return NULL_RTX;
3251 }
3252 }
3253
3254 #ifndef HAVE_movstr
3255 # define HAVE_movstr 0
3256 # define CODE_FOR_movstr CODE_FOR_nothing
3257 #endif
3258
3259 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3260 we failed; the caller should emit a normal call. Otherwise try to
3261 get the result in TARGET, if convenient. If ENDP is 0 return the
3262 destination pointer, if ENDP is 1 return the end pointer ala
3263 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3264 stpcpy. */
3265
3266 static rtx
3267 expand_movstr (tree dest, tree src, rtx target, int endp)
3268 {
3269 struct expand_operand ops[3];
3270 rtx dest_mem;
3271 rtx src_mem;
3272
3273 if (!HAVE_movstr)
3274 return NULL_RTX;
3275
3276 dest_mem = get_memory_rtx (dest, NULL);
3277 src_mem = get_memory_rtx (src, NULL);
3278 if (!endp)
3279 {
3280 target = force_reg (Pmode, XEXP (dest_mem, 0));
3281 dest_mem = replace_equiv_address (dest_mem, target);
3282 }
3283
3284 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3285 create_fixed_operand (&ops[1], dest_mem);
3286 create_fixed_operand (&ops[2], src_mem);
3287 expand_insn (CODE_FOR_movstr, 3, ops);
3288
3289 if (endp && target != const0_rtx)
3290 {
3291 target = ops[0].value;
3292 /* movstr is supposed to set end to the address of the NUL
3293 terminator. If the caller requested a mempcpy-like return value,
3294 adjust it. */
3295 if (endp == 1)
3296 {
3297 rtx tem = plus_constant (GET_MODE (target),
3298 gen_lowpart (GET_MODE (target), target), 1);
3299 emit_move_insn (target, force_operand (tem, NULL_RTX));
3300 }
3301 }
3302 return target;
3303 }
3304
3305 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3306 NULL_RTX if we failed, in which case the caller should emit a normal
3307 call; otherwise try to get the result in TARGET, if convenient (and in
3308 mode MODE if that's convenient). */
3309
3310 static rtx
3311 expand_builtin_strcpy (tree exp, rtx target)
3312 {
3313 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3314 {
3315 tree dest = CALL_EXPR_ARG (exp, 0);
3316 tree src = CALL_EXPR_ARG (exp, 1);
3317 return expand_builtin_strcpy_args (dest, src, target);
3318 }
3319 return NULL_RTX;
3320 }
3321
3322 /* Helper function to do the actual work for expand_builtin_strcpy. The
3323 arguments to the builtin_strcpy call DEST and SRC are broken out
3324 so that this can also be called without constructing an actual CALL_EXPR.
3325 The other arguments and return value are the same as for
3326 expand_builtin_strcpy. */
3327
3328 static rtx
3329 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3330 {
3331 return expand_movstr (dest, src, target, /*endp=*/0);
3332 }
3333
3334 /* Expand a call EXP to the stpcpy builtin.
3335 Return NULL_RTX if we failed, in which case the caller should emit
3336 a normal call; otherwise try to get the result in TARGET, if
3337 convenient (and in mode MODE if that's convenient). */
3338
3339 static rtx
3340 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3341 {
3342 tree dst, src;
3343 location_t loc = EXPR_LOCATION (exp);
3344
3345 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3346 return NULL_RTX;
3347
3348 dst = CALL_EXPR_ARG (exp, 0);
3349 src = CALL_EXPR_ARG (exp, 1);
3350
3351 /* If the return value is ignored, transform stpcpy into strcpy. */
3352 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3353 {
3354 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3355 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3356 return expand_expr (result, target, mode, EXPAND_NORMAL);
3357 }
3358 else
3359 {
3360 tree len, lenp1;
3361 rtx ret;
3362
3363 /* Ensure we get an actual string whose length can be evaluated at
3364 compile-time, not an expression containing a string. This is
3365 because the latter will potentially produce pessimized code
3366 when used to compute the return value. */
3367 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3368 return expand_movstr (dst, src, target, /*endp=*/2);
3369
3370 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3371 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3372 target, mode, /*endp=*/2);
3373
3374 if (ret)
3375 return ret;
3376
3377 if (TREE_CODE (len) == INTEGER_CST)
3378 {
3379 rtx len_rtx = expand_normal (len);
3380
3381 if (CONST_INT_P (len_rtx))
3382 {
3383 ret = expand_builtin_strcpy_args (dst, src, target);
3384
3385 if (ret)
3386 {
3387 if (! target)
3388 {
3389 if (mode != VOIDmode)
3390 target = gen_reg_rtx (mode);
3391 else
3392 target = gen_reg_rtx (GET_MODE (ret));
3393 }
3394 if (GET_MODE (target) != GET_MODE (ret))
3395 ret = gen_lowpart (GET_MODE (target), ret);
3396
3397 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3398 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3399 gcc_assert (ret);
3400
3401 return target;
3402 }
3403 }
3404 }
3405
3406 return expand_movstr (dst, src, target, /*endp=*/2);
3407 }
3408 }
3409
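/* Editorial sketch, not part of the original source: the mempcpy-based
   expansion above relies on this identity for a source string whose
   length is known at compile time.  */
#if 0
#define _GNU_SOURCE   /* for mempcpy */
#include <string.h>
#include <assert.h>

static void
stpcpy_identity (char *d, const char *s)
{
  size_t n = strlen (s);
  /* stpcpy returns the address of the NUL: one byte before what
     mempcpy of n + 1 bytes returns, hence endp == 2 above.  */
  assert (stpcpy (d, s) == (char *) mempcpy (d, s, n + 1) - 1);
}
#endif
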
3410 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3411 bytes from constant string DATA + OFFSET and return it as target
3412 constant. */
3413
3414 rtx
3415 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3416 enum machine_mode mode)
3417 {
3418 const char *str = (const char *) data;
3419
3420 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3421 return const0_rtx;
3422
3423 return c_readstr (str + offset, mode);
3424 }
3425
3426 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3427 NULL_RTX if we failed, in which case the caller should emit a normal call. */
3428
3429 static rtx
3430 expand_builtin_strncpy (tree exp, rtx target)
3431 {
3432 location_t loc = EXPR_LOCATION (exp);
3433
3434 if (validate_arglist (exp,
3435 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3436 {
3437 tree dest = CALL_EXPR_ARG (exp, 0);
3438 tree src = CALL_EXPR_ARG (exp, 1);
3439 tree len = CALL_EXPR_ARG (exp, 2);
3440 tree slen = c_strlen (src, 1);
3441
3442 /* Both the LEN parameter and the length of SRC must be constant. */
3443 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3444 return NULL_RTX;
3445
3446 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3447
3448 /* We're required to pad with trailing zeros if the requested
3449 len is greater than strlen(s2)+1. In that case try to
3450 use store_by_pieces; if that fails, punt. */
3451 if (tree_int_cst_lt (slen, len))
3452 {
3453 unsigned int dest_align = get_pointer_alignment (dest);
3454 const char *p = c_getstr (src);
3455 rtx dest_mem;
3456
3457 if (!p || dest_align == 0 || !host_integerp (len, 1)
3458 || !can_store_by_pieces (tree_low_cst (len, 1),
3459 builtin_strncpy_read_str,
3460 CONST_CAST (char *, p),
3461 dest_align, false))
3462 return NULL_RTX;
3463
3464 dest_mem = get_memory_rtx (dest, len);
3465 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3466 builtin_strncpy_read_str,
3467 CONST_CAST (char *, p), dest_align, false, 0);
3468 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3469 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3470 return dest_mem;
3471 }
3472 }
3473 return NULL_RTX;
3474 }
3475
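/* Editorial sketch, not part of the original source: the zero padding
   whose image store_by_pieces materializes above.  */
#if 0
#include <string.h>

static void
strncpy_padding (void)
{
  char buf[8];
  /* len (8) > strlen ("hi") + 1, so strncpy must pad with NULs:
     buf becomes { 'h', 'i', 0, 0, 0, 0, 0, 0 }.  */
  strncpy (buf, "hi", sizeof buf);
  (void) buf;
}
#endif
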
3476 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3477 bytes from constant string DATA + OFFSET and return it as target
3478 constant. */
3479
3480 rtx
3481 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3482 enum machine_mode mode)
3483 {
3484 const char *c = (const char *) data;
3485 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3486
3487 memset (p, *c, GET_MODE_SIZE (mode));
3488
3489 return c_readstr (p, mode);
3490 }
3491
3492 /* Callback routine for store_by_pieces. Return the RTL of a register
3493 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3494 char value given in the RTL register data. For example, if mode is
3495 4 bytes wide, return the RTL for 0x01010101*data. */
3496
3497 static rtx
3498 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3499 enum machine_mode mode)
3500 {
3501 rtx target, coeff;
3502 size_t size;
3503 char *p;
3504
3505 size = GET_MODE_SIZE (mode);
3506 if (size == 1)
3507 return (rtx) data;
3508
3509 p = XALLOCAVEC (char, size);
3510 memset (p, 1, size);
3511 coeff = c_readstr (p, mode);
3512
3513 target = convert_to_mode (mode, (rtx) data, 1);
3514 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3515 return force_reg (mode, target);
3516 }
3517
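/* Editorial sketch, not part of the original source: the
   multiplication by a 0x01...01 coefficient that
   builtin_memset_gen_str emits, shown for a 4-byte mode.  */
#if 0
#include <stdint.h>

static uint32_t
replicate_byte (unsigned char c)
{
  return (uint32_t) c * 0x01010101u;   /* e.g. 0xAB -> 0xABABABAB */
}
#endif
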
3518 /* Expand expression EXP, which is a call to the memset builtin. Return
3519 NULL_RTX if we failed, in which case the caller should emit a normal
3520 call; otherwise try to get the result in TARGET, if convenient (and in
3521 mode MODE if that's convenient). */
3522
3523 static rtx
3524 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3525 {
3526 if (!validate_arglist (exp,
3527 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3528 return NULL_RTX;
3529 else
3530 {
3531 tree dest = CALL_EXPR_ARG (exp, 0);
3532 tree val = CALL_EXPR_ARG (exp, 1);
3533 tree len = CALL_EXPR_ARG (exp, 2);
3534 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3535 }
3536 }
3537
3538 /* Helper function to do the actual work for expand_builtin_memset. The
3539 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3540 so that this can also be called without constructing an actual CALL_EXPR.
3541 The other arguments and return value are the same as for
3542 expand_builtin_memset. */
3543
3544 static rtx
3545 expand_builtin_memset_args (tree dest, tree val, tree len,
3546 rtx target, enum machine_mode mode, tree orig_exp)
3547 {
3548 tree fndecl, fn;
3549 enum built_in_function fcode;
3550 enum machine_mode val_mode;
3551 char c;
3552 unsigned int dest_align;
3553 rtx dest_mem, dest_addr, len_rtx;
3554 HOST_WIDE_INT expected_size = -1;
3555 unsigned int expected_align = 0;
3556
3557 dest_align = get_pointer_alignment (dest);
3558
3559 /* If DEST is not a pointer type, don't do this operation in-line. */
3560 if (dest_align == 0)
3561 return NULL_RTX;
3562
3563 if (currently_expanding_gimple_stmt)
3564 stringop_block_profile (currently_expanding_gimple_stmt,
3565 &expected_align, &expected_size);
3566
3567 if (expected_align < dest_align)
3568 expected_align = dest_align;
3569
3570 /* If the LEN parameter is zero, return DEST. */
3571 if (integer_zerop (len))
3572 {
3573 /* Evaluate and ignore VAL in case it has side-effects. */
3574 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3575 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3576 }
3577
3578 /* Stabilize the arguments in case we fail. */
3579 dest = builtin_save_expr (dest);
3580 val = builtin_save_expr (val);
3581 len = builtin_save_expr (len);
3582
3583 len_rtx = expand_normal (len);
3584 dest_mem = get_memory_rtx (dest, len);
3585 val_mode = TYPE_MODE (unsigned_char_type_node);
3586
3587 if (TREE_CODE (val) != INTEGER_CST)
3588 {
3589 rtx val_rtx;
3590
3591 val_rtx = expand_normal (val);
3592 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3593
3594 /* Assume that we can memset by pieces if we can store
3595 the coefficients by pieces (in the required modes).
3596 We can't pass builtin_memset_gen_str as that emits RTL. */
3597 c = 1;
3598 if (host_integerp (len, 1)
3599 && can_store_by_pieces (tree_low_cst (len, 1),
3600 builtin_memset_read_str, &c, dest_align,
3601 true))
3602 {
3603 val_rtx = force_reg (val_mode, val_rtx);
3604 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3605 builtin_memset_gen_str, val_rtx, dest_align,
3606 true, 0);
3607 }
3608 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3609 dest_align, expected_align,
3610 expected_size))
3611 goto do_libcall;
3612
3613 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3614 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3615 return dest_mem;
3616 }
3617
3618 if (target_char_cast (val, &c))
3619 goto do_libcall;
3620
3621 if (c)
3622 {
3623 if (host_integerp (len, 1)
3624 && can_store_by_pieces (tree_low_cst (len, 1),
3625 builtin_memset_read_str, &c, dest_align,
3626 true))
3627 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3628 builtin_memset_read_str, &c, dest_align, true, 0);
3629 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3630 gen_int_mode (c, val_mode),
3631 dest_align, expected_align,
3632 expected_size))
3633 goto do_libcall;
3634
3635 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3636 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3637 return dest_mem;
3638 }
3639
3640 set_mem_align (dest_mem, dest_align);
3641 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3642 CALL_EXPR_TAILCALL (orig_exp)
3643 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3644 expected_align, expected_size);
3645
3646 if (dest_addr == 0)
3647 {
3648 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3649 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3650 }
3651
3652 return dest_addr;
3653
3654 do_libcall:
3655 fndecl = get_callee_fndecl (orig_exp);
3656 fcode = DECL_FUNCTION_CODE (fndecl);
3657 if (fcode == BUILT_IN_MEMSET)
3658 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3659 dest, val, len);
3660 else if (fcode == BUILT_IN_BZERO)
3661 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3662 dest, len);
3663 else
3664 gcc_unreachable ();
3665 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3666 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3667 return expand_call (fn, target, target == const0_rtx);
3668 }
3669
3670 /* Expand expression EXP, which is a call to the bzero builtin. Return
3671 NULL_RTX if we failed, in which case the caller should emit a normal call. */
3672
3673 static rtx
3674 expand_builtin_bzero (tree exp)
3675 {
3676 tree dest, size;
3677 location_t loc = EXPR_LOCATION (exp);
3678
3679 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3680 return NULL_RTX;
3681
3682 dest = CALL_EXPR_ARG (exp, 0);
3683 size = CALL_EXPR_ARG (exp, 1);
3684
3685 /* New argument list transforming bzero(ptr x, int y) to
3686 memset(ptr x, int 0, size_t y). This is done this way
3687 so that if it isn't expanded inline, we fall back to
3688 calling bzero instead of memset. */
3689
3690 return expand_builtin_memset_args (dest, integer_zero_node,
3691 fold_convert_loc (loc,
3692 size_type_node, size),
3693 const0_rtx, VOIDmode, exp);
3694 }
3695
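/* Editorial sketch, not part of the original source: the argument
   rewrite performed above, at the C level.  */
#if 0
#include <string.h>
#include <strings.h>

static void
bzero_as_memset (void *p, size_t n)
{
  bzero (p, n);       /* the original call ...          */
  memset (p, 0, n);   /* ... and its memset equivalent. */
}
#endif
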
3696 /* Expand expression EXP, which is a call to the memcmp built-in function.
3697 Return NULL_RTX if we failed and the caller should emit a normal call,
3698 otherwise try to get the result in TARGET, if convenient (and in mode
3699 MODE, if that's convenient). */
3700
3701 static rtx
3702 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3703 ATTRIBUTE_UNUSED enum machine_mode mode)
3704 {
3705 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3706
3707 if (!validate_arglist (exp,
3708 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3709 return NULL_RTX;
3710
3711 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3712 implementing memcmp because it will stop when it encounters a
3713 matching zero byte, whereas memcmp must compare all LEN bytes. */
3714 #if defined HAVE_cmpmemsi
3715 {
3716 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3717 rtx result;
3718 rtx insn;
3719 tree arg1 = CALL_EXPR_ARG (exp, 0);
3720 tree arg2 = CALL_EXPR_ARG (exp, 1);
3721 tree len = CALL_EXPR_ARG (exp, 2);
3722
3723 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3724 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3725 enum machine_mode insn_mode;
3726
3727 if (HAVE_cmpmemsi)
3728 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3729 else
3730 return NULL_RTX;
3731
3732 /* If either argument is not a pointer, its alignment is unknown; call the function. */
3733 if (arg1_align == 0 || arg2_align == 0)
3734 return NULL_RTX;
3735
3736 /* Make a place to write the result of the instruction. */
3737 result = target;
3738 if (! (result != 0
3739 && REG_P (result) && GET_MODE (result) == insn_mode
3740 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3741 result = gen_reg_rtx (insn_mode);
3742
3743 arg1_rtx = get_memory_rtx (arg1, len);
3744 arg2_rtx = get_memory_rtx (arg2, len);
3745 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3746
3747 /* Set MEM_SIZE as appropriate. */
3748 if (CONST_INT_P (arg3_rtx))
3749 {
3750 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3751 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3752 }
3753
3754 if (HAVE_cmpmemsi)
3755 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3756 GEN_INT (MIN (arg1_align, arg2_align)));
3757 else
3758 gcc_unreachable ();
3759
3760 if (insn)
3761 emit_insn (insn);
3762 else
3763 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3764 TYPE_MODE (integer_type_node), 3,
3765 XEXP (arg1_rtx, 0), Pmode,
3766 XEXP (arg2_rtx, 0), Pmode,
3767 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3768 TYPE_UNSIGNED (sizetype)),
3769 TYPE_MODE (sizetype));
3770
3771 /* Return the value in the proper mode for this function. */
3772 mode = TYPE_MODE (TREE_TYPE (exp));
3773 if (GET_MODE (result) == mode)
3774 return result;
3775 else if (target != 0)
3776 {
3777 convert_move (target, result, 0);
3778 return target;
3779 }
3780 else
3781 return convert_to_mode (mode, result, 0);
3782 }
3783 #endif /* HAVE_cmpmemsi. */
3784
3785 return NULL_RTX;
3786 }
3787
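/* Editorial sketch, not part of the original source: why a
   string-style compare cannot implement memcmp.  */
#if 0
#include <string.h>
#include <assert.h>

static void
embedded_nul (void)
{
  assert (memcmp ("a\0b", "a\0c", 3) != 0);   /* sees byte 2      */
  assert (strncmp ("a\0b", "a\0c", 3) == 0);  /* stops at the NUL */
}
#endif
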
3788 /* Expand expression EXP, which is a call to the strcmp builtin. Return
3789 NULL_RTX if we failed, in which case the caller should emit a normal
3790 call; otherwise try to get the result in TARGET, if convenient. */
3791
3792 static rtx
3793 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3794 {
3795 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3796 return NULL_RTX;
3797
3798 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3799 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3800 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3801 {
3802 rtx arg1_rtx, arg2_rtx;
3803 rtx result, insn = NULL_RTX;
3804 tree fndecl, fn;
3805 tree arg1 = CALL_EXPR_ARG (exp, 0);
3806 tree arg2 = CALL_EXPR_ARG (exp, 1);
3807
3808 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3809 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3810
3811 /* If either argument is not a pointer, its alignment is unknown; call the function. */
3812 if (arg1_align == 0 || arg2_align == 0)
3813 return NULL_RTX;
3814
3815 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3816 arg1 = builtin_save_expr (arg1);
3817 arg2 = builtin_save_expr (arg2);
3818
3819 arg1_rtx = get_memory_rtx (arg1, NULL);
3820 arg2_rtx = get_memory_rtx (arg2, NULL);
3821
3822 #ifdef HAVE_cmpstrsi
3823 /* Try to call cmpstrsi. */
3824 if (HAVE_cmpstrsi)
3825 {
3826 enum machine_mode insn_mode
3827 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3828
3829 /* Make a place to write the result of the instruction. */
3830 result = target;
3831 if (! (result != 0
3832 && REG_P (result) && GET_MODE (result) == insn_mode
3833 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3834 result = gen_reg_rtx (insn_mode);
3835
3836 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3837 GEN_INT (MIN (arg1_align, arg2_align)));
3838 }
3839 #endif
3840 #ifdef HAVE_cmpstrnsi
3841 /* Try to determine at least one length and call cmpstrnsi. */
3842 if (!insn && HAVE_cmpstrnsi)
3843 {
3844 tree len;
3845 rtx arg3_rtx;
3846
3847 enum machine_mode insn_mode
3848 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3849 tree len1 = c_strlen (arg1, 1);
3850 tree len2 = c_strlen (arg2, 1);
3851
3852 if (len1)
3853 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3854 if (len2)
3855 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3856
3857 /* If we don't have a constant length for the first, use the length
3858 of the second, if we know it. We don't require a constant for
3859 this case; some cost analysis could be done if both are available
3860 but neither is constant. For now, assume they're equally cheap,
3861 unless one has side effects. If both strings have constant lengths,
3862 use the smaller. */
3863
3864 if (!len1)
3865 len = len2;
3866 else if (!len2)
3867 len = len1;
3868 else if (TREE_SIDE_EFFECTS (len1))
3869 len = len2;
3870 else if (TREE_SIDE_EFFECTS (len2))
3871 len = len1;
3872 else if (TREE_CODE (len1) != INTEGER_CST)
3873 len = len2;
3874 else if (TREE_CODE (len2) != INTEGER_CST)
3875 len = len1;
3876 else if (tree_int_cst_lt (len1, len2))
3877 len = len1;
3878 else
3879 len = len2;
3880
3881 /* If both arguments have side effects, we cannot optimize. */
3882 if (!len || TREE_SIDE_EFFECTS (len))
3883 goto do_libcall;
3884
3885 arg3_rtx = expand_normal (len);
3886
3887 /* Make a place to write the result of the instruction. */
3888 result = target;
3889 if (! (result != 0
3890 && REG_P (result) && GET_MODE (result) == insn_mode
3891 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3892 result = gen_reg_rtx (insn_mode);
3893
3894 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3895 GEN_INT (MIN (arg1_align, arg2_align)));
3896 }
3897 #endif
3898
3899 if (insn)
3900 {
3901 enum machine_mode mode;
3902 emit_insn (insn);
3903
3904 /* Return the value in the proper mode for this function. */
3905 mode = TYPE_MODE (TREE_TYPE (exp));
3906 if (GET_MODE (result) == mode)
3907 return result;
3908 if (target == 0)
3909 return convert_to_mode (mode, result, 0);
3910 convert_move (target, result, 0);
3911 return target;
3912 }
3913
3914 /* Expand the library call ourselves using a stabilized argument
3915 list to avoid evaluating the function's arguments twice. */
3916 #ifdef HAVE_cmpstrnsi
3917 do_libcall:
3918 #endif
3919 fndecl = get_callee_fndecl (exp);
3920 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3921 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3922 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3923 return expand_call (fn, target, target == const0_rtx);
3924 }
3925 #endif
3926 return NULL_RTX;
3927 }
3928
3929 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3930 NULL_RTX if we failed, in which case the caller should emit a normal
3931 call; otherwise try to get the result in TARGET, if convenient. */
3932
3933 static rtx
3934 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3935 ATTRIBUTE_UNUSED enum machine_mode mode)
3936 {
3937 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3938
3939 if (!validate_arglist (exp,
3940 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3941 return NULL_RTX;
3942
3943 /* If c_strlen can determine an expression for one of the string
3944 lengths, and it doesn't have side effects, then emit cmpstrnsi
3945 using length MIN(strlen(string)+1, arg3). */
3946 #ifdef HAVE_cmpstrnsi
3947 if (HAVE_cmpstrnsi)
3948 {
3949 tree len, len1, len2;
3950 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3951 rtx result, insn;
3952 tree fndecl, fn;
3953 tree arg1 = CALL_EXPR_ARG (exp, 0);
3954 tree arg2 = CALL_EXPR_ARG (exp, 1);
3955 tree arg3 = CALL_EXPR_ARG (exp, 2);
3956
3957 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3958 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3959 enum machine_mode insn_mode
3960 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3961
3962 len1 = c_strlen (arg1, 1);
3963 len2 = c_strlen (arg2, 1);
3964
3965 if (len1)
3966 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3967 if (len2)
3968 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3969
3970 /* If we don't have a constant length for the first, use the length
3971 of the second, if we know it. We don't require a constant for
3972 this case; some cost analysis could be done if both are available
3973 but neither is constant. For now, assume they're equally cheap,
3974 unless one has side effects. If both strings have constant lengths,
3975 use the smaller. */
3976
3977 if (!len1)
3978 len = len2;
3979 else if (!len2)
3980 len = len1;
3981 else if (TREE_SIDE_EFFECTS (len1))
3982 len = len2;
3983 else if (TREE_SIDE_EFFECTS (len2))
3984 len = len1;
3985 else if (TREE_CODE (len1) != INTEGER_CST)
3986 len = len2;
3987 else if (TREE_CODE (len2) != INTEGER_CST)
3988 len = len1;
3989 else if (tree_int_cst_lt (len1, len2))
3990 len = len1;
3991 else
3992 len = len2;
3993
3994 /* If both arguments have side effects, we cannot optimize. */
3995 if (!len || TREE_SIDE_EFFECTS (len))
3996 return NULL_RTX;
3997
3998 /* The actual new length parameter is MIN(len,arg3). */
3999 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4000 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4001
4002 /* If either argument is not a pointer, its alignment is unknown; call the function. */
4003 if (arg1_align == 0 || arg2_align == 0)
4004 return NULL_RTX;
4005
4006 /* Make a place to write the result of the instruction. */
4007 result = target;
4008 if (! (result != 0
4009 && REG_P (result) && GET_MODE (result) == insn_mode
4010 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4011 result = gen_reg_rtx (insn_mode);
4012
4013 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4014 arg1 = builtin_save_expr (arg1);
4015 arg2 = builtin_save_expr (arg2);
4016 len = builtin_save_expr (len);
4017
4018 arg1_rtx = get_memory_rtx (arg1, len);
4019 arg2_rtx = get_memory_rtx (arg2, len);
4020 arg3_rtx = expand_normal (len);
4021 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4022 GEN_INT (MIN (arg1_align, arg2_align)));
4023 if (insn)
4024 {
4025 emit_insn (insn);
4026
4027 /* Return the value in the proper mode for this function. */
4028 mode = TYPE_MODE (TREE_TYPE (exp));
4029 if (GET_MODE (result) == mode)
4030 return result;
4031 if (target == 0)
4032 return convert_to_mode (mode, result, 0);
4033 convert_move (target, result, 0);
4034 return target;
4035 }
4036
4037 /* Expand the library call ourselves using a stabilized argument
4038 list to avoid evaluating the function's arguments twice. */
4039 fndecl = get_callee_fndecl (exp);
4040 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4041 arg1, arg2, len);
4042 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4043 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4044 return expand_call (fn, target, target == const0_rtx);
4045 }
4046 #endif
4047 return NULL_RTX;
4048 }
4049
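/* Editorial sketch, not part of the original source: why clamping
   the length to MIN (strlen (s1) + 1, n) above is safe.  Once either
   string's terminator has been compared, the result is decided, so
   bytes past it can never matter.  */
#if 0
#include <string.h>

static int
clamped_strncmp (const char *s1, const char *s2, size_t n)
{
  size_t k = strlen (s1) + 1;
  /* Equivalent to strncmp (s1, s2, n).  */
  return strncmp (s1, s2, n < k ? n : k);
}
#endif
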
4050 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4051 if that's convenient. */
4052
4053 rtx
4054 expand_builtin_saveregs (void)
4055 {
4056 rtx val, seq;
4057
4058 /* Don't do __builtin_saveregs more than once in a function.
4059 Save the result of the first call and reuse it. */
4060 if (saveregs_value != 0)
4061 return saveregs_value;
4062
4063 /* When this function is called, it means that registers must be
4064 saved on entry to this function. So we migrate the call to the
4065 first insn of this function. */
4066
4067 start_sequence ();
4068
4069 /* Do whatever the machine needs done in this case. */
4070 val = targetm.calls.expand_builtin_saveregs ();
4071
4072 seq = get_insns ();
4073 end_sequence ();
4074
4075 saveregs_value = val;
4076
4077 /* Put the insns after the NOTE that starts the function. If this
4078 is inside a start_sequence, make the outer-level insn chain current, so
4079 the code is placed at the start of the function. */
4080 push_topmost_sequence ();
4081 emit_insn_after (seq, entry_of_function ());
4082 pop_topmost_sequence ();
4083
4084 return val;
4085 }
4086
4087 /* Expand a call to __builtin_next_arg. */
4088
4089 static rtx
4090 expand_builtin_next_arg (void)
4091 {
4092 /* Checking arguments is already done in fold_builtin_next_arg
4093 that must be called before this function. */
4094 return expand_binop (ptr_mode, add_optab,
4095 crtl->args.internal_arg_pointer,
4096 crtl->args.arg_offset_rtx,
4097 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4098 }
4099
4100 /* Make it easier for the backends by protecting the valist argument
4101 from multiple evaluations. */
4102
4103 static tree
4104 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4105 {
4106 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4107
4108 /* The current way of determining the type of valist is completely
4109 bogus. We should have the information on the va builtin instead. */
4110 if (!vatype)
4111 vatype = targetm.fn_abi_va_list (cfun->decl);
4112
4113 if (TREE_CODE (vatype) == ARRAY_TYPE)
4114 {
4115 if (TREE_SIDE_EFFECTS (valist))
4116 valist = save_expr (valist);
4117
4118 /* For this case, the backends will be expecting a pointer to
4119 vatype, but it's possible we've actually been given an array
4120 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4121 So fix it. */
4122 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4123 {
4124 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4125 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4126 }
4127 }
4128 else
4129 {
4130 tree pt = build_pointer_type (vatype);
4131
4132 if (! needs_lvalue)
4133 {
4134 if (! TREE_SIDE_EFFECTS (valist))
4135 return valist;
4136
4137 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4138 TREE_SIDE_EFFECTS (valist) = 1;
4139 }
4140
4141 if (TREE_SIDE_EFFECTS (valist))
4142 valist = save_expr (valist);
4143 valist = fold_build2_loc (loc, MEM_REF,
4144 vatype, valist, build_int_cst (pt, 0));
4145 }
4146
4147 return valist;
4148 }
4149
4150 /* The "standard" definition of va_list is void*. */
4151
4152 tree
4153 std_build_builtin_va_list (void)
4154 {
4155 return ptr_type_node;
4156 }
4157
4158 /* The "standard" abi va_list is va_list_type_node. */
4159
4160 tree
4161 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4162 {
4163 return va_list_type_node;
4164 }
4165
4166 /* The "standard" type of va_list is va_list_type_node. */
4167
4168 tree
4169 std_canonical_va_list_type (tree type)
4170 {
4171 tree wtype, htype;
4172
4173 if (INDIRECT_REF_P (type))
4174 type = TREE_TYPE (type);
4175 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4176 type = TREE_TYPE (type);
4177 wtype = va_list_type_node;
4178 htype = type;
4179 /* Treat structure va_list types. */
4180 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4181 htype = TREE_TYPE (htype);
4182 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4183 {
4184 /* If va_list is an array type, the argument may have decayed
4185 to a pointer type, e.g. by being passed to another function.
4186 In that case, unwrap both types so that we can compare the
4187 underlying records. */
4188 if (TREE_CODE (htype) == ARRAY_TYPE
4189 || POINTER_TYPE_P (htype))
4190 {
4191 wtype = TREE_TYPE (wtype);
4192 htype = TREE_TYPE (htype);
4193 }
4194 }
4195 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4196 return va_list_type_node;
4197
4198 return NULL_TREE;
4199 }
4200
4201 /* The "standard" implementation of va_start: just assign `nextarg' to
4202 the variable. */
4203
4204 void
4205 std_expand_builtin_va_start (tree valist, rtx nextarg)
4206 {
4207 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4208 convert_move (va_r, nextarg, 0);
4209 }
4210
4211 /* Expand EXP, a call to __builtin_va_start. */
4212
4213 static rtx
4214 expand_builtin_va_start (tree exp)
4215 {
4216 rtx nextarg;
4217 tree valist;
4218 location_t loc = EXPR_LOCATION (exp);
4219
4220 if (call_expr_nargs (exp) < 2)
4221 {
4222 error_at (loc, "too few arguments to function %<va_start%>");
4223 return const0_rtx;
4224 }
4225
4226 if (fold_builtin_next_arg (exp, true))
4227 return const0_rtx;
4228
4229 nextarg = expand_builtin_next_arg ();
4230 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4231
4232 if (targetm.expand_builtin_va_start)
4233 targetm.expand_builtin_va_start (valist, nextarg);
4234 else
4235 std_expand_builtin_va_start (valist, nextarg);
4236
4237 return const0_rtx;
4238 }
4239
4240 /* The "standard" implementation of va_arg: read the value from the
4241 current (padded) address and increment by the (padded) size. */
4242
4243 tree
4244 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4245 gimple_seq *post_p)
4246 {
4247 tree addr, t, type_size, rounded_size, valist_tmp;
4248 unsigned HOST_WIDE_INT align, boundary;
4249 bool indirect;
4250
4251 #ifdef ARGS_GROW_DOWNWARD
4252 /* All of the alignment and movement below is for args-grow-up machines.
4253 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4254 implement their own specialized gimplify_va_arg_expr routines. */
4255 gcc_unreachable ();
4256 #endif
4257
4258 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4259 if (indirect)
4260 type = build_pointer_type (type);
4261
4262 align = PARM_BOUNDARY / BITS_PER_UNIT;
4263 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4264
4265 /* When the caller aligns a parameter on the stack, an alignment
4266 beyond MAX_SUPPORTED_STACK_ALIGNMENT is clamped to
4267 MAX_SUPPORTED_STACK_ALIGNMENT. Match that caller behavior here
4268 in the callee. */
4269 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4270 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4271
4272 boundary /= BITS_PER_UNIT;
4273
4274 /* Hoist the valist value into a temporary for the moment. */
4275 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4276
4277 /* The va_list pointer is aligned to PARM_BOUNDARY. If the argument
4278 actually requires greater alignment, we must perform dynamic alignment. */
4279 if (boundary > align
4280 && !integer_zerop (TYPE_SIZE (type)))
4281 {
4282 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4283 fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
4284 gimplify_and_add (t, pre_p);
4285
4286 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4287 fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
4288 valist_tmp,
4289 build_int_cst (TREE_TYPE (valist), -boundary)));
4290 gimplify_and_add (t, pre_p);
4291 }
4292 else
4293 boundary = align;
4294
4295 /* If the actual alignment is less than the alignment of the type,
4296 adjust the type accordingly so that we don't assume strict alignment
4297 when dereferencing the pointer. */
4298 boundary *= BITS_PER_UNIT;
4299 if (boundary < TYPE_ALIGN (type))
4300 {
4301 type = build_variant_type_copy (type);
4302 TYPE_ALIGN (type) = boundary;
4303 }
4304
4305 /* Compute the rounded size of the type. */
4306 type_size = size_in_bytes (type);
4307 rounded_size = round_up (type_size, align);
4308
4309 /* Reduce rounded_size so it's sharable with the postqueue. */
4310 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4311
4312 /* Get AP. */
4313 addr = valist_tmp;
4314 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4315 {
4316 /* Small args are padded downward. */
4317 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4318 rounded_size, size_int (align));
4319 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4320 size_binop (MINUS_EXPR, rounded_size, type_size));
4321 addr = fold_build_pointer_plus (addr, t);
4322 }
4323
4324 /* Compute new value for AP. */
4325 t = fold_build_pointer_plus (valist_tmp, rounded_size);
4326 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4327 gimplify_and_add (t, pre_p);
4328
4329 addr = fold_convert (build_pointer_type (type), addr);
4330
4331 if (indirect)
4332 addr = build_va_arg_indirect_ref (addr);
4333
4334 return build_va_arg_indirect_ref (addr);
4335 }
4336
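/* Editorial sketch, not part of the original source: the pointer
   arithmetic gimplified above, for an args-grow-up target, ignoring
   dynamic realignment and indirect passing.  AP is a char * view of
   the va_list; PAD_DOWN models PAD_VARARGS_DOWN.  */
#if 0
#include <stddef.h>

#define EX_ROUND_UP(x, a) (((x) + (a) - 1) & ~((size_t) (a) - 1))

static void *
std_va_arg_step (char **ap, size_t size, size_t align, int pad_down)
{
  size_t rounded = EX_ROUND_UP (size, align);
  char *addr = *ap;
  /* Small args are padded downward: point at the value itself, not
     at the start of its slot.  */
  if (pad_down && rounded <= align)
    addr += rounded - size;
  *ap += rounded;   /* new AP = old AP + rounded size */
  return addr;
}
#endif
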
4337 /* Build an indirect-ref expression over the given TREE, which represents a
4338 piece of a va_arg() expansion. */
4339 tree
4340 build_va_arg_indirect_ref (tree addr)
4341 {
4342 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4343
4344 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4345 mf_mark (addr);
4346
4347 return addr;
4348 }
4349
4350 /* Return a dummy expression of type TYPE in order to keep going after an
4351 error. */
4352
4353 static tree
4354 dummy_object (tree type)
4355 {
4356 tree t = build_int_cst (build_pointer_type (type), 0);
4357 return build2 (MEM_REF, type, t, t);
4358 }
4359
4360 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4361 builtin function, but a very special sort of operator. */
4362
4363 enum gimplify_status
4364 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4365 {
4366 tree promoted_type, have_va_type;
4367 tree valist = TREE_OPERAND (*expr_p, 0);
4368 tree type = TREE_TYPE (*expr_p);
4369 tree t;
4370 location_t loc = EXPR_LOCATION (*expr_p);
4371
4372 /* Verify that valist is of the proper type. */
4373 have_va_type = TREE_TYPE (valist);
4374 if (have_va_type == error_mark_node)
4375 return GS_ERROR;
4376 have_va_type = targetm.canonical_va_list_type (have_va_type);
4377
4378 if (have_va_type == NULL_TREE)
4379 {
4380 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4381 return GS_ERROR;
4382 }
4383
4384 /* Generate a diagnostic for requesting data of a type that cannot
4385 be passed through `...' due to type promotion at the call site. */
4386 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4387 != type)
4388 {
4389 static bool gave_help;
4390 bool warned;
4391
4392 /* Unfortunately, this is merely undefined, rather than a constraint
4393 violation, so we cannot make this an error. If this call is never
4394 executed, the program is still strictly conforming. */
4395 warned = warning_at (loc, 0,
4396 "%qT is promoted to %qT when passed through %<...%>",
4397 type, promoted_type);
4398 if (!gave_help && warned)
4399 {
4400 gave_help = true;
4401 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4402 promoted_type, type);
4403 }
4404
4405 /* We can, however, treat "undefined" any way we please.
4406 Call abort to encourage the user to fix the program. */
4407 if (warned)
4408 inform (loc, "if this code is reached, the program will abort");
4409 /* Before the abort, allow the evaluation of the va_list
4410 expression to exit or longjmp. */
4411 gimplify_and_add (valist, pre_p);
4412 t = build_call_expr_loc (loc,
4413 builtin_decl_implicit (BUILT_IN_TRAP), 0);
4414 gimplify_and_add (t, pre_p);
4415
4416 /* This is dead code, but go ahead and finish so that the
4417 mode of the result comes out right. */
4418 *expr_p = dummy_object (type);
4419 return GS_ALL_DONE;
4420 }
4421 else
4422 {
4423 /* Make it easier for the backends by protecting the valist argument
4424 from multiple evaluations. */
4425 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4426 {
4427 /* For this case, the backends will be expecting a pointer to
4428 TREE_TYPE (abi), but it's possible we've
4429 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4430 So fix it. */
4431 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4432 {
4433 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4434 valist = fold_convert_loc (loc, p1,
4435 build_fold_addr_expr_loc (loc, valist));
4436 }
4437
4438 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4439 }
4440 else
4441 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4442
4443 if (!targetm.gimplify_va_arg_expr)
4444 /* FIXME: Once most targets are converted we should merely
4445 assert this is non-null. */
4446 return GS_ALL_DONE;
4447
4448 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4449 return GS_OK;
4450 }
4451 }
4452
4453 /* Expand EXP, a call to __builtin_va_end. */
4454
4455 static rtx
4456 expand_builtin_va_end (tree exp)
4457 {
4458 tree valist = CALL_EXPR_ARG (exp, 0);
4459
4460 /* Evaluate for side effects, if needed. I hate macros that don't
4461 do that. */
4462 if (TREE_SIDE_EFFECTS (valist))
4463 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4464
4465 return const0_rtx;
4466 }
4467
4468 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4469 builtin rather than just as an assignment in stdarg.h because of the
4470 nastiness of array-type va_list types. */
4471
4472 static rtx
4473 expand_builtin_va_copy (tree exp)
4474 {
4475 tree dst, src, t;
4476 location_t loc = EXPR_LOCATION (exp);
4477
4478 dst = CALL_EXPR_ARG (exp, 0);
4479 src = CALL_EXPR_ARG (exp, 1);
4480
4481 dst = stabilize_va_list_loc (loc, dst, 1);
4482 src = stabilize_va_list_loc (loc, src, 0);
4483
4484 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4485
4486 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4487 {
4488 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4489 TREE_SIDE_EFFECTS (t) = 1;
4490 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4491 }
4492 else
4493 {
4494 rtx dstb, srcb, size;
4495
4496 /* Evaluate to pointers. */
4497 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4498 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4499 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4500 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4501
4502 dstb = convert_memory_address (Pmode, dstb);
4503 srcb = convert_memory_address (Pmode, srcb);
4504
4505 /* "Dereference" to BLKmode memories. */
4506 dstb = gen_rtx_MEM (BLKmode, dstb);
4507 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4508 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4509 srcb = gen_rtx_MEM (BLKmode, srcb);
4510 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4511 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4512
4513 /* Copy. */
4514 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4515 }
4516
4517 return const0_rtx;
4518 }
4519
4520 /* Expand a call to one of the builtin functions __builtin_frame_address or
4521 __builtin_return_address. */
4522
4523 static rtx
4524 expand_builtin_frame_address (tree fndecl, tree exp)
4525 {
4526 /* The argument must be a nonnegative integer constant.
4527 It counts the number of frames to scan up the stack.
4528 The value is the return address saved in that frame. */
4529 if (call_expr_nargs (exp) == 0)
4530 /* Warning about missing arg was already issued. */
4531 return const0_rtx;
4532 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4533 {
4534 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4535 error ("invalid argument to %<__builtin_frame_address%>");
4536 else
4537 error ("invalid argument to %<__builtin_return_address%>");
4538 return const0_rtx;
4539 }
4540 else
4541 {
4542 rtx tem
4543 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4544 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4545
4546 /* Some ports cannot access arbitrary stack frames. */
4547 if (tem == NULL)
4548 {
4549 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4550 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4551 else
4552 warning (0, "unsupported argument to %<__builtin_return_address%>");
4553 return const0_rtx;
4554 }
4555
4556 /* For __builtin_frame_address, return what we've got. */
4557 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4558 return tem;
4559
4560 if (!REG_P (tem)
4561 && ! CONSTANT_P (tem))
4562 tem = copy_addr_to_reg (tem);
4563 return tem;
4564 }
4565 }
4566
4567 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4568 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4569 is the same as for allocate_dynamic_stack_space. */
4570
4571 static rtx
4572 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4573 {
4574 rtx op0;
4575 rtx result;
4576 bool valid_arglist;
4577 unsigned int align;
4578 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4579 == BUILT_IN_ALLOCA_WITH_ALIGN);
4580
4581 /* Emit a normal call if we use mudflap. */
4582 if (flag_mudflap)
4583 return NULL_RTX;
4584
4585 valid_arglist
4586 = (alloca_with_align
4587 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4588 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4589
4590 if (!valid_arglist)
4591 return NULL_RTX;
4592
4593 /* Compute the argument. */
4594 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4595
4596 /* Compute the alignment. */
4597 align = (alloca_with_align
4598 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4599 : BIGGEST_ALIGNMENT);
4600
4601 /* Allocate the desired space. */
4602 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4603 result = convert_memory_address (ptr_mode, result);
4604
4605 return result;
4606 }
4607
4608 /* Expand a call to a bswap builtin in EXP.
4609 Return NULL_RTX if a normal call should be emitted rather than expanding the
4610 function in-line. If convenient, the result should be placed in TARGET.
4611 SUBTARGET may be used as the target for computing one of EXP's operands. */
4612
4613 static rtx
4614 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4615 rtx subtarget)
4616 {
4617 tree arg;
4618 rtx op0;
4619
4620 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4621 return NULL_RTX;
4622
4623 arg = CALL_EXPR_ARG (exp, 0);
4624 op0 = expand_expr (arg,
4625 subtarget && GET_MODE (subtarget) == target_mode
4626 ? subtarget : NULL_RTX,
4627 target_mode, EXPAND_NORMAL);
4628 if (GET_MODE (op0) != target_mode)
4629 op0 = convert_to_mode (target_mode, op0, 1);
4630
4631 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4632
4633 gcc_assert (target);
4634
4635 return convert_to_mode (target_mode, target, 1);
4636 }
4637
4638 /* Expand a call to a unary builtin in EXP.
4639 Return NULL_RTX if a normal call should be emitted rather than expanding the
4640 function in-line. If convenient, the result should be placed in TARGET.
4641 SUBTARGET may be used as the target for computing one of EXP's operands. */
4642
4643 static rtx
4644 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4645 rtx subtarget, optab op_optab)
4646 {
4647 rtx op0;
4648
4649 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4650 return NULL_RTX;
4651
4652 /* Compute the argument. */
4653 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4654 (subtarget
4655 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4656 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4657 VOIDmode, EXPAND_NORMAL);
4658 /* Compute op, into TARGET if possible.
4659 Set TARGET to wherever the result comes back. */
4660 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4661 op_optab, op0, target, op_optab != clrsb_optab);
4662 gcc_assert (target);
4663
4664 return convert_to_mode (target_mode, target, 0);
4665 }
4666
4667 /* Expand a call to __builtin_expect. We just return our argument
4668 as the builtin_expect semantics should already have been applied
4669 by the tree branch-prediction pass. */
4670
4671 static rtx
4672 expand_builtin_expect (tree exp, rtx target)
4673 {
4674 tree arg;
4675
4676 if (call_expr_nargs (exp) < 2)
4677 return const0_rtx;
4678 arg = CALL_EXPR_ARG (exp, 0);
4679
4680 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4681 /* When guessing was done, the hints should be already stripped away. */
4682 gcc_assert (!flag_guess_branch_prob
4683 || optimize == 0 || seen_error ());
4684 return target;
4685 }
4686
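/* Editorial sketch, not part of the original source: by the time
   expansion runs, only the value of the first argument remains; the
   hint was consumed earlier.  slow_path is a hypothetical helper.  */
#if 0
extern int slow_path (int);

static int
likely_zero (int x)
{
  if (__builtin_expect (x != 0, 0))  /* "x != 0 is unlikely" */
    return slow_path (x);
  return 0;
}
#endif
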
4687 /* Expand a call to __builtin_assume_aligned. We just return our first
4688 argument, as the builtin_assume_aligned semantics should already
4689 have been applied by CCP. */
4690
4691 static rtx
4692 expand_builtin_assume_aligned (tree exp, rtx target)
4693 {
4694 if (call_expr_nargs (exp) < 2)
4695 return const0_rtx;
4696 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4697 EXPAND_NORMAL);
4698 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4699 && (call_expr_nargs (exp) < 3
4700 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4701 return target;
4702 }
4703
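/* Editorial sketch, not part of the original source: a typical use.
   The call expands to its first argument; the alignment fact itself
   was already exploited by CCP.  */
#if 0
static float
first_elem (float *p)
{
  float *q = (float *) __builtin_assume_aligned (p, 16);
  return q[0];
}
#endif
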
4704 void
4705 expand_builtin_trap (void)
4706 {
4707 #ifdef HAVE_trap
4708 if (HAVE_trap)
4709 {
4710 rtx insn = emit_insn (gen_trap ());
4711 /* For trap insns, when not accumulating outgoing args, force a
4712 REG_ARGS_SIZE note to prevent crossjumping of calls with
4713 different arg sizes. */
4714 if (!ACCUMULATE_OUTGOING_ARGS)
4715 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4716 }
4717 else
4718 #endif
4719 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4720 emit_barrier ();
4721 }
4722
4723 /* Expand a call to __builtin_unreachable. We do nothing except emit
4724 a barrier saying that control flow will not pass here.
4725
4726 It is the responsibility of the program being compiled to ensure
4727 that control flow never reaches __builtin_unreachable. */
4728 static void
4729 expand_builtin_unreachable (void)
4730 {
4731 emit_barrier ();
4732 }
4733
4734 /* Expand EXP, a call to fabs, fabsf or fabsl.
4735 Return NULL_RTX if a normal call should be emitted rather than expanding
4736 the function inline. If convenient, the result should be placed
4737 in TARGET. SUBTARGET may be used as the target for computing
4738 the operand. */
4739
4740 static rtx
4741 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4742 {
4743 enum machine_mode mode;
4744 tree arg;
4745 rtx op0;
4746
4747 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4748 return NULL_RTX;
4749
4750 arg = CALL_EXPR_ARG (exp, 0);
4751 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4752 mode = TYPE_MODE (TREE_TYPE (arg));
4753 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4754 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4755 }
4756
4757 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4758 Return NULL if a normal call should be emitted rather than expanding the
4759 function inline. If convenient, the result should be placed in TARGET.
4760 SUBTARGET may be used as the target for computing the operand. */
4761
4762 static rtx
4763 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4764 {
4765 rtx op0, op1;
4766 tree arg;
4767
4768 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4769 return NULL_RTX;
4770
4771 arg = CALL_EXPR_ARG (exp, 0);
4772 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4773
4774 arg = CALL_EXPR_ARG (exp, 1);
4775 op1 = expand_normal (arg);
4776
4777 return expand_copysign (op0, op1, target);
4778 }
4779
4780 /* Create a new constant string literal and return a char* pointer to it.
4781 The STRING_CST value is the LEN characters at STR. */
4782 tree
4783 build_string_literal (int len, const char *str)
4784 {
4785 tree t, elem, index, type;
4786
4787 t = build_string (len, str);
4788 elem = build_type_variant (char_type_node, 1, 0);
4789 index = build_index_type (size_int (len - 1));
4790 type = build_array_type (elem, index);
4791 TREE_TYPE (t) = type;
4792 TREE_CONSTANT (t) = 1;
4793 TREE_READONLY (t) = 1;
4794 TREE_STATIC (t) = 1;
4795
4796 type = build_pointer_type (elem);
4797 t = build1 (ADDR_EXPR, type,
4798 build4 (ARRAY_REF, elem,
4799 t, integer_zero_node, NULL_TREE, NULL_TREE));
4800 return t;
4801 }
4802
4803 /* Expand a call to __builtin___clear_cache. */
4804
4805 static rtx
4806 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4807 {
4808 #ifndef HAVE_clear_cache
4809 #ifdef CLEAR_INSN_CACHE
4810 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4811 does something. Just do the default expansion to a call to
4812 __clear_cache(). */
4813 return NULL_RTX;
4814 #else
4815 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4816 does nothing. There is no need to call it. Do nothing. */
4817 return const0_rtx;
4818 #endif /* CLEAR_INSN_CACHE */
4819 #else
4820 /* We have a "clear_cache" insn, and it will handle everything. */
4821 tree begin, end;
4822 rtx begin_rtx, end_rtx;
4823
4824 /* We must not expand to a library call. If we did, any
4825 fallback library function in libgcc that might contain a call to
4826 __builtin___clear_cache() would recurse infinitely. */
4827 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4828 {
4829 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4830 return const0_rtx;
4831 }
4832
4833 if (HAVE_clear_cache)
4834 {
4835 struct expand_operand ops[2];
4836
4837 begin = CALL_EXPR_ARG (exp, 0);
4838 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4839
4840 end = CALL_EXPR_ARG (exp, 1);
4841 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4842
4843 create_address_operand (&ops[0], begin_rtx);
4844 create_address_operand (&ops[1], end_rtx);
4845 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4846 return const0_rtx;
4847 }
4848 return const0_rtx;
4849 #endif /* HAVE_clear_cache */
4850 }
4851
4852 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4853
4854 static rtx
4855 round_trampoline_addr (rtx tramp)
4856 {
4857 rtx temp, addend, mask;
4858
4859 /* If we don't need too much alignment, we'll have been guaranteed
4860 proper alignment by get_trampoline_type. */
4861 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4862 return tramp;
4863
4864 /* Round address up to desired boundary. */
4865 temp = gen_reg_rtx (Pmode);
4866 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4867 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4868
4869 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4870 temp, 0, OPTAB_LIB_WIDEN);
4871 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4872 temp, 0, OPTAB_LIB_WIDEN);
4873
4874 return tramp;
4875 }
4876
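/* Editorial sketch, not part of the original source: the two binops
   above compute the classic power-of-two round-up.  */
#if 0
#include <stdint.h>

static uintptr_t
round_up_addr (uintptr_t addr, uintptr_t align)
{
  /* For align == 16: 0x1003 -> 0x1010, 0x1010 -> 0x1010.  */
  return (addr + align - 1) & -align;
}
#endif
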
4877 static rtx
4878 expand_builtin_init_trampoline (tree exp, bool onstack)
4879 {
4880 tree t_tramp, t_func, t_chain;
4881 rtx m_tramp, r_tramp, r_chain, tmp;
4882
4883 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4884 POINTER_TYPE, VOID_TYPE))
4885 return NULL_RTX;
4886
4887 t_tramp = CALL_EXPR_ARG (exp, 0);
4888 t_func = CALL_EXPR_ARG (exp, 1);
4889 t_chain = CALL_EXPR_ARG (exp, 2);
4890
4891 r_tramp = expand_normal (t_tramp);
4892 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4893 MEM_NOTRAP_P (m_tramp) = 1;
4894
4895 /* If ONSTACK, the TRAMP argument should be the address of a field
4896 within the local function's FRAME decl. Either way, let's see if
4897 we can fill in the MEM_ATTRs for this memory. */
4898 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4899 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4900
4901 /* The creator of a heap trampoline is responsible for making sure the
4902 address is aligned to at least STACK_BOUNDARY. Normally malloc
4903 will ensure this anyhow. */
4904 tmp = round_trampoline_addr (r_tramp);
4905 if (tmp != r_tramp)
4906 {
4907 m_tramp = change_address (m_tramp, BLKmode, tmp);
4908 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4909 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4910 }
4911
4912 /* The FUNC argument should be the address of the nested function.
4913 Extract the actual function decl to pass to the hook. */
4914 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4915 t_func = TREE_OPERAND (t_func, 0);
4916 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4917
4918 r_chain = expand_normal (t_chain);
4919
4920 /* Generate insns to initialize the trampoline. */
4921 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4922
4923 if (onstack)
4924 {
4925 trampolines_created = 1;
4926
4927 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4928 "trampoline generated for nested function %qD", t_func);
4929 }
4930
4931 return const0_rtx;
4932 }
4933
4934 static rtx
4935 expand_builtin_adjust_trampoline (tree exp)
4936 {
4937 rtx tramp;
4938
4939 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4940 return NULL_RTX;
4941
4942 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4943 tramp = round_trampoline_addr (tramp);
4944 if (targetm.calls.trampoline_adjust_address)
4945 tramp = targetm.calls.trampoline_adjust_address (tramp);
4946
4947 return tramp;
4948 }
4949
4950 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4951 function. The function first checks whether the back end provides
4952 an insn to implement signbit for the respective mode. If not, it
4953 checks whether the floating point format of the value is such that
4954 the sign bit can be extracted. If that is not the case, the
4955 function returns NULL_RTX to indicate that a normal call should be
4956 emitted rather than expanding the function in-line. EXP is the
4957 expression that is a call to the builtin function; if convenient,
4958 the result should be placed in TARGET. */
4959 static rtx
4960 expand_builtin_signbit (tree exp, rtx target)
4961 {
4962 const struct real_format *fmt;
4963 enum machine_mode fmode, imode, rmode;
4964 tree arg;
4965 int word, bitpos;
4966 enum insn_code icode;
4967 rtx temp;
4968 location_t loc = EXPR_LOCATION (exp);
4969
4970 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4971 return NULL_RTX;
4972
4973 arg = CALL_EXPR_ARG (exp, 0);
4974 fmode = TYPE_MODE (TREE_TYPE (arg));
4975 rmode = TYPE_MODE (TREE_TYPE (exp));
4976 fmt = REAL_MODE_FORMAT (fmode);
4977
4978 arg = builtin_save_expr (arg);
4979
4980 /* Expand the argument yielding a RTX expression. */
4981 temp = expand_normal (arg);
4982
4983 /* Check if the back end provides an insn that handles signbit for the
4984 argument's mode. */
4985 icode = optab_handler (signbit_optab, fmode);
4986 if (icode != CODE_FOR_nothing)
4987 {
4988 rtx last = get_last_insn ();
4989 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4990 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4991 return target;
4992 delete_insns_since (last);
4993 }
4994
4995 /* For floating point formats without a sign bit, implement signbit
4996 as "ARG < 0.0". */
4997 bitpos = fmt->signbit_ro;
4998 if (bitpos < 0)
4999 {
5000 /* But we can't do this if the format supports signed zero. */
5001 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5002 return NULL_RTX;
5003
5004 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5005 build_real (TREE_TYPE (arg), dconst0));
5006 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5007 }
5008
5009 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5010 {
5011 imode = int_mode_for_mode (fmode);
5012 if (imode == BLKmode)
5013 return NULL_RTX;
5014 temp = gen_lowpart (imode, temp);
5015 }
5016 else
5017 {
5018 imode = word_mode;
5019 /* Handle targets with different FP word orders. */
5020 if (FLOAT_WORDS_BIG_ENDIAN)
5021 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5022 else
5023 word = bitpos / BITS_PER_WORD;
5024 temp = operand_subword_force (temp, word, fmode);
5025 bitpos = bitpos % BITS_PER_WORD;
5026 }
5027
5028 /* Force the intermediate word_mode (or narrower) result into a
5029 register. This avoids attempting to create paradoxical SUBREGs
5030 of floating point modes below. */
5031 temp = force_reg (imode, temp);
5032
5033   /* If the bitpos is within the "result mode" lowpart, the operation
5034      can be implemented with a single bitwise AND.  Otherwise, we need
5035      a right shift and an AND. */
5036
5037 if (bitpos < GET_MODE_BITSIZE (rmode))
5038 {
5039 double_int mask = double_int_zero.set_bit (bitpos);
5040
5041 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5042 temp = gen_lowpart (rmode, temp);
5043 temp = expand_binop (rmode, and_optab, temp,
5044 immed_double_int_const (mask, rmode),
5045 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5046 }
5047 else
5048 {
5049 /* Perform a logical right shift to place the signbit in the least
5050 significant bit, then truncate the result to the desired mode
5051 and mask just this bit. */
5052 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5053 temp = gen_lowpart (rmode, temp);
5054 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5055 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5056 }
5057
5058 return temp;
5059 }
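
/* Worked example (added for exposition, not part of the original source):
   for IEEE single precision on a typical 32-bit target, signbit takes the
   mask path above with bitpos == 31, i.e. the expansion behaves like

     int signbit_sketch (float x)
     {
       unsigned int bits;
       __builtin_memcpy (&bits, &x, sizeof bits);
       return bits & 0x80000000u;
     }

   For double on a 64-bit target with a 32-bit result mode, bitpos == 63
   lies outside the result mode, so the shift-then-AND branch is used.  */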
5060
5061 /* Expand fork or exec calls.  TARGET is the desired target of the
5062    call.  EXP is the call.  FN is the declaration of the
5063    actual function.  IGNORE is nonzero if the value is to be
5064    ignored. */
5065
5066 static rtx
5067 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5068 {
5069 tree id, decl;
5070 tree call;
5071
5072 /* If we are not profiling, just call the function. */
5073 if (!profile_arc_flag)
5074 return NULL_RTX;
5075
5076   /* Otherwise call the wrapper.  This should be equivalent for the rest
5077      of the compiler, so the code does not diverge, and the wrapper may run
5078      the code necessary to keep the profiling data sane. */
5079
5080 switch (DECL_FUNCTION_CODE (fn))
5081 {
5082 case BUILT_IN_FORK:
5083 id = get_identifier ("__gcov_fork");
5084 break;
5085
5086 case BUILT_IN_EXECL:
5087 id = get_identifier ("__gcov_execl");
5088 break;
5089
5090 case BUILT_IN_EXECV:
5091 id = get_identifier ("__gcov_execv");
5092 break;
5093
5094 case BUILT_IN_EXECLP:
5095 id = get_identifier ("__gcov_execlp");
5096 break;
5097
5098 case BUILT_IN_EXECLE:
5099 id = get_identifier ("__gcov_execle");
5100 break;
5101
5102 case BUILT_IN_EXECVP:
5103 id = get_identifier ("__gcov_execvp");
5104 break;
5105
5106 case BUILT_IN_EXECVE:
5107 id = get_identifier ("__gcov_execve");
5108 break;
5109
5110 default:
5111 gcc_unreachable ();
5112 }
5113
5114 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5115 FUNCTION_DECL, id, TREE_TYPE (fn));
5116 DECL_EXTERNAL (decl) = 1;
5117 TREE_PUBLIC (decl) = 1;
5118 DECL_ARTIFICIAL (decl) = 1;
5119 TREE_NOTHROW (decl) = 1;
5120 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5121 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5122 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5123 return expand_call (call, target, ignore);
5124 }
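
/* Illustration (an assumption about typical use, not original text): with
   -fprofile-arcs, a call such as

     pid_t pid = fork ();

   is expanded as if the user had called

     pid_t pid = __gcov_fork ();

   so libgcov can keep the profile counters consistent across the fork;
   the exec variants are wrapped analogously.  */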
5125
5126
5127 \f
5128 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5129 the pointer in these functions is void*, the tree optimizers may remove
5130 casts. The mode computed in expand_builtin isn't reliable either, due
5131 to __sync_bool_compare_and_swap.
5132
5133 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5134 group of builtins. This gives us log2 of the mode size. */
5135
5136 static inline enum machine_mode
5137 get_builtin_sync_mode (int fcode_diff)
5138 {
5139 /* The size is not negotiable, so ask not to get BLKmode in return
5140 if the target indicates that a smaller size would be better. */
5141 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5142 }
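
/* Worked example (exposition only): for __sync_fetch_and_add_4,
   FCODE_DIFF == BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1
   == 2, giving BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on a typical
   target.  The _1/_2/_4/_8/_16 suffixes map to shifts of 0/1/2/3/4.  */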
5143
5144 /* Expand the memory expression LOC and return the appropriate memory operand
5145 for the builtin_sync operations. */
5146
5147 static rtx
5148 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5149 {
5150 rtx addr, mem;
5151
5152 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5153 addr = convert_memory_address (Pmode, addr);
5154
5155 /* Note that we explicitly do not want any alias information for this
5156 memory, so that we kill all other live memories. Otherwise we don't
5157 satisfy the full barrier semantics of the intrinsic. */
5158 mem = validize_mem (gen_rtx_MEM (mode, addr));
5159
5160   /* The memory must be at least as aligned as the mode requires. */
5161 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5162 get_pointer_alignment (loc)));
5163 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5164 MEM_VOLATILE_P (mem) = 1;
5165
5166 return mem;
5167 }
5168
5169 /* Make sure an argument is in the right mode.
5170 EXP is the tree argument.
5171 MODE is the mode it should be in. */
5172
5173 static rtx
5174 expand_expr_force_mode (tree exp, enum machine_mode mode)
5175 {
5176 rtx val;
5177 enum machine_mode old_mode;
5178
5179 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5180 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5181 of CONST_INTs, where we know the old_mode only from the call argument. */
5182
5183 old_mode = GET_MODE (val);
5184 if (old_mode == VOIDmode)
5185 old_mode = TYPE_MODE (TREE_TYPE (exp));
5186 val = convert_modes (mode, old_mode, val, 1);
5187 return val;
5188 }
5189
5190
5191 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5192 EXP is the CALL_EXPR. CODE is the rtx code
5193 that corresponds to the arithmetic or logical operation from the name;
5194 an exception here is that NOT actually means NAND. TARGET is an optional
5195 place for us to store the results; AFTER is true if this is the
5196 fetch_and_xxx form. */
5197
5198 static rtx
5199 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5200 enum rtx_code code, bool after,
5201 rtx target)
5202 {
5203 rtx val, mem;
5204 location_t loc = EXPR_LOCATION (exp);
5205
5206 if (code == NOT && warn_sync_nand)
5207 {
5208 tree fndecl = get_callee_fndecl (exp);
5209 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5210
5211 static bool warned_f_a_n, warned_n_a_f;
5212
5213 switch (fcode)
5214 {
5215 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5216 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5217 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5218 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5219 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5220 if (warned_f_a_n)
5221 break;
5222
5223 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5224 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5225 warned_f_a_n = true;
5226 break;
5227
5228 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5229 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5230 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5231 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5232 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5233 if (warned_n_a_f)
5234 break;
5235
5236 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5237 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5238 warned_n_a_f = true;
5239 break;
5240
5241 default:
5242 gcc_unreachable ();
5243 }
5244 }
5245
5246 /* Expand the operands. */
5247 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5248 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5249
5250 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5251 after);
5252 }
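
/* Exposition of the NAND semantics mentioned above: since GCC 4.4,

     __sync_fetch_and_nand (ptr, val)

   atomically performs { tmp = *ptr; *ptr = ~(tmp & val); return tmp; },
   whereas earlier releases computed *ptr = ~tmp & val, which is why the
   inform calls above point users at the changed behavior.  */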
5253
5254 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5255 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5256 true if this is the boolean form. TARGET is a place for us to store the
5257 results; this is NOT optional if IS_BOOL is true. */
5258
5259 static rtx
5260 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5261 bool is_bool, rtx target)
5262 {
5263 rtx old_val, new_val, mem;
5264 rtx *pbool, *poval;
5265
5266 /* Expand the operands. */
5267 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5268 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5269 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5270
5271 pbool = poval = NULL;
5272 if (target != const0_rtx)
5273 {
5274 if (is_bool)
5275 pbool = &target;
5276 else
5277 poval = &target;
5278 }
5279 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5280 false, MEMMODEL_SEQ_CST,
5281 MEMMODEL_SEQ_CST))
5282 return NULL_RTX;
5283
5284 return target;
5285 }
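
/* Usage sketch (illustrative, not from the original source).  The bool
   form returns whether the swap happened; the val form returns the prior
   contents of the location:

     static int word;

     _Bool try_set (int oldv, int newv)
     {
       return __sync_bool_compare_and_swap (&word, oldv, newv);
     }

     int prior (int oldv, int newv)
     {
       return __sync_val_compare_and_swap (&word, oldv, newv);
     }

   Both expand through the code above; only which of PBOOL/POVAL is
   non-null differs.  */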
5286
5287 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5288 general form is actually an atomic exchange, and some targets only
5289 support a reduced form with the second argument being a constant 1.
5290 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5291 the results. */
5292
5293 static rtx
5294 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5295 rtx target)
5296 {
5297 rtx val, mem;
5298
5299 /* Expand the operands. */
5300 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5301 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5302
5303 return expand_sync_lock_test_and_set (target, mem, val);
5304 }
5305
5306 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5307
5308 static void
5309 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5310 {
5311 rtx mem;
5312
5313 /* Expand the operands. */
5314 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5315
5316 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5317 }
5318
5319 /* Given an integer representing an ``enum memmodel'', verify its
5320 correctness and return the memory model enum. */
5321
5322 static enum memmodel
5323 get_memmodel (tree exp)
5324 {
5325 rtx op;
5326 unsigned HOST_WIDE_INT val;
5327
5328   /* If the parameter is not a constant, it's a run-time value, so we'll
5329      just convert it to MEMMODEL_SEQ_CST to avoid annoying run-time checking. */
5330 if (TREE_CODE (exp) != INTEGER_CST)
5331 return MEMMODEL_SEQ_CST;
5332
5333 op = expand_normal (exp);
5334
5335 val = INTVAL (op);
5336 if (targetm.memmodel_check)
5337 val = targetm.memmodel_check (val);
5338 else if (val & ~MEMMODEL_MASK)
5339 {
5340       warning (OPT_Winvalid_memory_model,
5341 	       "unknown architecture specifier in memory model to builtin");
5342 return MEMMODEL_SEQ_CST;
5343 }
5344
5345 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5346 {
5347 warning (OPT_Winvalid_memory_model,
5348 "invalid memory model argument to builtin");
5349 return MEMMODEL_SEQ_CST;
5350 }
5351
5352 return (enum memmodel) val;
5353 }
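
/* Exposition: the constant values follow the __ATOMIC_* enumeration
   (RELAXED 0, CONSUME 1, ACQUIRE 2, RELEASE 3, ACQ_REL 4, SEQ_CST 5),
   so e.g.

     __atomic_load_n (&x, __ATOMIC_ACQUIRE)    passes INTEGER_CST 2, while
     __atomic_load_n (&x, runtime_model)       is simply treated as
                                               MEMMODEL_SEQ_CST here.

   Bits outside MEMMODEL_MASK are reserved for target-specific model
   variants, which targetm.memmodel_check vets when it is defined.  */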
5354
5355 /* Expand the __atomic_exchange intrinsic:
5356 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5357 EXP is the CALL_EXPR.
5358 TARGET is an optional place for us to store the results. */
5359
5360 static rtx
5361 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5362 {
5363 rtx val, mem;
5364 enum memmodel model;
5365
5366 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5367 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5368 {
5369 error ("invalid memory model for %<__atomic_exchange%>");
5370 return NULL_RTX;
5371 }
5372
5373 if (!flag_inline_atomics)
5374 return NULL_RTX;
5375
5376 /* Expand the operands. */
5377 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5378 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5379
5380 return expand_atomic_exchange (target, mem, val, model);
5381 }
5382
5383 /* Expand the __atomic_compare_exchange intrinsic:
5384 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5385 TYPE desired, BOOL weak,
5386 enum memmodel success,
5387 enum memmodel failure)
5388 EXP is the CALL_EXPR.
5389 TARGET is an optional place for us to store the results. */
5390
5391 static rtx
5392 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5393 rtx target)
5394 {
5395 rtx expect, desired, mem, oldval;
5396 enum memmodel success, failure;
5397 tree weak;
5398 bool is_weak;
5399
5400 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5401 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5402
5403 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5404 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5405 {
5406 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5407 return NULL_RTX;
5408 }
5409
5410 if (failure > success)
5411 {
5412 error ("failure memory model cannot be stronger than success "
5413 "memory model for %<__atomic_compare_exchange%>");
5414 return NULL_RTX;
5415 }
5416
5417 if (!flag_inline_atomics)
5418 return NULL_RTX;
5419
5420 /* Expand the operands. */
5421 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5422
5423 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5424 expect = convert_memory_address (Pmode, expect);
5425 expect = gen_rtx_MEM (mode, expect);
5426 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5427
5428 weak = CALL_EXPR_ARG (exp, 3);
5429 is_weak = false;
5430 if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
5431 is_weak = true;
5432
5433 oldval = expect;
5434 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5435 &oldval, mem, oldval, desired,
5436 is_weak, success, failure))
5437 return NULL_RTX;
5438
5439 if (oldval != expect)
5440 emit_move_insn (expect, oldval);
5441
5442 return target;
5443 }
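
/* Illustrative call reaching this expander (the user-level _n form;
   names here are hypothetical):

     static int obj;

     _Bool cmpxchg_sketch (void)
     {
       int expected = 0;
       return __atomic_compare_exchange_n (&obj, &expected, 1, 0,
                                           __ATOMIC_ACQ_REL,
                                           __ATOMIC_ACQUIRE);
     }

   On failure, the memory EXPECT points to (here &expected) receives the
   value actually seen, which is what the final emit_move_insn does.  */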
5444
5445 /* Expand the __atomic_load intrinsic:
5446 TYPE __atomic_load (TYPE *object, enum memmodel)
5447 EXP is the CALL_EXPR.
5448 TARGET is an optional place for us to store the results. */
5449
5450 static rtx
5451 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5452 {
5453 rtx mem;
5454 enum memmodel model;
5455
5456 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5457 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5458 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5459 {
5460 error ("invalid memory model for %<__atomic_load%>");
5461 return NULL_RTX;
5462 }
5463
5464 if (!flag_inline_atomics)
5465 return NULL_RTX;
5466
5467 /* Expand the operand. */
5468 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5469
5470 return expand_atomic_load (target, mem, model);
5471 }
5472
5473
5474 /* Expand the __atomic_store intrinsic:
5475 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5476 EXP is the CALL_EXPR.
5477 TARGET is an optional place for us to store the results. */
5478
5479 static rtx
5480 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5481 {
5482 rtx mem, val;
5483 enum memmodel model;
5484
5485 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5486 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5487 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5488 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5489 {
5490 error ("invalid memory model for %<__atomic_store%>");
5491 return NULL_RTX;
5492 }
5493
5494 if (!flag_inline_atomics)
5495 return NULL_RTX;
5496
5497 /* Expand the operands. */
5498 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5499 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5500
5501 return expand_atomic_store (mem, val, model, false);
5502 }
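
/* Model-checking sketch (exposition only):

     __atomic_store_n (&x, 1, __ATOMIC_RELEASE);   accepted, expanded inline
     __atomic_store_n (&x, 1, __ATOMIC_ACQUIRE);   diagnosed by the check
                                                   at the top

   Loads conversely reject RELEASE and ACQ_REL, mirroring the C11 rules
   for atomic loads and stores.  */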
5503
5504 /* Expand the __atomic_fetch_XXX intrinsic:
5505 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5506 EXP is the CALL_EXPR.
5507 TARGET is an optional place for us to store the results.
5508    CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5509 FETCH_AFTER is true if returning the result of the operation.
5510 FETCH_AFTER is false if returning the value before the operation.
5511 IGNORE is true if the result is not used.
5512 EXT_CALL is the correct builtin for an external call if this cannot be
5513 resolved to an instruction sequence. */
5514
5515 static rtx
5516 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5517 enum rtx_code code, bool fetch_after,
5518 bool ignore, enum built_in_function ext_call)
5519 {
5520 rtx val, mem, ret;
5521 enum memmodel model;
5522 tree fndecl;
5523 tree addr;
5524
5525 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5526
5527 /* Expand the operands. */
5528 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5529 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5530
5531 /* Only try generating instructions if inlining is turned on. */
5532 if (flag_inline_atomics)
5533 {
5534 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5535 if (ret)
5536 return ret;
5537 }
5538
5539   /* If there is no library routine to fall back on, give up. */
5540 if (ext_call == BUILT_IN_NONE)
5541 return NULL_RTX;
5542
5543 /* Change the call to the specified function. */
5544 fndecl = get_callee_fndecl (exp);
5545 addr = CALL_EXPR_FN (exp);
5546 STRIP_NOPS (addr);
5547
5548 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5549 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5550
5551 /* Expand the call here so we can emit trailing code. */
5552 ret = expand_call (exp, target, ignore);
5553
5554 /* Replace the original function just in case it matters. */
5555 TREE_OPERAND (addr, 0) = fndecl;
5556
5557 /* Then issue the arithmetic correction to return the right result. */
5558 if (!ignore)
5559 {
5560 if (code == NOT)
5561 {
5562 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5563 OPTAB_LIB_WIDEN);
5564 ret = expand_simple_unop (mode, NOT, ret, target, true);
5565 }
5566 else
5567 ret = expand_simple_binop (mode, code, ret, val, target, true,
5568 OPTAB_LIB_WIDEN);
5569 }
5570 return ret;
5571 }
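
/* Sketch of the arithmetic correction above (exposition only): the
   library fallback only returns the pre-operation value, so for, e.g.,
   __atomic_add_fetch routed to __atomic_fetch_add we compute

     ret = __atomic_fetch_add (ptr, val, model);    value before
     ret = ret + val;                               value after, as needed

   and for NAND (CODE == NOT) the fix-up is ret = ~(ret & val), matching
   the AND-then-NOT pair emitted above.  */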
5572
5573
5574 #ifndef HAVE_atomic_clear
5575 # define HAVE_atomic_clear 0
5576 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5577 #endif
5578
5579 /* Expand an atomic clear operation.
5580      void __atomic_clear (BOOL *obj, enum memmodel)
5581 EXP is the call expression. */
5582
5583 static rtx
5584 expand_builtin_atomic_clear (tree exp)
5585 {
5586 enum machine_mode mode;
5587 rtx mem, ret;
5588 enum memmodel model;
5589
5590 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5591 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5592 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5593
5594 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5595 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5596 {
5597       error ("invalid memory model for %<__atomic_clear%>");
5598 return const0_rtx;
5599 }
5600
5601 if (HAVE_atomic_clear)
5602 {
5603 emit_insn (gen_atomic_clear (mem, model));
5604 return const0_rtx;
5605 }
5606
5607   /* Try issuing an atomic store, allowing fallback to __sync_lock_release
5608      and, failing that, a plain store.  The only way the atomic variants
5609      can fail is if the bool type is larger than a word size.  Unlikely,
5610      but handle it anyway for completeness.  Assume a single-threaded model
5611      in that case, since there is no atomic support and no barriers are required. */
5612 ret = expand_atomic_store (mem, const0_rtx, model, true);
5613 if (!ret)
5614 emit_move_insn (mem, const0_rtx);
5615 return const0_rtx;
5616 }
5617
5618 /* Expand an atomic test_and_set operation.
5619      bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5620 EXP is the call expression. */
5621
5622 static rtx
5623 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5624 {
5625 rtx mem;
5626 enum memmodel model;
5627 enum machine_mode mode;
5628
5629 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5630 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5631 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5632
5633 return expand_atomic_test_and_set (target, mem, model);
5634 }
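
/* Together with __atomic_clear above, this suffices for a minimal spin
   lock (illustrative sketch; my_lock/my_unlock are hypothetical):

     static _Bool lock_flag;

     void my_lock (void)
     {
       while (__atomic_test_and_set (&lock_flag, __ATOMIC_ACQUIRE))
         ;
     }

     void my_unlock (void)
     {
       __atomic_clear (&lock_flag, __ATOMIC_RELEASE);
     }
*/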
5635
5636
5637 /* Return true if the object pointed to by (optional) argument ARG1, of size
5638    ARG0, is always lock free on this architecture.  If ARG1 is NULL, use typical alignment for size ARG0. */
5639
5640 static tree
5641 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5642 {
5643 int size;
5644 enum machine_mode mode;
5645 unsigned int mode_align, type_align;
5646
5647 if (TREE_CODE (arg0) != INTEGER_CST)
5648 return NULL_TREE;
5649
5650 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5651 mode = mode_for_size (size, MODE_INT, 0);
5652 mode_align = GET_MODE_ALIGNMENT (mode);
5653
5654 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5655 type_align = mode_align;
5656 else
5657 {
5658 tree ttype = TREE_TYPE (arg1);
5659
5660 /* This function is usually invoked and folded immediately by the front
5661 end before anything else has a chance to look at it. The pointer
5662 parameter at this point is usually cast to a void *, so check for that
5663 and look past the cast. */
5664 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5665 && VOID_TYPE_P (TREE_TYPE (ttype)))
5666 arg1 = TREE_OPERAND (arg1, 0);
5667
5668 ttype = TREE_TYPE (arg1);
5669 gcc_assert (POINTER_TYPE_P (ttype));
5670
5671 /* Get the underlying type of the object. */
5672 ttype = TREE_TYPE (ttype);
5673 type_align = TYPE_ALIGN (ttype);
5674 }
5675
5676   /* If the object has smaller alignment, the lock free routines cannot
5677      be used. */
5678 if (type_align < mode_align)
5679 return boolean_false_node;
5680
5681 /* Check if a compare_and_swap pattern exists for the mode which represents
5682 the required size. The pattern is not allowed to fail, so the existence
5683 of the pattern indicates support is present. */
5684 if (can_compare_and_swap_p (mode, true))
5685 return boolean_true_node;
5686 else
5687 return boolean_false_node;
5688 }
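
/* Worked example (not from the original source): on a target providing a
   32-bit compare-and-swap pattern,

     __atomic_always_lock_free (4, 0)           folds to true, while
     __atomic_always_lock_free (4, &packed_obj) folds to false

   when packed_obj is only byte-aligned, since then type_align < mode_align
   above rules out the inline lock-free path.  */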
5689
5690 /* Return true if the parameters to call EXP represent an object which will
5691 always generate lock free instructions. The first argument represents the
5692 size of the object, and the second parameter is a pointer to the object
5693 itself. If NULL is passed for the object, then the result is based on
5694 typical alignment for an object of the specified size. Otherwise return
5695 false. */
5696
5697 static rtx
5698 expand_builtin_atomic_always_lock_free (tree exp)
5699 {
5700 tree size;
5701 tree arg0 = CALL_EXPR_ARG (exp, 0);
5702 tree arg1 = CALL_EXPR_ARG (exp, 1);
5703
5704 if (TREE_CODE (arg0) != INTEGER_CST)
5705 {
5706 error ("non-constant argument 1 to __atomic_always_lock_free");
5707 return const0_rtx;
5708 }
5709
5710 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5711 if (size == boolean_true_node)
5712 return const1_rtx;
5713 return const0_rtx;
5714 }
5715
5716 /* Return one or zero if it can be determined that the object ARG1, of
5717    size ARG0, is lock free on this architecture. */
5718
5719 static tree
5720 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5721 {
5722 if (!flag_inline_atomics)
5723 return NULL_TREE;
5724
5725   /* If it is always lock free, it is lock free; otherwise we cannot tell. */
5726 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5727 return boolean_true_node;
5728
5729 return NULL_TREE;
5730 }
5731
5732 /* Expand a call EXP to __atomic_is_lock_free.  The first argument is the
5733    size of the object, and the second parameter is a pointer to the object
5734    itself.  If NULL is passed for the object, then the result is based on
5735    typical alignment for an object of the specified size.  Return const1_rtx
5736    if the object is known to be lock free at compile time; otherwise return
5737    NULL_RTX so that a run-time call is emitted instead. */
5738
5739 static rtx
5740 expand_builtin_atomic_is_lock_free (tree exp)
5741 {
5742 tree size;
5743 tree arg0 = CALL_EXPR_ARG (exp, 0);
5744 tree arg1 = CALL_EXPR_ARG (exp, 1);
5745
5746 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5747 {
5748 error ("non-integer argument 1 to __atomic_is_lock_free");
5749 return NULL_RTX;
5750 }
5751
5752 if (!flag_inline_atomics)
5753 return NULL_RTX;
5754
5755 /* If the value is known at compile time, return the RTX for it. */
5756 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5757 if (size == boolean_true_node)
5758 return const1_rtx;
5759
5760 return NULL_RTX;
5761 }
5762
5763 /* Expand the __atomic_thread_fence intrinsic:
5764 void __atomic_thread_fence (enum memmodel)
5765 EXP is the CALL_EXPR. */
5766
5767 static void
5768 expand_builtin_atomic_thread_fence (tree exp)
5769 {
5770 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5771 expand_mem_thread_fence (model);
5772 }
5773
5774 /* Expand the __atomic_signal_fence intrinsic:
5775 void __atomic_signal_fence (enum memmodel)
5776 EXP is the CALL_EXPR. */
5777
5778 static void
5779 expand_builtin_atomic_signal_fence (tree exp)
5780 {
5781 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5782 expand_mem_signal_fence (model);
5783 }
5784
5785 /* Expand the __sync_synchronize intrinsic. */
5786
5787 static void
5788 expand_builtin_sync_synchronize (void)
5789 {
5790 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5791 }
5792
5793 static rtx
5794 expand_builtin_thread_pointer (tree exp, rtx target)
5795 {
5796 enum insn_code icode;
5797 if (!validate_arglist (exp, VOID_TYPE))
5798 return const0_rtx;
5799 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5800 if (icode != CODE_FOR_nothing)
5801 {
5802 struct expand_operand op;
5803 if (!REG_P (target) || GET_MODE (target) != Pmode)
5804 target = gen_reg_rtx (Pmode);
5805 create_output_operand (&op, target, Pmode);
5806 expand_insn (icode, 1, &op);
5807 return target;
5808 }
5809 error ("__builtin_thread_pointer is not supported on this target");
5810 return const0_rtx;
5811 }
5812
5813 static void
5814 expand_builtin_set_thread_pointer (tree exp)
5815 {
5816 enum insn_code icode;
5817 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5818 return;
5819 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5820 if (icode != CODE_FOR_nothing)
5821 {
5822 struct expand_operand op;
5823 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5824 Pmode, EXPAND_NORMAL);
5825 create_input_operand (&op, val, Pmode);
5826 expand_insn (icode, 1, &op);
5827 return;
5828 }
5829 error ("__builtin_set_thread_pointer is not supported on this target");
5830 }
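
/* Usage sketch (illustrative): these two expanders implement

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);

   as direct accesses to the target's thread-pointer register when the
   corresponding optabs exist, and report a hard error otherwise.  */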
5831
5832 \f
5833 /* Expand an expression EXP that calls a built-in function,
5834 with result going to TARGET if that's convenient
5835 (and in mode MODE if that's convenient).
5836 SUBTARGET may be used as the target for computing one of EXP's operands.
5837 IGNORE is nonzero if the value is to be ignored. */
5838
5839 rtx
5840 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5841 int ignore)
5842 {
5843 tree fndecl = get_callee_fndecl (exp);
5844 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5845 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5846 int flags;
5847
5848 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5849 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5850
5851 /* When not optimizing, generate calls to library functions for a certain
5852 set of builtins. */
5853 if (!optimize
5854 && !called_as_built_in (fndecl)
5855 && fcode != BUILT_IN_FORK
5856 && fcode != BUILT_IN_EXECL
5857 && fcode != BUILT_IN_EXECV
5858 && fcode != BUILT_IN_EXECLP
5859 && fcode != BUILT_IN_EXECLE
5860 && fcode != BUILT_IN_EXECVP
5861 && fcode != BUILT_IN_EXECVE
5862 && fcode != BUILT_IN_ALLOCA
5863 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5864 && fcode != BUILT_IN_FREE)
5865 return expand_call (exp, target, ignore);
5866
5867 /* The built-in function expanders test for target == const0_rtx
5868 to determine whether the function's result will be ignored. */
5869 if (ignore)
5870 target = const0_rtx;
5871
5872 /* If the result of a pure or const built-in function is ignored, and
5873 none of its arguments are volatile, we can avoid expanding the
5874 built-in call and just evaluate the arguments for side-effects. */
5875 if (target == const0_rtx
5876 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5877 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5878 {
5879 bool volatilep = false;
5880 tree arg;
5881 call_expr_arg_iterator iter;
5882
5883 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5884 if (TREE_THIS_VOLATILE (arg))
5885 {
5886 volatilep = true;
5887 break;
5888 }
5889
5890 if (! volatilep)
5891 {
5892 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5893 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5894 return const0_rtx;
5895 }
5896 }
5897
5898 switch (fcode)
5899 {
5900 CASE_FLT_FN (BUILT_IN_FABS):
5901 case BUILT_IN_FABSD32:
5902 case BUILT_IN_FABSD64:
5903 case BUILT_IN_FABSD128:
5904 target = expand_builtin_fabs (exp, target, subtarget);
5905 if (target)
5906 return target;
5907 break;
5908
5909 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5910 target = expand_builtin_copysign (exp, target, subtarget);
5911 if (target)
5912 return target;
5913 break;
5914
5915 /* Just do a normal library call if we were unable to fold
5916 the values. */
5917 CASE_FLT_FN (BUILT_IN_CABS):
5918 break;
5919
5920 CASE_FLT_FN (BUILT_IN_EXP):
5921 CASE_FLT_FN (BUILT_IN_EXP10):
5922 CASE_FLT_FN (BUILT_IN_POW10):
5923 CASE_FLT_FN (BUILT_IN_EXP2):
5924 CASE_FLT_FN (BUILT_IN_EXPM1):
5925 CASE_FLT_FN (BUILT_IN_LOGB):
5926 CASE_FLT_FN (BUILT_IN_LOG):
5927 CASE_FLT_FN (BUILT_IN_LOG10):
5928 CASE_FLT_FN (BUILT_IN_LOG2):
5929 CASE_FLT_FN (BUILT_IN_LOG1P):
5930 CASE_FLT_FN (BUILT_IN_TAN):
5931 CASE_FLT_FN (BUILT_IN_ASIN):
5932 CASE_FLT_FN (BUILT_IN_ACOS):
5933 CASE_FLT_FN (BUILT_IN_ATAN):
5934 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5935 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5936 because of possible accuracy problems. */
5937 if (! flag_unsafe_math_optimizations)
5938 break;
5939 CASE_FLT_FN (BUILT_IN_SQRT):
5940 CASE_FLT_FN (BUILT_IN_FLOOR):
5941 CASE_FLT_FN (BUILT_IN_CEIL):
5942 CASE_FLT_FN (BUILT_IN_TRUNC):
5943 CASE_FLT_FN (BUILT_IN_ROUND):
5944 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5945 CASE_FLT_FN (BUILT_IN_RINT):
5946 target = expand_builtin_mathfn (exp, target, subtarget);
5947 if (target)
5948 return target;
5949 break;
5950
5951 CASE_FLT_FN (BUILT_IN_FMA):
5952 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5953 if (target)
5954 return target;
5955 break;
5956
5957 CASE_FLT_FN (BUILT_IN_ILOGB):
5958 if (! flag_unsafe_math_optimizations)
5959 break;
5960 CASE_FLT_FN (BUILT_IN_ISINF):
5961 CASE_FLT_FN (BUILT_IN_FINITE):
5962 case BUILT_IN_ISFINITE:
5963 case BUILT_IN_ISNORMAL:
5964 target = expand_builtin_interclass_mathfn (exp, target);
5965 if (target)
5966 return target;
5967 break;
5968
5969 CASE_FLT_FN (BUILT_IN_ICEIL):
5970 CASE_FLT_FN (BUILT_IN_LCEIL):
5971 CASE_FLT_FN (BUILT_IN_LLCEIL):
5972 CASE_FLT_FN (BUILT_IN_LFLOOR):
5973 CASE_FLT_FN (BUILT_IN_IFLOOR):
5974 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5975 target = expand_builtin_int_roundingfn (exp, target);
5976 if (target)
5977 return target;
5978 break;
5979
5980 CASE_FLT_FN (BUILT_IN_IRINT):
5981 CASE_FLT_FN (BUILT_IN_LRINT):
5982 CASE_FLT_FN (BUILT_IN_LLRINT):
5983 CASE_FLT_FN (BUILT_IN_IROUND):
5984 CASE_FLT_FN (BUILT_IN_LROUND):
5985 CASE_FLT_FN (BUILT_IN_LLROUND):
5986 target = expand_builtin_int_roundingfn_2 (exp, target);
5987 if (target)
5988 return target;
5989 break;
5990
5991 CASE_FLT_FN (BUILT_IN_POWI):
5992 target = expand_builtin_powi (exp, target);
5993 if (target)
5994 return target;
5995 break;
5996
5997 CASE_FLT_FN (BUILT_IN_ATAN2):
5998 CASE_FLT_FN (BUILT_IN_LDEXP):
5999 CASE_FLT_FN (BUILT_IN_SCALB):
6000 CASE_FLT_FN (BUILT_IN_SCALBN):
6001 CASE_FLT_FN (BUILT_IN_SCALBLN):
6002 if (! flag_unsafe_math_optimizations)
6003 break;
6004
6005 CASE_FLT_FN (BUILT_IN_FMOD):
6006 CASE_FLT_FN (BUILT_IN_REMAINDER):
6007 CASE_FLT_FN (BUILT_IN_DREM):
6008 CASE_FLT_FN (BUILT_IN_POW):
6009 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6010 if (target)
6011 return target;
6012 break;
6013
6014 CASE_FLT_FN (BUILT_IN_CEXPI):
6015 target = expand_builtin_cexpi (exp, target);
6016 gcc_assert (target);
6017 return target;
6018
6019 CASE_FLT_FN (BUILT_IN_SIN):
6020 CASE_FLT_FN (BUILT_IN_COS):
6021 if (! flag_unsafe_math_optimizations)
6022 break;
6023 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6024 if (target)
6025 return target;
6026 break;
6027
6028 CASE_FLT_FN (BUILT_IN_SINCOS):
6029 if (! flag_unsafe_math_optimizations)
6030 break;
6031 target = expand_builtin_sincos (exp);
6032 if (target)
6033 return target;
6034 break;
6035
6036 case BUILT_IN_APPLY_ARGS:
6037 return expand_builtin_apply_args ();
6038
6039 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6040 FUNCTION with a copy of the parameters described by
6041 ARGUMENTS, and ARGSIZE. It returns a block of memory
6042      allocated on the stack into which are stored all the registers
6043 that might possibly be used for returning the result of a
6044 function. ARGUMENTS is the value returned by
6045 __builtin_apply_args. ARGSIZE is the number of bytes of
6046 arguments that must be copied. ??? How should this value be
6047 computed? We'll also need a safe worst case value for varargs
6048 functions. */
6049 case BUILT_IN_APPLY:
6050 if (!validate_arglist (exp, POINTER_TYPE,
6051 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6052 && !validate_arglist (exp, REFERENCE_TYPE,
6053 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6054 return const0_rtx;
6055 else
6056 {
6057 rtx ops[3];
6058
6059 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6060 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6061 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6062
6063 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6064 }
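
    /* Illustrative use of the forwarding builtins handled here and below
       (a classic argument-forwarding wrapper; target_fn is hypothetical):

	 void wrapper (void)
	 {
	   void *args = __builtin_apply_args ();
	   void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
	   __builtin_return (result);
	 }

       The 64 is the caller's guess at ARGSIZE, the very value the ???
       comment above asks how to compute.  */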
6065
6066 /* __builtin_return (RESULT) causes the function to return the
6067 value described by RESULT. RESULT is address of the block of
6068 memory returned by __builtin_apply. */
6069 case BUILT_IN_RETURN:
6070 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6071 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6072 return const0_rtx;
6073
6074 case BUILT_IN_SAVEREGS:
6075 return expand_builtin_saveregs ();
6076
6077 case BUILT_IN_VA_ARG_PACK:
6078 /* All valid uses of __builtin_va_arg_pack () are removed during
6079 inlining. */
6080 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6081 return const0_rtx;
6082
6083 case BUILT_IN_VA_ARG_PACK_LEN:
6084 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6085 inlining. */
6086 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6087 return const0_rtx;
6088
6089 /* Return the address of the first anonymous stack arg. */
6090 case BUILT_IN_NEXT_ARG:
6091 if (fold_builtin_next_arg (exp, false))
6092 return const0_rtx;
6093 return expand_builtin_next_arg ();
6094
6095 case BUILT_IN_CLEAR_CACHE:
6096 target = expand_builtin___clear_cache (exp);
6097 if (target)
6098 return target;
6099 break;
6100
6101 case BUILT_IN_CLASSIFY_TYPE:
6102 return expand_builtin_classify_type (exp);
6103
6104 case BUILT_IN_CONSTANT_P:
6105 return const0_rtx;
6106
6107 case BUILT_IN_FRAME_ADDRESS:
6108 case BUILT_IN_RETURN_ADDRESS:
6109 return expand_builtin_frame_address (fndecl, exp);
6110
6111 /* Returns the address of the area where the structure is returned.
6112 0 otherwise. */
6113 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6114 if (call_expr_nargs (exp) != 0
6115 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6116 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6117 return const0_rtx;
6118 else
6119 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6120
6121 case BUILT_IN_ALLOCA:
6122 case BUILT_IN_ALLOCA_WITH_ALIGN:
6123 /* If the allocation stems from the declaration of a variable-sized
6124 object, it cannot accumulate. */
6125 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6126 if (target)
6127 return target;
6128 break;
6129
6130 case BUILT_IN_STACK_SAVE:
6131 return expand_stack_save ();
6132
6133 case BUILT_IN_STACK_RESTORE:
6134 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6135 return const0_rtx;
6136
6137 case BUILT_IN_BSWAP16:
6138 case BUILT_IN_BSWAP32:
6139 case BUILT_IN_BSWAP64:
6140 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6141 if (target)
6142 return target;
6143 break;
6144
6145 CASE_INT_FN (BUILT_IN_FFS):
6146 target = expand_builtin_unop (target_mode, exp, target,
6147 subtarget, ffs_optab);
6148 if (target)
6149 return target;
6150 break;
6151
6152 CASE_INT_FN (BUILT_IN_CLZ):
6153 target = expand_builtin_unop (target_mode, exp, target,
6154 subtarget, clz_optab);
6155 if (target)
6156 return target;
6157 break;
6158
6159 CASE_INT_FN (BUILT_IN_CTZ):
6160 target = expand_builtin_unop (target_mode, exp, target,
6161 subtarget, ctz_optab);
6162 if (target)
6163 return target;
6164 break;
6165
6166 CASE_INT_FN (BUILT_IN_CLRSB):
6167 target = expand_builtin_unop (target_mode, exp, target,
6168 subtarget, clrsb_optab);
6169 if (target)
6170 return target;
6171 break;
6172
6173 CASE_INT_FN (BUILT_IN_POPCOUNT):
6174 target = expand_builtin_unop (target_mode, exp, target,
6175 subtarget, popcount_optab);
6176 if (target)
6177 return target;
6178 break;
6179
6180 CASE_INT_FN (BUILT_IN_PARITY):
6181 target = expand_builtin_unop (target_mode, exp, target,
6182 subtarget, parity_optab);
6183 if (target)
6184 return target;
6185 break;
6186
6187 case BUILT_IN_STRLEN:
6188 target = expand_builtin_strlen (exp, target, target_mode);
6189 if (target)
6190 return target;
6191 break;
6192
6193 case BUILT_IN_STRCPY:
6194 target = expand_builtin_strcpy (exp, target);
6195 if (target)
6196 return target;
6197 break;
6198
6199 case BUILT_IN_STRNCPY:
6200 target = expand_builtin_strncpy (exp, target);
6201 if (target)
6202 return target;
6203 break;
6204
6205 case BUILT_IN_STPCPY:
6206 target = expand_builtin_stpcpy (exp, target, mode);
6207 if (target)
6208 return target;
6209 break;
6210
6211 case BUILT_IN_MEMCPY:
6212 target = expand_builtin_memcpy (exp, target);
6213 if (target)
6214 return target;
6215 break;
6216
6217 case BUILT_IN_MEMPCPY:
6218 target = expand_builtin_mempcpy (exp, target, mode);
6219 if (target)
6220 return target;
6221 break;
6222
6223 case BUILT_IN_MEMSET:
6224 target = expand_builtin_memset (exp, target, mode);
6225 if (target)
6226 return target;
6227 break;
6228
6229 case BUILT_IN_BZERO:
6230 target = expand_builtin_bzero (exp);
6231 if (target)
6232 return target;
6233 break;
6234
6235 case BUILT_IN_STRCMP:
6236 target = expand_builtin_strcmp (exp, target);
6237 if (target)
6238 return target;
6239 break;
6240
6241 case BUILT_IN_STRNCMP:
6242 target = expand_builtin_strncmp (exp, target, mode);
6243 if (target)
6244 return target;
6245 break;
6246
6247 case BUILT_IN_BCMP:
6248 case BUILT_IN_MEMCMP:
6249 target = expand_builtin_memcmp (exp, target, mode);
6250 if (target)
6251 return target;
6252 break;
6253
6254 case BUILT_IN_SETJMP:
6255 /* This should have been lowered to the builtins below. */
6256 gcc_unreachable ();
6257
6258 case BUILT_IN_SETJMP_SETUP:
6259 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6260 and the receiver label. */
6261 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6262 {
6263 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6264 VOIDmode, EXPAND_NORMAL);
6265 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6266 rtx label_r = label_rtx (label);
6267
6268 /* This is copied from the handling of non-local gotos. */
6269 expand_builtin_setjmp_setup (buf_addr, label_r);
6270 nonlocal_goto_handler_labels
6271 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6272 nonlocal_goto_handler_labels);
6273 /* ??? Do not let expand_label treat us as such since we would
6274 not want to be both on the list of non-local labels and on
6275 the list of forced labels. */
6276 FORCED_LABEL (label) = 0;
6277 return const0_rtx;
6278 }
6279 break;
6280
6281 case BUILT_IN_SETJMP_DISPATCHER:
6282 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6283 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6284 {
6285 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6286 rtx label_r = label_rtx (label);
6287
6288 /* Remove the dispatcher label from the list of non-local labels
6289 since the receiver labels have been added to it above. */
6290 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6291 return const0_rtx;
6292 }
6293 break;
6294
6295 case BUILT_IN_SETJMP_RECEIVER:
6296 /* __builtin_setjmp_receiver is passed the receiver label. */
6297 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6298 {
6299 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6300 rtx label_r = label_rtx (label);
6301
6302 expand_builtin_setjmp_receiver (label_r);
6303 return const0_rtx;
6304 }
6305 break;
6306
6307 /* __builtin_longjmp is passed a pointer to an array of five words.
6308 It's similar to the C library longjmp function but works with
6309 __builtin_setjmp above. */
6310 case BUILT_IN_LONGJMP:
6311 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6312 {
6313 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6314 VOIDmode, EXPAND_NORMAL);
6315 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6316
6317 if (value != const1_rtx)
6318 {
6319 error ("%<__builtin_longjmp%> second argument must be 1");
6320 return const0_rtx;
6321 }
6322
6323 expand_builtin_longjmp (buf_addr, value);
6324 return const0_rtx;
6325 }
6326 break;
6327
6328 case BUILT_IN_NONLOCAL_GOTO:
6329 target = expand_builtin_nonlocal_goto (exp);
6330 if (target)
6331 return target;
6332 break;
6333
6334 /* This updates the setjmp buffer that is its argument with the value
6335 of the current stack pointer. */
6336 case BUILT_IN_UPDATE_SETJMP_BUF:
6337 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6338 {
6339 rtx buf_addr
6340 = expand_normal (CALL_EXPR_ARG (exp, 0));
6341
6342 expand_builtin_update_setjmp_buf (buf_addr);
6343 return const0_rtx;
6344 }
6345 break;
6346
6347 case BUILT_IN_TRAP:
6348 expand_builtin_trap ();
6349 return const0_rtx;
6350
6351 case BUILT_IN_UNREACHABLE:
6352 expand_builtin_unreachable ();
6353 return const0_rtx;
6354
6355 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6356 case BUILT_IN_SIGNBITD32:
6357 case BUILT_IN_SIGNBITD64:
6358 case BUILT_IN_SIGNBITD128:
6359 target = expand_builtin_signbit (exp, target);
6360 if (target)
6361 return target;
6362 break;
6363
6364 /* Various hooks for the DWARF 2 __throw routine. */
6365 case BUILT_IN_UNWIND_INIT:
6366 expand_builtin_unwind_init ();
6367 return const0_rtx;
6368 case BUILT_IN_DWARF_CFA:
6369 return virtual_cfa_rtx;
6370 #ifdef DWARF2_UNWIND_INFO
6371 case BUILT_IN_DWARF_SP_COLUMN:
6372 return expand_builtin_dwarf_sp_column ();
6373 case BUILT_IN_INIT_DWARF_REG_SIZES:
6374 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6375 return const0_rtx;
6376 #endif
6377 case BUILT_IN_FROB_RETURN_ADDR:
6378 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6379 case BUILT_IN_EXTRACT_RETURN_ADDR:
6380 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6381 case BUILT_IN_EH_RETURN:
6382 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6383 CALL_EXPR_ARG (exp, 1));
6384 return const0_rtx;
6385 #ifdef EH_RETURN_DATA_REGNO
6386 case BUILT_IN_EH_RETURN_DATA_REGNO:
6387 return expand_builtin_eh_return_data_regno (exp);
6388 #endif
6389 case BUILT_IN_EXTEND_POINTER:
6390 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6391 case BUILT_IN_EH_POINTER:
6392 return expand_builtin_eh_pointer (exp);
6393 case BUILT_IN_EH_FILTER:
6394 return expand_builtin_eh_filter (exp);
6395 case BUILT_IN_EH_COPY_VALUES:
6396 return expand_builtin_eh_copy_values (exp);
6397
6398 case BUILT_IN_VA_START:
6399 return expand_builtin_va_start (exp);
6400 case BUILT_IN_VA_END:
6401 return expand_builtin_va_end (exp);
6402 case BUILT_IN_VA_COPY:
6403 return expand_builtin_va_copy (exp);
6404 case BUILT_IN_EXPECT:
6405 return expand_builtin_expect (exp, target);
6406 case BUILT_IN_ASSUME_ALIGNED:
6407 return expand_builtin_assume_aligned (exp, target);
6408 case BUILT_IN_PREFETCH:
6409 expand_builtin_prefetch (exp);
6410 return const0_rtx;
6411
6412 case BUILT_IN_INIT_TRAMPOLINE:
6413 return expand_builtin_init_trampoline (exp, true);
6414 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6415 return expand_builtin_init_trampoline (exp, false);
6416 case BUILT_IN_ADJUST_TRAMPOLINE:
6417 return expand_builtin_adjust_trampoline (exp);
6418
6419 case BUILT_IN_FORK:
6420 case BUILT_IN_EXECL:
6421 case BUILT_IN_EXECV:
6422 case BUILT_IN_EXECLP:
6423 case BUILT_IN_EXECLE:
6424 case BUILT_IN_EXECVP:
6425 case BUILT_IN_EXECVE:
6426 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6427 if (target)
6428 return target;
6429 break;
6430
6431 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6432 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6433 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6434 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6435 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6436 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6437 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6438 if (target)
6439 return target;
6440 break;
6441
6442 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6443 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6444 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6445 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6446 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6447 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6448 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6449 if (target)
6450 return target;
6451 break;
6452
6453 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6454 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6455 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6456 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6457 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6458 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6459 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6460 if (target)
6461 return target;
6462 break;
6463
6464 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6465 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6466 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6467 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6468 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6469 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6470 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6471 if (target)
6472 return target;
6473 break;
6474
6475 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6476 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6477 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6478 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6479 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6480 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6481 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6482 if (target)
6483 return target;
6484 break;
6485
6486 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6487 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6488 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6489 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6490 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6491 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6492 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6493 if (target)
6494 return target;
6495 break;
6496
6497 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6498 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6499 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6500 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6501 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6502 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6503 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6504 if (target)
6505 return target;
6506 break;
6507
6508 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6509 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6510 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6511 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6512 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6513 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6514 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6515 if (target)
6516 return target;
6517 break;
6518
6519 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6520 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6521 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6522 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6523 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6524 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6525 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6526 if (target)
6527 return target;
6528 break;
6529
6530 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6531 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6532 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6533 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6534 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6535 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6536 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6537 if (target)
6538 return target;
6539 break;
6540
6541 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6542 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6543 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6544 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6545 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6546 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6547 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6548 if (target)
6549 return target;
6550 break;
6551
6552 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6553 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6554 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6555 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6556 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6557 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6558 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6559 if (target)
6560 return target;
6561 break;
6562
6563 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6564 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6565 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6566 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6567 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6568 if (mode == VOIDmode)
6569 mode = TYPE_MODE (boolean_type_node);
6570 if (!target || !register_operand (target, mode))
6571 target = gen_reg_rtx (mode);
6572
6573 mode = get_builtin_sync_mode
6574 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6575 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6576 if (target)
6577 return target;
6578 break;
6579
6580 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6581 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6582 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6583 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6584 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6585 mode = get_builtin_sync_mode
6586 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6587 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6588 if (target)
6589 return target;
6590 break;
6591
6592 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6593 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6594 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6595 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6596 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6597 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6598 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6599 if (target)
6600 return target;
6601 break;
6602
6603 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6604 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6605 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6606 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6607 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6608 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6609 expand_builtin_sync_lock_release (mode, exp);
6610 return const0_rtx;
6611
6612 case BUILT_IN_SYNC_SYNCHRONIZE:
6613 expand_builtin_sync_synchronize ();
6614 return const0_rtx;
6615
6616 case BUILT_IN_ATOMIC_EXCHANGE_1:
6617 case BUILT_IN_ATOMIC_EXCHANGE_2:
6618 case BUILT_IN_ATOMIC_EXCHANGE_4:
6619 case BUILT_IN_ATOMIC_EXCHANGE_8:
6620 case BUILT_IN_ATOMIC_EXCHANGE_16:
6621 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6622 target = expand_builtin_atomic_exchange (mode, exp, target);
6623 if (target)
6624 return target;
6625 break;
6626
6627 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6628 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6629 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6630 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6631 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6632 {
6633 unsigned int nargs, z;
6634 vec<tree, va_gc> *vec;
6635
6636 	mode
6637 	  = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6638 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6639 if (target)
6640 return target;
6641
6642 /* If this is turned into an external library call, the weak parameter
6643 must be dropped to match the expected parameter list. */
6644 nargs = call_expr_nargs (exp);
6645 vec_alloc (vec, nargs - 1);
6646 for (z = 0; z < 3; z++)
6647 vec->quick_push (CALL_EXPR_ARG (exp, z));
6648 /* Skip the boolean weak parameter. */
6649 for (z = 4; z < 6; z++)
6650 vec->quick_push (CALL_EXPR_ARG (exp, z));
6651 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6652 break;
6653 }
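
      /* Exposition: this rebuilds the six-argument builtin
	 __atomic_compare_exchange_N (ptr, expect, desired, weak, smodel,
	 fmodel) without the WEAK argument so that, when control falls
	 through to the expand_call at the end of this function, the call
	 matches the external library entry point, which takes only
	 (ptr, expect, desired, smodel, fmodel).  */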
6654
6655 case BUILT_IN_ATOMIC_LOAD_1:
6656 case BUILT_IN_ATOMIC_LOAD_2:
6657 case BUILT_IN_ATOMIC_LOAD_4:
6658 case BUILT_IN_ATOMIC_LOAD_8:
6659 case BUILT_IN_ATOMIC_LOAD_16:
6660 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6661 target = expand_builtin_atomic_load (mode, exp, target);
6662 if (target)
6663 return target;
6664 break;
6665
6666 case BUILT_IN_ATOMIC_STORE_1:
6667 case BUILT_IN_ATOMIC_STORE_2:
6668 case BUILT_IN_ATOMIC_STORE_4:
6669 case BUILT_IN_ATOMIC_STORE_8:
6670 case BUILT_IN_ATOMIC_STORE_16:
6671 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6672 target = expand_builtin_atomic_store (mode, exp);
6673 if (target)
6674 return const0_rtx;
6675 break;
6676
6677 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6678 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6679 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6680 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6681 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6682 {
6683 enum built_in_function lib;
6684 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6685 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6686 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6687 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6688 ignore, lib);
6689 if (target)
6690 return target;
6691 break;
6692 }
6693 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6694 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6695 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6696 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6697 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6698 {
6699 enum built_in_function lib;
6700 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6701 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6702 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6703 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6704 ignore, lib);
6705 if (target)
6706 return target;
6707 break;
6708 }
6709 case BUILT_IN_ATOMIC_AND_FETCH_1:
6710 case BUILT_IN_ATOMIC_AND_FETCH_2:
6711 case BUILT_IN_ATOMIC_AND_FETCH_4:
6712 case BUILT_IN_ATOMIC_AND_FETCH_8:
6713 case BUILT_IN_ATOMIC_AND_FETCH_16:
6714 {
6715 enum built_in_function lib;
6716 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6717 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6718 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6719 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6720 ignore, lib);
6721 if (target)
6722 return target;
6723 break;
6724 }
6725 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6726 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6727 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6728 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6729 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6730 {
6731 enum built_in_function lib;
6732 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6733 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6734 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6735 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6736 ignore, lib);
6737 if (target)
6738 return target;
6739 break;
6740 }
6741 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6742 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6743 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6744 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6745 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6746 {
6747 enum built_in_function lib;
6748 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6749 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6750 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6751 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6752 ignore, lib);
6753 if (target)
6754 return target;
6755 break;
6756 }
6757 case BUILT_IN_ATOMIC_OR_FETCH_1:
6758 case BUILT_IN_ATOMIC_OR_FETCH_2:
6759 case BUILT_IN_ATOMIC_OR_FETCH_4:
6760 case BUILT_IN_ATOMIC_OR_FETCH_8:
6761 case BUILT_IN_ATOMIC_OR_FETCH_16:
6762 {
6763 enum built_in_function lib;
6764 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6765 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6766 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6767 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6768 ignore, lib);
6769 if (target)
6770 return target;
6771 break;
6772 }
6773 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6774 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6775 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6776 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6777 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6778 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6779 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6780 ignore, BUILT_IN_NONE);
6781 if (target)
6782 return target;
6783 break;
6784
6785 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6786 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6787 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6788 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6789 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6790 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6791 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6792 ignore, BUILT_IN_NONE);
6793 if (target)
6794 return target;
6795 break;
6796
6797 case BUILT_IN_ATOMIC_FETCH_AND_1:
6798 case BUILT_IN_ATOMIC_FETCH_AND_2:
6799 case BUILT_IN_ATOMIC_FETCH_AND_4:
6800 case BUILT_IN_ATOMIC_FETCH_AND_8:
6801 case BUILT_IN_ATOMIC_FETCH_AND_16:
6802 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6803 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6804 ignore, BUILT_IN_NONE);
6805 if (target)
6806 return target;
6807 break;
6808
6809 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6810 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6811 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6812 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6813 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6814 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6815 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6816 ignore, BUILT_IN_NONE);
6817 if (target)
6818 return target;
6819 break;
6820
6821 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6822 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6823 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6824 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6825 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6826 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6827 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6828 ignore, BUILT_IN_NONE);
6829 if (target)
6830 return target;
6831 break;
6832
6833 case BUILT_IN_ATOMIC_FETCH_OR_1:
6834 case BUILT_IN_ATOMIC_FETCH_OR_2:
6835 case BUILT_IN_ATOMIC_FETCH_OR_4:
6836 case BUILT_IN_ATOMIC_FETCH_OR_8:
6837 case BUILT_IN_ATOMIC_FETCH_OR_16:
6838 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6839 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6840 ignore, BUILT_IN_NONE);
6841 if (target)
6842 return target;
6843 break;
6844
6845 case BUILT_IN_ATOMIC_TEST_AND_SET:
6846 return expand_builtin_atomic_test_and_set (exp, target);
6847
6848 case BUILT_IN_ATOMIC_CLEAR:
6849 return expand_builtin_atomic_clear (exp);
6850
6851 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6852 return expand_builtin_atomic_always_lock_free (exp);
6853
6854 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6855 target = expand_builtin_atomic_is_lock_free (exp);
6856 if (target)
6857 return target;
6858 break;
6859
6860 case BUILT_IN_ATOMIC_THREAD_FENCE:
6861 expand_builtin_atomic_thread_fence (exp);
6862 return const0_rtx;
6863
6864 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6865 expand_builtin_atomic_signal_fence (exp);
6866 return const0_rtx;
6867
6868 case BUILT_IN_OBJECT_SIZE:
6869 return expand_builtin_object_size (exp);
6870
6871 case BUILT_IN_MEMCPY_CHK:
6872 case BUILT_IN_MEMPCPY_CHK:
6873 case BUILT_IN_MEMMOVE_CHK:
6874 case BUILT_IN_MEMSET_CHK:
6875 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6876 if (target)
6877 return target;
6878 break;
6879
6880 case BUILT_IN_STRCPY_CHK:
6881 case BUILT_IN_STPCPY_CHK:
6882 case BUILT_IN_STRNCPY_CHK:
6883 case BUILT_IN_STPNCPY_CHK:
6884 case BUILT_IN_STRCAT_CHK:
6885 case BUILT_IN_STRNCAT_CHK:
6886 case BUILT_IN_SNPRINTF_CHK:
6887 case BUILT_IN_VSNPRINTF_CHK:
6888 maybe_emit_chk_warning (exp, fcode);
6889 break;
6890
6891 case BUILT_IN_SPRINTF_CHK:
6892 case BUILT_IN_VSPRINTF_CHK:
6893 maybe_emit_sprintf_chk_warning (exp, fcode);
6894 break;
6895
6896 case BUILT_IN_FREE:
6897 if (warn_free_nonheap_object)
6898 maybe_emit_free_warning (exp);
6899 break;
6900
6901 case BUILT_IN_THREAD_POINTER:
6902 return expand_builtin_thread_pointer (exp, target);
6903
6904 case BUILT_IN_SET_THREAD_POINTER:
6905 expand_builtin_set_thread_pointer (exp);
6906 return const0_rtx;
6907
6908 default: /* Just do a library call if the builtin is unknown. */
6909 break;
6910 }
6911
6912 /* The switch statement above can drop through to cause the function
6913 to be called normally. */
6914 return expand_call (exp, target, ignore);
6915 }
6916
6917 /* Determine whether a tree node represents a call to a built-in
6918 function. If the tree T is a call to a built-in function with
6919 the right number of arguments of the appropriate types, return
6920 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6921 Otherwise the return value is END_BUILTINS. */
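
/* For instance (a hypothetical call tree): applied to sqrt (x) where
   x has REAL_TYPE, this returns BUILT_IN_SQRT; if the argument classes
   do not match the parameter list, say sqrt applied to a pointer, the
   walk below answers END_BUILTINS. */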
6922
6923 enum built_in_function
6924 builtin_mathfn_code (const_tree t)
6925 {
6926 const_tree fndecl, arg, parmlist;
6927 const_tree argtype, parmtype;
6928 const_call_expr_arg_iterator iter;
6929
6930 if (TREE_CODE (t) != CALL_EXPR
6931 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6932 return END_BUILTINS;
6933
6934 fndecl = get_callee_fndecl (t);
6935 if (fndecl == NULL_TREE
6936 || TREE_CODE (fndecl) != FUNCTION_DECL
6937 || ! DECL_BUILT_IN (fndecl)
6938 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6939 return END_BUILTINS;
6940
6941 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6942 init_const_call_expr_arg_iterator (t, &iter);
6943 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6944 {
6945 /* If a function doesn't take a variable number of arguments,
6946 the last element in the list will have type `void'. */
6947 parmtype = TREE_VALUE (parmlist);
6948 if (VOID_TYPE_P (parmtype))
6949 {
6950 if (more_const_call_expr_args_p (&iter))
6951 return END_BUILTINS;
6952 return DECL_FUNCTION_CODE (fndecl);
6953 }
6954
6955 if (! more_const_call_expr_args_p (&iter))
6956 return END_BUILTINS;
6957
6958 arg = next_const_call_expr_arg (&iter);
6959 argtype = TREE_TYPE (arg);
6960
6961 if (SCALAR_FLOAT_TYPE_P (parmtype))
6962 {
6963 if (! SCALAR_FLOAT_TYPE_P (argtype))
6964 return END_BUILTINS;
6965 }
6966 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6967 {
6968 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6969 return END_BUILTINS;
6970 }
6971 else if (POINTER_TYPE_P (parmtype))
6972 {
6973 if (! POINTER_TYPE_P (argtype))
6974 return END_BUILTINS;
6975 }
6976 else if (INTEGRAL_TYPE_P (parmtype))
6977 {
6978 if (! INTEGRAL_TYPE_P (argtype))
6979 return END_BUILTINS;
6980 }
6981 else
6982 return END_BUILTINS;
6983 }
6984
6985 /* Variable-length argument list. */
6986 return DECL_FUNCTION_CODE (fndecl);
6987 }
6988
6989 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6990 evaluate to a constant. */
6991
6992 static tree
6993 fold_builtin_constant_p (tree arg)
6994 {
6995 /* We return 1 for a numeric type that's known to be a constant
6996 value at compile-time or for an aggregate type that's a
6997 literal constant. */
6998 STRIP_NOPS (arg);
6999
7000 /* If we know this is a constant, return the constant one. */
7001 if (CONSTANT_CLASS_P (arg)
7002 || (TREE_CODE (arg) == CONSTRUCTOR
7003 && TREE_CONSTANT (arg)))
7004 return integer_one_node;
7005 if (TREE_CODE (arg) == ADDR_EXPR)
7006 {
7007 tree op = TREE_OPERAND (arg, 0);
7008 if (TREE_CODE (op) == STRING_CST
7009 || (TREE_CODE (op) == ARRAY_REF
7010 && integer_zerop (TREE_OPERAND (op, 1))
7011 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7012 return integer_one_node;
7013 }
7014
7015 /* If this expression has side effects, show we don't know it to be a
7016 constant. Likewise if it's a pointer or aggregate type, since in
7017 those cases we only want literals; those are only optimized
7018 when generating RTL, not later.
7019 And finally, if we are compiling an initializer, not code, we
7020 need to return a definite result now; there's not going to be any
7021 more optimization done. */
7022 if (TREE_SIDE_EFFECTS (arg)
7023 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7024 || POINTER_TYPE_P (TREE_TYPE (arg))
7025 || cfun == 0
7026 || folding_initializer
7027 || force_folding_builtin_constant_p)
7028 return integer_zero_node;
7029
7030 return NULL_TREE;
7031 }
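
/* A sketch of the resulting behaviour on hypothetical arguments:

     __builtin_constant_p (3 * 7)  -> 1  (folds to an INTEGER_CST)
     __builtin_constant_p ("abc")  -> 1  (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (ptr)    -> 0  (pointer type, literals only)

   everything else yields NULL_TREE so that later optimization may
   still prove the argument constant. */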
7032
7033 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7034 return it as a truthvalue. */
7035
7036 static tree
7037 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
7038 {
7039 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7040
7041 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7042 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7043 ret_type = TREE_TYPE (TREE_TYPE (fn));
7044 pred_type = TREE_VALUE (arg_types);
7045 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7046
7047 pred = fold_convert_loc (loc, pred_type, pred);
7048 expected = fold_convert_loc (loc, expected_type, expected);
7049 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
7050
7051 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7052 build_int_cst (ret_type, 0));
7053 }
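
/* For example (hypothetical operands), PRED a > b with EXPECTED 1
   yields the truthvalue

     __builtin_expect ((long) (a > b), 1) != 0

   where the casts come from the builtin's own parameter and return
   types. */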
7054
7055 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7056 NULL_TREE if no simplification is possible. */
7057
7058 static tree
7059 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
7060 {
7061 tree inner, fndecl, inner_arg0;
7062 enum tree_code code;
7063
7064 /* Distribute the expected value over short-circuiting operators.
7065 See through the cast from truthvalue_type_node to long. */
7066 inner_arg0 = arg0;
7067 while (TREE_CODE (inner_arg0) == NOP_EXPR
7068 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7069 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7070 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7071
7072 /* If this is a builtin_expect within a builtin_expect, keep the
7073 inner one. See through a comparison against a constant. It
7074 might have been added to create a truthvalue. */
7075 inner = inner_arg0;
7076
7077 if (COMPARISON_CLASS_P (inner)
7078 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7079 inner = TREE_OPERAND (inner, 0);
7080
7081 if (TREE_CODE (inner) == CALL_EXPR
7082 && (fndecl = get_callee_fndecl (inner))
7083 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7084 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7085 return arg0;
7086
7087 inner = inner_arg0;
7088 code = TREE_CODE (inner);
7089 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7090 {
7091 tree op0 = TREE_OPERAND (inner, 0);
7092 tree op1 = TREE_OPERAND (inner, 1);
7093
7094 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7095 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7096 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7097
7098 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7099 }
7100
7101 /* If the argument isn't invariant, then there's nothing else we can do. */
7102 if (!TREE_CONSTANT (inner_arg0))
7103 return NULL_TREE;
7104
7105 /* If we expect that a comparison against the argument will fold to
7106 a constant, return the constant. In practice, this means a true
7107 constant or the address of a non-weak symbol. */
7108 inner = inner_arg0;
7109 STRIP_NOPS (inner);
7110 if (TREE_CODE (inner) == ADDR_EXPR)
7111 {
7112 do
7113 {
7114 inner = TREE_OPERAND (inner, 0);
7115 }
7116 while (TREE_CODE (inner) == COMPONENT_REF
7117 || TREE_CODE (inner) == ARRAY_REF);
7118 if ((TREE_CODE (inner) == VAR_DECL
7119 || TREE_CODE (inner) == FUNCTION_DECL)
7120 && DECL_WEAK (inner))
7121 return NULL_TREE;
7122 }
7123
7124 /* Otherwise, ARG0 already has the proper type for the return value. */
7125 return arg0;
7126 }
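
/* A minimal sketch of the short-circuit distribution above
   (hypothetical operands):

     __builtin_expect (a && b, 1)

   becomes

     (__builtin_expect ((long) a, 1) != 0)
       && (__builtin_expect ((long) b, 1) != 0)

   so that each arm of the TRUTH_ANDIF_EXPR carries the expectation. */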
7127
7128 /* Fold a call to __builtin_classify_type with argument ARG. */
7129
7130 static tree
7131 fold_builtin_classify_type (tree arg)
7132 {
7133 if (arg == 0)
7134 return build_int_cst (integer_type_node, no_type_class);
7135
7136 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7137 }
7138
7139 /* Fold a call to __builtin_strlen with argument ARG. */
7140
7141 static tree
7142 fold_builtin_strlen (location_t loc, tree type, tree arg)
7143 {
7144 if (!validate_arg (arg, POINTER_TYPE))
7145 return NULL_TREE;
7146 else
7147 {
7148 tree len = c_strlen (arg, 0);
7149
7150 if (len)
7151 return fold_convert_loc (loc, type, len);
7152
7153 return NULL_TREE;
7154 }
7155 }
7156
7157 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7158
7159 static tree
7160 fold_builtin_inf (location_t loc, tree type, int warn)
7161 {
7162 REAL_VALUE_TYPE real;
7163
7164 /* __builtin_inff is intended to be usable to define INFINITY on all
7165 targets. If an infinity is not available, INFINITY expands "to a
7166 positive constant of type float that overflows at translation
7167 time", footnote "In this case, using INFINITY will violate the
7168 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7169 Thus we pedwarn to ensure this constraint violation is
7170 diagnosed. */
7171 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7172 pedwarn (loc, 0, "target format does not support infinity");
7173
7174 real_inf (&real);
7175 return build_real (type, real);
7176 }
7177
7178 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7179
7180 static tree
7181 fold_builtin_nan (tree arg, tree type, int quiet)
7182 {
7183 REAL_VALUE_TYPE real;
7184 const char *str;
7185
7186 if (!validate_arg (arg, POINTER_TYPE))
7187 return NULL_TREE;
7188 str = c_getstr (arg);
7189 if (!str)
7190 return NULL_TREE;
7191
7192 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7193 return NULL_TREE;
7194
7195 return build_real (type, real);
7196 }
7197
7198 /* Return true if the floating point expression T has an integer value.
7199 We also allow +Inf, -Inf and NaN to be considered integer values. */
7200
7201 static bool
7202 integer_valued_real_p (tree t)
7203 {
7204 switch (TREE_CODE (t))
7205 {
7206 case FLOAT_EXPR:
7207 return true;
7208
7209 case ABS_EXPR:
7210 case SAVE_EXPR:
7211 return integer_valued_real_p (TREE_OPERAND (t, 0));
7212
7213 case COMPOUND_EXPR:
7214 case MODIFY_EXPR:
7215 case BIND_EXPR:
7216 return integer_valued_real_p (TREE_OPERAND (t, 1));
7217
7218 case PLUS_EXPR:
7219 case MINUS_EXPR:
7220 case MULT_EXPR:
7221 case MIN_EXPR:
7222 case MAX_EXPR:
7223 return integer_valued_real_p (TREE_OPERAND (t, 0))
7224 && integer_valued_real_p (TREE_OPERAND (t, 1));
7225
7226 case COND_EXPR:
7227 return integer_valued_real_p (TREE_OPERAND (t, 1))
7228 && integer_valued_real_p (TREE_OPERAND (t, 2));
7229
7230 case REAL_CST:
7231 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7232
7233 case NOP_EXPR:
7234 {
7235 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7236 if (TREE_CODE (type) == INTEGER_TYPE)
7237 return true;
7238 if (TREE_CODE (type) == REAL_TYPE)
7239 return integer_valued_real_p (TREE_OPERAND (t, 0));
7240 break;
7241 }
7242
7243 case CALL_EXPR:
7244 switch (builtin_mathfn_code (t))
7245 {
7246 CASE_FLT_FN (BUILT_IN_CEIL):
7247 CASE_FLT_FN (BUILT_IN_FLOOR):
7248 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7249 CASE_FLT_FN (BUILT_IN_RINT):
7250 CASE_FLT_FN (BUILT_IN_ROUND):
7251 CASE_FLT_FN (BUILT_IN_TRUNC):
7252 return true;
7253
7254 CASE_FLT_FN (BUILT_IN_FMIN):
7255 CASE_FLT_FN (BUILT_IN_FMAX):
7256 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7257 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7258
7259 default:
7260 break;
7261 }
7262 break;
7263
7264 default:
7265 break;
7266 }
7267 return false;
7268 }
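
/* Hypothetical examples of what the predicate accepts and rejects:

     (double) i            -> true   (FLOAT_EXPR)
     floor (x) + trunc (y) -> true   (PLUS_EXPR of rounding calls)
     x * 0.5               -> false  (x itself is not known integral)

   and, as noted, +Inf, -Inf and NaN all count as integer valued. */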
7269
7270 /* FNDECL is assumed to be a builtin where truncation can be propagated
7271 across (for instance floor((double)f) == (double)floorf (f)).
7272 Do the transformation for a call with argument ARG. */
7273
7274 static tree
7275 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7276 {
7277 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7278
7279 if (!validate_arg (arg, REAL_TYPE))
7280 return NULL_TREE;
7281
7282 /* Integer rounding functions are idempotent. */
7283 if (fcode == builtin_mathfn_code (arg))
7284 return arg;
7285
7286 /* If argument is already integer valued, and we don't need to worry
7287 about setting errno, there's no need to perform rounding. */
7288 if (! flag_errno_math && integer_valued_real_p (arg))
7289 return arg;
7290
7291 if (optimize)
7292 {
7293 tree arg0 = strip_float_extensions (arg);
7294 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7295 tree newtype = TREE_TYPE (arg0);
7296 tree decl;
7297
7298 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7299 && (decl = mathfn_built_in (newtype, fcode)))
7300 return fold_convert_loc (loc, ftype,
7301 build_call_expr_loc (loc, decl, 1,
7302 fold_convert_loc (loc,
7303 newtype,
7304 arg0)));
7305 }
7306 return NULL_TREE;
7307 }
7308
7309 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7310 the argument, for instance lround((double)f) -> lroundf (f).
7311 Do the transformation for a call with argument ARG. */
7312
7313 static tree
7314 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7315 {
7316 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7317
7318 if (!validate_arg (arg, REAL_TYPE))
7319 return NULL_TREE;
7320
7321 /* If argument is already integer valued, and we don't need to worry
7322 about setting errno, there's no need to perform rounding. */
7323 if (! flag_errno_math && integer_valued_real_p (arg))
7324 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7325 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7326
7327 if (optimize)
7328 {
7329 tree ftype = TREE_TYPE (arg);
7330 tree arg0 = strip_float_extensions (arg);
7331 tree newtype = TREE_TYPE (arg0);
7332 tree decl;
7333
7334 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7335 && (decl = mathfn_built_in (newtype, fcode)))
7336 return build_call_expr_loc (loc, decl, 1,
7337 fold_convert_loc (loc, newtype, arg0));
7338 }
7339
7340 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7341 sizeof (int) == sizeof (long). */
7342 if (TYPE_PRECISION (integer_type_node)
7343 == TYPE_PRECISION (long_integer_type_node))
7344 {
7345 tree newfn = NULL_TREE;
7346 switch (fcode)
7347 {
7348 CASE_FLT_FN (BUILT_IN_ICEIL):
7349 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7350 break;
7351
7352 CASE_FLT_FN (BUILT_IN_IFLOOR):
7353 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7354 break;
7355
7356 CASE_FLT_FN (BUILT_IN_IROUND):
7357 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7358 break;
7359
7360 CASE_FLT_FN (BUILT_IN_IRINT):
7361 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7362 break;
7363
7364 default:
7365 break;
7366 }
7367
7368 if (newfn)
7369 {
7370 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7371 return fold_convert_loc (loc,
7372 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7373 }
7374 }
7375
7376 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7377 sizeof (long long) == sizeof (long). */
7378 if (TYPE_PRECISION (long_long_integer_type_node)
7379 == TYPE_PRECISION (long_integer_type_node))
7380 {
7381 tree newfn = NULL_TREE;
7382 switch (fcode)
7383 {
7384 CASE_FLT_FN (BUILT_IN_LLCEIL):
7385 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7386 break;
7387
7388 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7389 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7390 break;
7391
7392 CASE_FLT_FN (BUILT_IN_LLROUND):
7393 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7394 break;
7395
7396 CASE_FLT_FN (BUILT_IN_LLRINT):
7397 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7398 break;
7399
7400 default:
7401 break;
7402 }
7403
7404 if (newfn)
7405 {
7406 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7407 return fold_convert_loc (loc,
7408 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7409 }
7410 }
7411
7412 return NULL_TREE;
7413 }
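
/* Sketched on hypothetical calls, the two canonicalizations above are

     iround (d)  -> (int) lround (d)         if int matches long
     llround (d) -> (long long) lround (d)   if long long matches long

   i.e. a fold to the BUILT_IN_L* entry point plus a conversion back
   to the original return type. */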
7414
7415 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7416 return type. Return NULL_TREE if no simplification can be made. */
7417
7418 static tree
7419 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7420 {
7421 tree res;
7422
7423 if (!validate_arg (arg, COMPLEX_TYPE)
7424 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7425 return NULL_TREE;
7426
7427 /* Calculate the result when the argument is a constant. */
7428 if (TREE_CODE (arg) == COMPLEX_CST
7429 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7430 type, mpfr_hypot)))
7431 return res;
7432
7433 if (TREE_CODE (arg) == COMPLEX_EXPR)
7434 {
7435 tree real = TREE_OPERAND (arg, 0);
7436 tree imag = TREE_OPERAND (arg, 1);
7437
7438 /* If either part is zero, cabs is fabs of the other. */
7439 if (real_zerop (real))
7440 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7441 if (real_zerop (imag))
7442 return fold_build1_loc (loc, ABS_EXPR, type, real);
7443
7444 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7445 if (flag_unsafe_math_optimizations
7446 && operand_equal_p (real, imag, OEP_PURE_SAME))
7447 {
7448 const REAL_VALUE_TYPE sqrt2_trunc
7449 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7450 STRIP_NOPS (real);
7451 return fold_build2_loc (loc, MULT_EXPR, type,
7452 fold_build1_loc (loc, ABS_EXPR, type, real),
7453 build_real (type, sqrt2_trunc));
7454 }
7455 }
7456
7457 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7458 if (TREE_CODE (arg) == NEGATE_EXPR
7459 || TREE_CODE (arg) == CONJ_EXPR)
7460 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7461
7462 /* Don't do this when optimizing for size. */
7463 if (flag_unsafe_math_optimizations
7464 && optimize && optimize_function_for_speed_p (cfun))
7465 {
7466 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7467
7468 if (sqrtfn != NULL_TREE)
7469 {
7470 tree rpart, ipart, result;
7471
7472 arg = builtin_save_expr (arg);
7473
7474 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7475 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7476
7477 rpart = builtin_save_expr (rpart);
7478 ipart = builtin_save_expr (ipart);
7479
7480 result = fold_build2_loc (loc, PLUS_EXPR, type,
7481 fold_build2_loc (loc, MULT_EXPR, type,
7482 rpart, rpart),
7483 fold_build2_loc (loc, MULT_EXPR, type,
7484 ipart, ipart));
7485
7486 return build_call_expr_loc (loc, sqrtfn, 1, result);
7487 }
7488 }
7489
7490 return NULL_TREE;
7491 }
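
/* The unsafe-math expansion at the end amounts to the sketch

     cabs (z) -> sqrt (creal (z) * creal (z) + cimag (z) * cimag (z))

   with the argument and both parts wrapped in save_exprs so any side
   effects are evaluated only once. */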
7492
7493 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7494 complex tree type of the result. If NEG is true, the imaginary
7495 zero is negative. */
7496
7497 static tree
7498 build_complex_cproj (tree type, bool neg)
7499 {
7500 REAL_VALUE_TYPE rinf, rzero = dconst0;
7501
7502 real_inf (&rinf);
7503 rzero.sign = neg;
7504 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7505 build_real (TREE_TYPE (type), rzero));
7506 }
7507
7508 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7509 return type. Return NULL_TREE if no simplification can be made. */
7510
7511 static tree
7512 fold_builtin_cproj (location_t loc, tree arg, tree type)
7513 {
7514 if (!validate_arg (arg, COMPLEX_TYPE)
7515 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7516 return NULL_TREE;
7517
7518 /* If there are no infinities, return arg. */
7519 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7520 return non_lvalue_loc (loc, arg);
7521
7522 /* Calculate the result when the argument is a constant. */
7523 if (TREE_CODE (arg) == COMPLEX_CST)
7524 {
7525 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7526 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7527
7528 if (real_isinf (real) || real_isinf (imag))
7529 return build_complex_cproj (type, imag->sign);
7530 else
7531 return arg;
7532 }
7533 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7534 {
7535 tree real = TREE_OPERAND (arg, 0);
7536 tree imag = TREE_OPERAND (arg, 1);
7537
7538 STRIP_NOPS (real);
7539 STRIP_NOPS (imag);
7540
7541 /* If the real part is inf and the imag part is known to be
7542 nonnegative, return (inf + 0i). Remember side-effects are
7543 possible in the imag part. */
7544 if (TREE_CODE (real) == REAL_CST
7545 && real_isinf (TREE_REAL_CST_PTR (real))
7546 && tree_expr_nonnegative_p (imag))
7547 return omit_one_operand_loc (loc, type,
7548 build_complex_cproj (type, false),
7549 arg);
7550
7551 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7552 Remember side-effects are possible in the real part. */
7553 if (TREE_CODE (imag) == REAL_CST
7554 && real_isinf (TREE_REAL_CST_PTR (imag)))
7555 return
7556 omit_one_operand_loc (loc, type,
7557 build_complex_cproj (type, TREE_REAL_CST_PTR
7558 (imag)->sign), arg);
7559 }
7560
7561 return NULL_TREE;
7562 }
7563
7564 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7565 Return NULL_TREE if no simplification can be made. */
7566
7567 static tree
7568 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7569 {
7570
7571 enum built_in_function fcode;
7572 tree res;
7573
7574 if (!validate_arg (arg, REAL_TYPE))
7575 return NULL_TREE;
7576
7577 /* Calculate the result when the argument is a constant. */
7578 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7579 return res;
7580
7581 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7582 fcode = builtin_mathfn_code (arg);
7583 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7584 {
7585 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7586 arg = fold_build2_loc (loc, MULT_EXPR, type,
7587 CALL_EXPR_ARG (arg, 0),
7588 build_real (type, dconsthalf));
7589 return build_call_expr_loc (loc, expfn, 1, arg);
7590 }
7591
7592 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7593 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7594 {
7595 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7596
7597 if (powfn)
7598 {
7599 tree arg0 = CALL_EXPR_ARG (arg, 0);
7600 tree tree_root;
7601 /* The inner root was either sqrt or cbrt. */
7602 /* This was a conditional expression but it triggered a bug
7603 in Sun C 5.5. */
7604 REAL_VALUE_TYPE dconstroot;
7605 if (BUILTIN_SQRT_P (fcode))
7606 dconstroot = dconsthalf;
7607 else
7608 dconstroot = dconst_third ();
7609
7610 /* Adjust for the outer root. */
7611 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7612 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7613 tree_root = build_real (type, dconstroot);
7614 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7615 }
7616 }
7617
7618 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7619 if (flag_unsafe_math_optimizations
7620 && (fcode == BUILT_IN_POW
7621 || fcode == BUILT_IN_POWF
7622 || fcode == BUILT_IN_POWL))
7623 {
7624 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7625 tree arg0 = CALL_EXPR_ARG (arg, 0);
7626 tree arg1 = CALL_EXPR_ARG (arg, 1);
7627 tree narg1;
7628 if (!tree_expr_nonnegative_p (arg0))
7629 arg0 = build1 (ABS_EXPR, type, arg0);
7630 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7631 build_real (type, dconsthalf));
7632 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7633 }
7634
7635 return NULL_TREE;
7636 }
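
/* The unsafe-math rewrites above, summarized on hypothetical inputs:

     sqrt (exp (x))    -> exp (x * 0.5)
     sqrt (cbrt (x))   -> pow (x, 1.0/6.0)
     sqrt (pow (x, y)) -> pow (fabs (x), y * 0.5)

   each replaces a nested call pair with a single cheaper or more
   canonical form. */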
7637
7638 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7639 Return NULL_TREE if no simplification can be made. */
7640
7641 static tree
7642 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7643 {
7644 const enum built_in_function fcode = builtin_mathfn_code (arg);
7645 tree res;
7646
7647 if (!validate_arg (arg, REAL_TYPE))
7648 return NULL_TREE;
7649
7650 /* Calculate the result when the argument is a constant. */
7651 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7652 return res;
7653
7654 if (flag_unsafe_math_optimizations)
7655 {
7656 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7657 if (BUILTIN_EXPONENT_P (fcode))
7658 {
7659 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7660 const REAL_VALUE_TYPE third_trunc =
7661 real_value_truncate (TYPE_MODE (type), dconst_third ());
7662 arg = fold_build2_loc (loc, MULT_EXPR, type,
7663 CALL_EXPR_ARG (arg, 0),
7664 build_real (type, third_trunc));
7665 return build_call_expr_loc (loc, expfn, 1, arg);
7666 }
7667
7668 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7669 if (BUILTIN_SQRT_P (fcode))
7670 {
7671 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7672
7673 if (powfn)
7674 {
7675 tree arg0 = CALL_EXPR_ARG (arg, 0);
7676 tree tree_root;
7677 REAL_VALUE_TYPE dconstroot = dconst_third ();
7678
7679 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7680 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7681 tree_root = build_real (type, dconstroot);
7682 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7683 }
7684 }
7685
7686 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7687 if (BUILTIN_CBRT_P (fcode))
7688 {
7689 tree arg0 = CALL_EXPR_ARG (arg, 0);
7690 if (tree_expr_nonnegative_p (arg0))
7691 {
7692 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7693
7694 if (powfn)
7695 {
7696 tree tree_root;
7697 REAL_VALUE_TYPE dconstroot;
7698
7699 real_arithmetic (&dconstroot, MULT_EXPR,
7700 dconst_third_ptr (), dconst_third_ptr ());
7701 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7702 tree_root = build_real (type, dconstroot);
7703 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7704 }
7705 }
7706 }
7707
7708 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7709 if (fcode == BUILT_IN_POW
7710 || fcode == BUILT_IN_POWF
7711 || fcode == BUILT_IN_POWL)
7712 {
7713 tree arg00 = CALL_EXPR_ARG (arg, 0);
7714 tree arg01 = CALL_EXPR_ARG (arg, 1);
7715 if (tree_expr_nonnegative_p (arg00))
7716 {
7717 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7718 const REAL_VALUE_TYPE dconstroot
7719 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7720 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7721 build_real (type, dconstroot));
7722 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7723 }
7724 }
7725 }
7726 return NULL_TREE;
7727 }
7728
7729 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7730 TYPE is the type of the return value. Return NULL_TREE if no
7731 simplification can be made. */
7732
7733 static tree
7734 fold_builtin_cos (location_t loc,
7735 tree arg, tree type, tree fndecl)
7736 {
7737 tree res, narg;
7738
7739 if (!validate_arg (arg, REAL_TYPE))
7740 return NULL_TREE;
7741
7742 /* Calculate the result when the argument is a constant. */
7743 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7744 return res;
7745
7746 /* Optimize cos (-x) into cos (x). */
7747 if ((narg = fold_strip_sign_ops (arg)))
7748 return build_call_expr_loc (loc, fndecl, 1, narg);
7749
7750 return NULL_TREE;
7751 }
7752
7753 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7754 Return NULL_TREE if no simplification can be made. */
7755
7756 static tree
7757 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7758 {
7759 if (validate_arg (arg, REAL_TYPE))
7760 {
7761 tree res, narg;
7762
7763 /* Calculate the result when the argument is a constant. */
7764 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7765 return res;
7766
7767 /* Optimize cosh(-x) into cosh (x). */
7768 if ((narg = fold_strip_sign_ops (arg)))
7769 return build_call_expr_loc (loc, fndecl, 1, narg);
7770 }
7771
7772 return NULL_TREE;
7773 }
7774
7775 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7776 argument ARG. TYPE is the type of the return value. Return
7777 NULL_TREE if no simplification can be made. */
7778
7779 static tree
7780 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7781 bool hyper)
7782 {
7783 if (validate_arg (arg, COMPLEX_TYPE)
7784 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7785 {
7786 tree tmp;
7787
7788 /* Calculate the result when the argument is a constant. */
7789 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7790 return tmp;
7791
7792 /* Optimize fn(-x) into fn(x). */
7793 if ((tmp = fold_strip_sign_ops (arg)))
7794 return build_call_expr_loc (loc, fndecl, 1, tmp);
7795 }
7796
7797 return NULL_TREE;
7798 }
7799
7800 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7801 Return NULL_TREE if no simplification can be made. */
7802
7803 static tree
7804 fold_builtin_tan (tree arg, tree type)
7805 {
7806 enum built_in_function fcode;
7807 tree res;
7808
7809 if (!validate_arg (arg, REAL_TYPE))
7810 return NULL_TREE;
7811
7812 /* Calculate the result when the argument is a constant. */
7813 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7814 return res;
7815
7816 /* Optimize tan(atan(x)) = x. */
7817 fcode = builtin_mathfn_code (arg);
7818 if (flag_unsafe_math_optimizations
7819 && (fcode == BUILT_IN_ATAN
7820 || fcode == BUILT_IN_ATANF
7821 || fcode == BUILT_IN_ATANL))
7822 return CALL_EXPR_ARG (arg, 0);
7823
7824 return NULL_TREE;
7825 }
7826
7827 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7828 NULL_TREE if no simplification can be made. */
7829
7830 static tree
7831 fold_builtin_sincos (location_t loc,
7832 tree arg0, tree arg1, tree arg2)
7833 {
7834 tree type;
7835 tree res, fn, call;
7836
7837 if (!validate_arg (arg0, REAL_TYPE)
7838 || !validate_arg (arg1, POINTER_TYPE)
7839 || !validate_arg (arg2, POINTER_TYPE))
7840 return NULL_TREE;
7841
7842 type = TREE_TYPE (arg0);
7843
7844 /* Calculate the result when the argument is a constant. */
7845 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7846 return res;
7847
7848 /* Canonicalize sincos to cexpi. */
7849 if (!targetm.libc_has_function (function_c99_math_complex))
7850 return NULL_TREE;
7851 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7852 if (!fn)
7853 return NULL_TREE;
7854
7855 call = build_call_expr_loc (loc, fn, 1, arg0);
7856 call = builtin_save_expr (call);
7857
7858 return build2 (COMPOUND_EXPR, void_type_node,
7859 build2 (MODIFY_EXPR, void_type_node,
7860 build_fold_indirect_ref_loc (loc, arg1),
7861 build1 (IMAGPART_EXPR, type, call)),
7862 build2 (MODIFY_EXPR, void_type_node,
7863 build_fold_indirect_ref_loc (loc, arg2),
7864 build1 (REALPART_EXPR, type, call)));
7865 }
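
/* As a sketch, the cexpi canonicalization turns the hypothetical call

     sincos (x, &s, &c);

   into the equivalent of

     t = cexpi (x);  s = __imag__ t;  c = __real__ t;

   with the cexpi result held in a save_expr so it is computed once. */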
7866
7867 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7868 NULL_TREE if no simplification can be made. */
7869
7870 static tree
7871 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7872 {
7873 tree rtype;
7874 tree realp, imagp, ifn;
7875 tree res;
7876
7877 if (!validate_arg (arg0, COMPLEX_TYPE)
7878 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7879 return NULL_TREE;
7880
7881 /* Calculate the result when the argument is a constant. */
7882 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7883 return res;
7884
7885 rtype = TREE_TYPE (TREE_TYPE (arg0));
7886
7887 /* If we can figure out the real part of arg0 and it is constant zero,
7888 fold to cexpi. */
7889 if (!targetm.libc_has_function (function_c99_math_complex))
7890 return NULL_TREE;
7891 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7892 if (!ifn)
7893 return NULL_TREE;
7894
7895 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7896 && real_zerop (realp))
7897 {
7898 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7899 return build_call_expr_loc (loc, ifn, 1, narg);
7900 }
7901
7902 /* If we can easily decompose the real and imaginary parts, split cexp
7903 into exp (r) * cexpi (i). */
7904 if (flag_unsafe_math_optimizations
7905 && realp)
7906 {
7907 tree rfn, rcall, icall;
7908
7909 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7910 if (!rfn)
7911 return NULL_TREE;
7912
7913 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7914 if (!imagp)
7915 return NULL_TREE;
7916
7917 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7918 icall = builtin_save_expr (icall);
7919 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7920 rcall = builtin_save_expr (rcall);
7921 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7922 fold_build2_loc (loc, MULT_EXPR, rtype,
7923 rcall,
7924 fold_build1_loc (loc, REALPART_EXPR,
7925 rtype, icall)),
7926 fold_build2_loc (loc, MULT_EXPR, rtype,
7927 rcall,
7928 fold_build1_loc (loc, IMAGPART_EXPR,
7929 rtype, icall)));
7930 }
7931
7932 return NULL_TREE;
7933 }
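
/* Sketch of the unsafe-math decomposition above (hypothetical
   z = r + i*I):

     cexp (z) -> exp (r) * cexpi (i)
              =  exp (r) * cos (i)  +  exp (r) * sin (i) * I

   built as a COMPLEX_EXPR of the two products, with both calls wrapped
   in save_exprs. */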
7934
7935 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7936 Return NULL_TREE if no simplification can be made. */
7937
7938 static tree
7939 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7940 {
7941 if (!validate_arg (arg, REAL_TYPE))
7942 return NULL_TREE;
7943
7944 /* Optimize trunc of constant value. */
7945 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7946 {
7947 REAL_VALUE_TYPE r, x;
7948 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7949
7950 x = TREE_REAL_CST (arg);
7951 real_trunc (&r, TYPE_MODE (type), &x);
7952 return build_real (type, r);
7953 }
7954
7955 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7956 }
7957
7958 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7959 Return NULL_TREE if no simplification can be made. */
7960
7961 static tree
7962 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7963 {
7964 if (!validate_arg (arg, REAL_TYPE))
7965 return NULL_TREE;
7966
7967 /* Optimize floor of constant value. */
7968 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7969 {
7970 REAL_VALUE_TYPE x;
7971
7972 x = TREE_REAL_CST (arg);
7973 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7974 {
7975 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7976 REAL_VALUE_TYPE r;
7977
7978 real_floor (&r, TYPE_MODE (type), &x);
7979 return build_real (type, r);
7980 }
7981 }
7982
7983 /* Fold floor (x) where x is nonnegative to trunc (x). */
7984 if (tree_expr_nonnegative_p (arg))
7985 {
7986 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7987 if (truncfn)
7988 return build_call_expr_loc (loc, truncfn, 1, arg);
7989 }
7990
7991 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7992 }
7993
7994 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7995 Return NULL_TREE if no simplification can be made. */
7996
7997 static tree
7998 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7999 {
8000 if (!validate_arg (arg, REAL_TYPE))
8001 return NULL_TREE;
8002
8003 /* Optimize ceil of constant value. */
8004 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8005 {
8006 REAL_VALUE_TYPE x;
8007
8008 x = TREE_REAL_CST (arg);
8009 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8010 {
8011 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8012 REAL_VALUE_TYPE r;
8013
8014 real_ceil (&r, TYPE_MODE (type), &x);
8015 return build_real (type, r);
8016 }
8017 }
8018
8019 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8020 }
8021
8022 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8023 Return NULL_TREE if no simplification can be made. */
8024
8025 static tree
8026 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8027 {
8028 if (!validate_arg (arg, REAL_TYPE))
8029 return NULL_TREE;
8030
8031 /* Optimize round of constant value. */
8032 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8033 {
8034 REAL_VALUE_TYPE x;
8035
8036 x = TREE_REAL_CST (arg);
8037 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8038 {
8039 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8040 REAL_VALUE_TYPE r;
8041
8042 real_round (&r, TYPE_MODE (type), &x);
8043 return build_real (type, r);
8044 }
8045 }
8046
8047 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8048 }
8049
8050 /* Fold function call to builtin lround, lroundf or lroundl (or the
8051 corresponding long long versions) and other rounding functions. ARG
8052 is the argument to the call. Return NULL_TREE if no simplification
8053 can be made. */
8054
8055 static tree
8056 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8057 {
8058 if (!validate_arg (arg, REAL_TYPE))
8059 return NULL_TREE;
8060
8061 /* Optimize lround of constant value. */
8062 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8063 {
8064 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8065
8066 if (real_isfinite (&x))
8067 {
8068 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8069 tree ftype = TREE_TYPE (arg);
8070 double_int val;
8071 REAL_VALUE_TYPE r;
8072
8073 switch (DECL_FUNCTION_CODE (fndecl))
8074 {
8075 CASE_FLT_FN (BUILT_IN_IFLOOR):
8076 CASE_FLT_FN (BUILT_IN_LFLOOR):
8077 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8078 real_floor (&r, TYPE_MODE (ftype), &x);
8079 break;
8080
8081 CASE_FLT_FN (BUILT_IN_ICEIL):
8082 CASE_FLT_FN (BUILT_IN_LCEIL):
8083 CASE_FLT_FN (BUILT_IN_LLCEIL):
8084 real_ceil (&r, TYPE_MODE (ftype), &x);
8085 break;
8086
8087 CASE_FLT_FN (BUILT_IN_IROUND):
8088 CASE_FLT_FN (BUILT_IN_LROUND):
8089 CASE_FLT_FN (BUILT_IN_LLROUND):
8090 real_round (&r, TYPE_MODE (ftype), &x);
8091 break;
8092
8093 default:
8094 gcc_unreachable ();
8095 }
8096
8097 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
8098 if (double_int_fits_to_tree_p (itype, val))
8099 return double_int_to_tree (itype, val);
8100 }
8101 }
8102
8103 switch (DECL_FUNCTION_CODE (fndecl))
8104 {
8105 CASE_FLT_FN (BUILT_IN_LFLOOR):
8106 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8107 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8108 if (tree_expr_nonnegative_p (arg))
8109 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8110 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8111 break;
8112 default:;
8113 }
8114
8115 return fold_fixed_mathfn (loc, fndecl, arg);
8116 }
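
/* Constant-folding example for the code above (hypothetical call):
   lround (2.5) folds to 3 via real_round, but only when the result
   fits the integer return type; otherwise the call is kept so the
   runtime behaviour is preserved. */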
8117
8118 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8119 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8120 the argument to the call. Return NULL_TREE if no simplification can
8121 be made. */
8122
8123 static tree
8124 fold_builtin_bitop (tree fndecl, tree arg)
8125 {
8126 if (!validate_arg (arg, INTEGER_TYPE))
8127 return NULL_TREE;
8128
8129 /* Optimize for constant argument. */
8130 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8131 {
8132 HOST_WIDE_INT hi, width, result;
8133 unsigned HOST_WIDE_INT lo;
8134 tree type;
8135
8136 type = TREE_TYPE (arg);
8137 width = TYPE_PRECISION (type);
8138 lo = TREE_INT_CST_LOW (arg);
8139
8140 /* Clear all the bits that are beyond the type's precision. */
8141 if (width > HOST_BITS_PER_WIDE_INT)
8142 {
8143 hi = TREE_INT_CST_HIGH (arg);
8144 if (width < HOST_BITS_PER_DOUBLE_INT)
8145 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
8146 }
8147 else
8148 {
8149 hi = 0;
8150 if (width < HOST_BITS_PER_WIDE_INT)
8151 lo &= ~(HOST_WIDE_INT_M1U << width);
8152 }
8153
8154 switch (DECL_FUNCTION_CODE (fndecl))
8155 {
8156 CASE_INT_FN (BUILT_IN_FFS):
8157 if (lo != 0)
8158 result = ffs_hwi (lo);
8159 else if (hi != 0)
8160 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8161 else
8162 result = 0;
8163 break;
8164
8165 CASE_INT_FN (BUILT_IN_CLZ):
8166 if (hi != 0)
8167 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8168 else if (lo != 0)
8169 result = width - floor_log2 (lo) - 1;
8170 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8171 result = width;
8172 break;
8173
8174 CASE_INT_FN (BUILT_IN_CTZ):
8175 if (lo != 0)
8176 result = ctz_hwi (lo);
8177 else if (hi != 0)
8178 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8179 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8180 result = width;
8181 break;
8182
8183 CASE_INT_FN (BUILT_IN_CLRSB):
8184 if (width > 2 * HOST_BITS_PER_WIDE_INT)
8185 return NULL_TREE;
8186 if (width > HOST_BITS_PER_WIDE_INT
8187 && (hi & ((unsigned HOST_WIDE_INT) 1
8188 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8189 {
8190 hi = ~hi & ~(HOST_WIDE_INT_M1U
8191 << (width - HOST_BITS_PER_WIDE_INT - 1));
8192 lo = ~lo;
8193 }
8194 else if (width <= HOST_BITS_PER_WIDE_INT
8195 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8196 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
8197 if (hi != 0)
8198 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8199 else if (lo != 0)
8200 result = width - floor_log2 (lo) - 2;
8201 else
8202 result = width - 1;
8203 break;
8204
8205 CASE_INT_FN (BUILT_IN_POPCOUNT):
8206 result = 0;
8207 while (lo)
8208 result++, lo &= lo - 1;
8209 while (hi)
8210 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8211 break;
8212
8213 CASE_INT_FN (BUILT_IN_PARITY):
8214 result = 0;
8215 while (lo)
8216 result++, lo &= lo - 1;
8217 while (hi)
8218 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8219 result &= 1;
8220 break;
8221
8222 default:
8223 gcc_unreachable ();
8224 }
8225
8226 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8227 }
8228
8229 return NULL_TREE;
8230 }
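
/* Worked examples of the constant folding above, assuming 32-bit int:

     __builtin_popcount (0xA5) -> 4
     __builtin_ffs (0x10)      -> 5   (1-based index of the lowest set bit)
     __builtin_clz (1)         -> 31  (width - floor_log2 (1) - 1)

   clz/ctz of zero fold to the target-defined value when
   CLZ/CTZ_DEFINED_VALUE_AT_ZERO supplies one, and to the type width
   otherwise. */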
8231
8232 /* Fold a function call to builtin bswap in its 16, 32 and 64 bit
8233 variants. Return NULL_TREE if no simplification can be made. */
8234 static tree
8235 fold_builtin_bswap (tree fndecl, tree arg)
8236 {
8237 if (! validate_arg (arg, INTEGER_TYPE))
8238 return NULL_TREE;
8239
8240 /* Optimize constant value. */
8241 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8242 {
8243 HOST_WIDE_INT hi, width, r_hi = 0;
8244 unsigned HOST_WIDE_INT lo, r_lo = 0;
8245 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8246
8247 width = TYPE_PRECISION (type);
8248 lo = TREE_INT_CST_LOW (arg);
8249 hi = TREE_INT_CST_HIGH (arg);
8250
8251 switch (DECL_FUNCTION_CODE (fndecl))
8252 {
8253 case BUILT_IN_BSWAP16:
8254 case BUILT_IN_BSWAP32:
8255 case BUILT_IN_BSWAP64:
8256 {
8257 int s;
8258
8259 for (s = 0; s < width; s += 8)
8260 {
8261 int d = width - s - 8;
8262 unsigned HOST_WIDE_INT byte;
8263
8264 if (s < HOST_BITS_PER_WIDE_INT)
8265 byte = (lo >> s) & 0xff;
8266 else
8267 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8268
8269 if (d < HOST_BITS_PER_WIDE_INT)
8270 r_lo |= byte << d;
8271 else
8272 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8273 }
8274 }
8275
8276 break;
8277
8278 default:
8279 gcc_unreachable ();
8280 }
8281
8282 if (width < HOST_BITS_PER_WIDE_INT)
8283 return build_int_cst (type, r_lo);
8284 else
8285 return build_int_cst_wide (type, r_lo, r_hi);
8286 }
8287
8288 return NULL_TREE;
8289 }
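
/* A worked example of the byte-reversal loop above:

     __builtin_bswap32 (0x12345678) -> 0x78563412

   each source byte at bit offset S moves to the mirrored offset
   D = width - S - 8, handled in two HOST_WIDE_INT halves when the
   precision exceeds a host word. */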
8290
8291 /* A subroutine of fold_builtin to fold the various logarithmic
8292 functions. Return NULL_TREE if no simplification can be made.
8293 FUNC is the corresponding MPFR logarithm function. */
8294
8295 static tree
8296 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8297 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8298 {
8299 if (validate_arg (arg, REAL_TYPE))
8300 {
8301 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8302 tree res;
8303 const enum built_in_function fcode = builtin_mathfn_code (arg);
8304
8305 /* Calculate the result when the argument is a constant. */
8306 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8307 return res;
8308
8309 /* Special case, optimize logN(expN(x)) = x. */
8310 if (flag_unsafe_math_optimizations
8311 && ((func == mpfr_log
8312 && (fcode == BUILT_IN_EXP
8313 || fcode == BUILT_IN_EXPF
8314 || fcode == BUILT_IN_EXPL))
8315 || (func == mpfr_log2
8316 && (fcode == BUILT_IN_EXP2
8317 || fcode == BUILT_IN_EXP2F
8318 || fcode == BUILT_IN_EXP2L))
8319 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8320 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8321
8322 /* Optimize logN(func()) for various exponential functions. We
8323 want to determine the value "x" and the power "exponent" in
8324 order to transform logN(x**exponent) into exponent*logN(x). */
8325 if (flag_unsafe_math_optimizations)
8326 {
8327 tree exponent = 0, x = 0;
8328
8329 switch (fcode)
8330 {
8331 CASE_FLT_FN (BUILT_IN_EXP):
8332 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8333 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8334 dconst_e ()));
8335 exponent = CALL_EXPR_ARG (arg, 0);
8336 break;
8337 CASE_FLT_FN (BUILT_IN_EXP2):
8338 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8339 x = build_real (type, dconst2);
8340 exponent = CALL_EXPR_ARG (arg, 0);
8341 break;
8342 CASE_FLT_FN (BUILT_IN_EXP10):
8343 CASE_FLT_FN (BUILT_IN_POW10):
8344 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8345 {
8346 REAL_VALUE_TYPE dconst10;
8347 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8348 x = build_real (type, dconst10);
8349 }
8350 exponent = CALL_EXPR_ARG (arg, 0);
8351 break;
8352 CASE_FLT_FN (BUILT_IN_SQRT):
8353 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8354 x = CALL_EXPR_ARG (arg, 0);
8355 exponent = build_real (type, dconsthalf);
8356 break;
8357 CASE_FLT_FN (BUILT_IN_CBRT):
8358 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8359 x = CALL_EXPR_ARG (arg, 0);
8360 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8361 dconst_third ()));
8362 break;
8363 CASE_FLT_FN (BUILT_IN_POW):
8364 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8365 x = CALL_EXPR_ARG (arg, 0);
8366 exponent = CALL_EXPR_ARG (arg, 1);
8367 break;
8368 default:
8369 break;
8370 }
8371
8372 /* Now perform the optimization. */
8373 if (x && exponent)
8374 {
8375 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8376 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8377 }
8378 }
8379 }
8380
8381 return NULL_TREE;
8382 }
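
/* The exponent extraction yields rewrites such as (hypothetical
   arguments, unsafe math only):

     log (exp (x))    -> x
     log (sqrt (x))   -> 0.5 * log (x)
     log (pow (x, y)) -> y * log (x)

   i.e. logN(x**exponent) becomes exponent * logN(x). */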
8383
8384 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8385 NULL_TREE if no simplification can be made. */
8386
8387 static tree
8388 fold_builtin_hypot (location_t loc, tree fndecl,
8389 tree arg0, tree arg1, tree type)
8390 {
8391 tree res, narg0, narg1;
8392
8393 if (!validate_arg (arg0, REAL_TYPE)
8394 || !validate_arg (arg1, REAL_TYPE))
8395 return NULL_TREE;
8396
8397 /* Calculate the result when the argument is a constant. */
8398 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8399 return res;
8400
8401 /* If either argument to hypot has a negate or abs, strip that off.
8402 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8403 narg0 = fold_strip_sign_ops (arg0);
8404 narg1 = fold_strip_sign_ops (arg1);
8405 if (narg0 || narg1)
8406 {
8407 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8408 narg1 ? narg1 : arg1);
8409 }
8410
8411 /* If either argument is zero, hypot is fabs of the other. */
8412 if (real_zerop (arg0))
8413 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8414 else if (real_zerop (arg1))
8415 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8416
8417 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8418 if (flag_unsafe_math_optimizations
8419 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8420 {
8421 const REAL_VALUE_TYPE sqrt2_trunc
8422 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8423 return fold_build2_loc (loc, MULT_EXPR, type,
8424 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8425 build_real (type, sqrt2_trunc));
8426 }
8427
8428 return NULL_TREE;
8429 }
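
/* The hypot rewrites above, on hypothetical operands:

     hypot (-x, fabs (y)) -> hypot (x, y)
     hypot (x, 0.0)       -> fabs (x)
     hypot (x, x)         -> fabs (x) * sqrt (2)   (unsafe math)  */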
8430
8431
8432 /* Fold a builtin function call to pow, powf, or powl. Return
8433 NULL_TREE if no simplification can be made. */
8434 static tree
8435 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8436 {
8437 tree res;
8438
8439 if (!validate_arg (arg0, REAL_TYPE)
8440 || !validate_arg (arg1, REAL_TYPE))
8441 return NULL_TREE;
8442
8443 /* Calculate the result when the argument is a constant. */
8444 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8445 return res;
8446
8447 /* Optimize pow(1.0,y) = 1.0. */
8448 if (real_onep (arg0))
8449 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8450
8451 if (TREE_CODE (arg1) == REAL_CST
8452 && !TREE_OVERFLOW (arg1))
8453 {
8454 REAL_VALUE_TYPE cint;
8455 REAL_VALUE_TYPE c;
8456 HOST_WIDE_INT n;
8457
8458 c = TREE_REAL_CST (arg1);
8459
8460 /* Optimize pow(x,0.0) = 1.0. */
8461 if (REAL_VALUES_EQUAL (c, dconst0))
8462 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8463 arg0);
8464
8465 /* Optimize pow(x,1.0) = x. */
8466 if (REAL_VALUES_EQUAL (c, dconst1))
8467 return arg0;
8468
8469 /* Optimize pow(x,-1.0) = 1.0/x. */
8470 if (REAL_VALUES_EQUAL (c, dconstm1))
8471 return fold_build2_loc (loc, RDIV_EXPR, type,
8472 build_real (type, dconst1), arg0);
8473
8474 /* Optimize pow(x,0.5) = sqrt(x). */
8475 if (flag_unsafe_math_optimizations
8476 && REAL_VALUES_EQUAL (c, dconsthalf))
8477 {
8478 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8479
8480 if (sqrtfn != NULL_TREE)
8481 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8482 }
8483
8484 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8485 if (flag_unsafe_math_optimizations)
8486 {
8487 const REAL_VALUE_TYPE dconstroot
8488 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8489
8490 if (REAL_VALUES_EQUAL (c, dconstroot))
8491 {
8492 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8493 if (cbrtfn != NULL_TREE)
8494 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8495 }
8496 }
8497
8498 /* Check for an integer exponent. */
8499 n = real_to_integer (&c);
8500 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8501 if (real_identical (&c, &cint))
8502 {
8503 /* Attempt to evaluate pow at compile-time, unless this should
8504 raise an exception. */
8505 if (TREE_CODE (arg0) == REAL_CST
8506 && !TREE_OVERFLOW (arg0)
8507 && (n > 0
8508 || (!flag_trapping_math && !flag_errno_math)
8509 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8510 {
8511 REAL_VALUE_TYPE x;
8512 bool inexact;
8513
8514 x = TREE_REAL_CST (arg0);
8515 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8516 if (flag_unsafe_math_optimizations || !inexact)
8517 return build_real (type, x);
8518 }
8519
8520 /* Strip sign ops from even integer powers. */
8521 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8522 {
8523 tree narg0 = fold_strip_sign_ops (arg0);
8524 if (narg0)
8525 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8526 }
8527 }
8528 }
8529
8530 if (flag_unsafe_math_optimizations)
8531 {
8532 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8533
8534 /* Optimize pow(expN(x),y) = expN(x*y). */
8535 if (BUILTIN_EXPONENT_P (fcode))
8536 {
8537 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8538 tree arg = CALL_EXPR_ARG (arg0, 0);
8539 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8540 return build_call_expr_loc (loc, expfn, 1, arg);
8541 }
8542
8543 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8544 if (BUILTIN_SQRT_P (fcode))
8545 {
8546 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8547 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8548 build_real (type, dconsthalf));
8549 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8550 }
8551
8552 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8553 if (BUILTIN_CBRT_P (fcode))
8554 {
8555 tree arg = CALL_EXPR_ARG (arg0, 0);
8556 if (tree_expr_nonnegative_p (arg))
8557 {
8558 const REAL_VALUE_TYPE dconstroot
8559 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8560 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8561 build_real (type, dconstroot));
8562 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8563 }
8564 }
8565
8566 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8567 if (fcode == BUILT_IN_POW
8568 || fcode == BUILT_IN_POWF
8569 || fcode == BUILT_IN_POWL)
8570 {
8571 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8572 if (tree_expr_nonnegative_p (arg00))
8573 {
8574 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8575 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8576 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8577 }
8578 }
8579 }
8580
8581 return NULL_TREE;
8582 }
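
/* Summary of the pow rewrites above on hypothetical operands:

     pow (x, 0.0)        -> 1.0 (for any x)
     pow (x, -1.0)       -> 1.0 / x
     pow (x, 0.5)        -> sqrt (x)           (unsafe math)
     pow (sqrt (x), y)   -> pow (x, y * 0.5)   (unsafe math)
     pow (pow (x, y), z) -> pow (x, y * z)     (unsafe math, x >= 0)  */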
8583
8584 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8585 Return NULL_TREE if no simplification can be made. */
8586 static tree
8587 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8588 tree arg0, tree arg1, tree type)
8589 {
8590 if (!validate_arg (arg0, REAL_TYPE)
8591 || !validate_arg (arg1, INTEGER_TYPE))
8592 return NULL_TREE;
8593
8594 /* Optimize pow(1.0,y) = 1.0. */
8595 if (real_onep (arg0))
8596 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8597
8598 if (host_integerp (arg1, 0))
8599 {
8600 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8601
8602 /* Evaluate powi at compile-time. */
8603 if (TREE_CODE (arg0) == REAL_CST
8604 && !TREE_OVERFLOW (arg0))
8605 {
8606 REAL_VALUE_TYPE x;
8607 x = TREE_REAL_CST (arg0);
8608 real_powi (&x, TYPE_MODE (type), &x, c);
8609 return build_real (type, x);
8610 }
8611
8612 /* Optimize pow(x,0) = 1.0. */
8613 if (c == 0)
8614 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8615 arg0);
8616
8617 /* Optimize pow(x,1) = x. */
8618 if (c == 1)
8619 return arg0;
8620
8621 /* Optimize pow(x,-1) = 1.0/x. */
8622 if (c == -1)
8623 return fold_build2_loc (loc, RDIV_EXPR, type,
8624 build_real (type, dconst1), arg0);
8625 }
8626
8627 return NULL_TREE;
8628 }
8629
8630 /* A subroutine of fold_builtin to fold the various exponent
8631 functions. Return NULL_TREE if no simplification can be made.
8632 FUNC is the corresponding MPFR exponent function. */
8633
8634 static tree
8635 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8636 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8637 {
8638 if (validate_arg (arg, REAL_TYPE))
8639 {
8640 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8641 tree res;
8642
8643 /* Calculate the result when the argument is a constant. */
8644 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8645 return res;
8646
8647 /* Optimize expN(logN(x)) = x. */
8648 if (flag_unsafe_math_optimizations)
8649 {
8650 const enum built_in_function fcode = builtin_mathfn_code (arg);
8651
8652 if ((func == mpfr_exp
8653 && (fcode == BUILT_IN_LOG
8654 || fcode == BUILT_IN_LOGF
8655 || fcode == BUILT_IN_LOGL))
8656 || (func == mpfr_exp2
8657 && (fcode == BUILT_IN_LOG2
8658 || fcode == BUILT_IN_LOG2F
8659 || fcode == BUILT_IN_LOG2L))
8660 || (func == mpfr_exp10
8661 && (fcode == BUILT_IN_LOG10
8662 || fcode == BUILT_IN_LOG10F
8663 || fcode == BUILT_IN_LOG10L)))
8664 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8665 }
8666 }
8667
8668 return NULL_TREE;
8669 }
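
/* A minimal sketch of the inverse-function identity applied above; each
   pair is treated as equal under -funsafe-math-optimizations.  Not
   compiled.  */
#if 0
#include <math.h>
static double exp_of_log (double x)   { return exp (log (x)); }    /* -> x */
static double exp2_of_log2 (double x) { return exp2 (log2 (x)); }  /* -> x */
/* exp10 is a GNU extension, so the base-10 pair is written with pow.  */
static double exp10_of_log10 (double x) { return pow (10.0, log10 (x)); }
#endif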
8670
8671 /* Return true if VAR is a VAR_DECL or a component thereof. */
8672
8673 static bool
8674 var_decl_component_p (tree var)
8675 {
8676 tree inner = var;
8677 while (handled_component_p (inner))
8678 inner = TREE_OPERAND (inner, 0);
8679 return SSA_VAR_P (inner);
8680 }
8681
8682 /* Fold function call to builtin memset. Return
8683 NULL_TREE if no simplification can be made. */
8684
8685 static tree
8686 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8687 tree type, bool ignore)
8688 {
8689 tree var, ret, etype;
8690 unsigned HOST_WIDE_INT length, cval;
8691
8692 if (! validate_arg (dest, POINTER_TYPE)
8693 || ! validate_arg (c, INTEGER_TYPE)
8694 || ! validate_arg (len, INTEGER_TYPE))
8695 return NULL_TREE;
8696
8697 if (! host_integerp (len, 1))
8698 return NULL_TREE;
8699
8700 /* If the LEN parameter is zero, return DEST. */
8701 if (integer_zerop (len))
8702 return omit_one_operand_loc (loc, type, dest, c);
8703
8704 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8705 return NULL_TREE;
8706
8707 var = dest;
8708 STRIP_NOPS (var);
8709 if (TREE_CODE (var) != ADDR_EXPR)
8710 return NULL_TREE;
8711
8712 var = TREE_OPERAND (var, 0);
8713 if (TREE_THIS_VOLATILE (var))
8714 return NULL_TREE;
8715
8716 etype = TREE_TYPE (var);
8717 if (TREE_CODE (etype) == ARRAY_TYPE)
8718 etype = TREE_TYPE (etype);
8719
8720 if (!INTEGRAL_TYPE_P (etype)
8721 && !POINTER_TYPE_P (etype))
8722 return NULL_TREE;
8723
8724 if (! var_decl_component_p (var))
8725 return NULL_TREE;
8726
8727 length = tree_low_cst (len, 1);
8728 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8729 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8730 return NULL_TREE;
8731
8732 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8733 return NULL_TREE;
8734
8735 if (integer_zerop (c))
8736 cval = 0;
8737 else
8738 {
8739 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8740 return NULL_TREE;
8741
8742 cval = TREE_INT_CST_LOW (c);
8743 cval &= 0xff;
8744 cval |= cval << 8;
8745 cval |= cval << 16;
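/* Shifting in two steps avoids an undefined shift by the full width of
   HOST_WIDE_INT when it is only 32 bits wide; in that case this term is
   simply zero and the OR is a no-op.  */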
8746 cval |= (cval << 31) << 1;
8747 }
8748
8749 ret = build_int_cst_type (etype, cval);
8750 var = build_fold_indirect_ref_loc (loc,
8751 fold_convert_loc (loc,
8752 build_pointer_type (etype),
8753 dest));
8754 ret = build2 (MODIFY_EXPR, etype, var, ret);
8755 if (ignore)
8756 return ret;
8757
8758 return omit_one_operand_loc (loc, type, dest, ret);
8759 }
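
/* A hedged illustration of what the memset fold above produces for a
   word-sized, sufficiently aligned destination; names are hypothetical
   and the block is not compiled.  */
#if 0
#include <string.h>
#include <stdint.h>

static uint32_t g;

static void before (int c)
{
  memset (&g, c, sizeof g);
}

static void after (int c)
{
  /* The low byte of C is replicated across the word, exactly as the
     cval computation above does, and the call becomes one plain store.  */
  uint32_t cval = (uint32_t) c & 0xff;
  cval |= cval << 8;
  cval |= cval << 16;
  g = cval;
}
#endif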
8760
8761 /* Fold function call to builtin bzero. Return
8762    NULL_TREE if no simplification can be made. */
8763
8764 static tree
8765 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8766 {
8767 if (! validate_arg (dest, POINTER_TYPE)
8768 || ! validate_arg (size, INTEGER_TYPE))
8769 return NULL_TREE;
8770
8771 if (!ignore)
8772 return NULL_TREE;
8773
8774 /* New argument list transforming bzero(ptr x, int y) to
8775    memset(ptr x, int 0, size_t y). This is done this way
8776    so that if it isn't expanded inline, we fall back to
8777    calling bzero instead of memset. */
8778
8779 return fold_builtin_memset (loc, dest, integer_zero_node,
8780 fold_convert_loc (loc, size_type_node, size),
8781 void_type_node, ignore);
8782 }
8783
8784 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8785 NULL_TREE if no simplification can be made.
8786 If ENDP is 0, return DEST (like memcpy).
8787 If ENDP is 1, return DEST+LEN (like mempcpy).
8788 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8789 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8790 (memmove). */
8791
8792 static tree
8793 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8794 tree len, tree type, bool ignore, int endp)
8795 {
8796 tree destvar, srcvar, expr;
8797
8798 if (! validate_arg (dest, POINTER_TYPE)
8799 || ! validate_arg (src, POINTER_TYPE)
8800 || ! validate_arg (len, INTEGER_TYPE))
8801 return NULL_TREE;
8802
8803 /* If the LEN parameter is zero, return DEST. */
8804 if (integer_zerop (len))
8805 return omit_one_operand_loc (loc, type, dest, src);
8806
8807 /* If SRC and DEST are the same (and not volatile), return
8808 DEST{,+LEN,+LEN-1}. */
8809 if (operand_equal_p (src, dest, 0))
8810 expr = len;
8811 else
8812 {
8813 tree srctype, desttype;
8814 unsigned int src_align, dest_align;
8815 tree off0;
8816
8817 if (endp == 3)
8818 {
8819 src_align = get_pointer_alignment (src);
8820 dest_align = get_pointer_alignment (dest);
8821
8822 /* Both DEST and SRC must be pointer types.
8823 ??? This is what old code did. Is the testing for pointer types
8824 really mandatory?
8825
8826 If either SRC is readonly or length is 1, we can use memcpy. */
8827 if (!dest_align || !src_align)
8828 return NULL_TREE;
8829 if (readonly_data_expr (src)
8830 || (host_integerp (len, 1)
8831 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8832 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8833 {
8834 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8835 if (!fn)
8836 return NULL_TREE;
8837 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8838 }
8839
8840 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8841 if (TREE_CODE (src) == ADDR_EXPR
8842 && TREE_CODE (dest) == ADDR_EXPR)
8843 {
8844 tree src_base, dest_base, fn;
8845 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8846 HOST_WIDE_INT size = -1;
8847 HOST_WIDE_INT maxsize = -1;
8848
8849 srcvar = TREE_OPERAND (src, 0);
8850 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8851 &size, &maxsize);
8852 destvar = TREE_OPERAND (dest, 0);
8853 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8854 &size, &maxsize);
8855 if (host_integerp (len, 1))
8856 maxsize = tree_low_cst (len, 1);
8857 else
8858 maxsize = -1;
8859 src_offset /= BITS_PER_UNIT;
8860 dest_offset /= BITS_PER_UNIT;
8861 if (SSA_VAR_P (src_base)
8862 && SSA_VAR_P (dest_base))
8863 {
8864 if (operand_equal_p (src_base, dest_base, 0)
8865 && ranges_overlap_p (src_offset, maxsize,
8866 dest_offset, maxsize))
8867 return NULL_TREE;
8868 }
8869 else if (TREE_CODE (src_base) == MEM_REF
8870 && TREE_CODE (dest_base) == MEM_REF)
8871 {
8872 double_int off;
8873 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8874 TREE_OPERAND (dest_base, 0), 0))
8875 return NULL_TREE;
8876 off = mem_ref_offset (src_base)
8877 + double_int::from_shwi (src_offset);
8878 if (!off.fits_shwi ())
8879 return NULL_TREE;
8880 src_offset = off.low;
8881 off = mem_ref_offset (dest_base)
8882 + double_int::from_shwi (dest_offset);
8883 if (!off.fits_shwi ())
8884 return NULL_TREE;
8885 dest_offset = off.low;
8886 if (ranges_overlap_p (src_offset, maxsize,
8887 dest_offset, maxsize))
8888 return NULL_TREE;
8889 }
8890 else
8891 return NULL_TREE;
8892
8893 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8894 if (!fn)
8895 return NULL_TREE;
8896 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8897 }
8898
8899 /* If the destination and source do not alias, optimize into
8900 memcpy as well. */
8901 if ((is_gimple_min_invariant (dest)
8902 || TREE_CODE (dest) == SSA_NAME)
8903 && (is_gimple_min_invariant (src)
8904 || TREE_CODE (src) == SSA_NAME))
8905 {
8906 ao_ref destr, srcr;
8907 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8908 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8909 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8910 {
8911 tree fn;
8912 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8913 if (!fn)
8914 return NULL_TREE;
8915 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8916 }
8917 }
8918
8919 return NULL_TREE;
8920 }
8921
8922 if (!host_integerp (len, 0))
8923 return NULL_TREE;
8924 /* FIXME:
8925 This logic loses for arguments like (type *)malloc (sizeof (type)),
8926 since we strip the casts off malloc's VOID * return value.
8927 Perhaps we ought to inherit the type from the non-VOID argument here? */
8928 STRIP_NOPS (src);
8929 STRIP_NOPS (dest);
8930 if (!POINTER_TYPE_P (TREE_TYPE (src))
8931 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8932 return NULL_TREE;
8933 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
8934 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8935 {
8936 tree tem = TREE_OPERAND (src, 0);
8937 STRIP_NOPS (tem);
8938 if (tem != TREE_OPERAND (src, 0))
8939 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8940 }
8941 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8942 {
8943 tree tem = TREE_OPERAND (dest, 0);
8944 STRIP_NOPS (tem);
8945 if (tem != TREE_OPERAND (dest, 0))
8946 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8947 }
8948 srctype = TREE_TYPE (TREE_TYPE (src));
8949 if (TREE_CODE (srctype) == ARRAY_TYPE
8950 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8951 {
8952 srctype = TREE_TYPE (srctype);
8953 STRIP_NOPS (src);
8954 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8955 }
8956 desttype = TREE_TYPE (TREE_TYPE (dest));
8957 if (TREE_CODE (desttype) == ARRAY_TYPE
8958 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8959 {
8960 desttype = TREE_TYPE (desttype);
8961 STRIP_NOPS (dest);
8962 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8963 }
8964 if (TREE_ADDRESSABLE (srctype)
8965 || TREE_ADDRESSABLE (desttype))
8966 return NULL_TREE;
8967
8968 src_align = get_pointer_alignment (src);
8969 dest_align = get_pointer_alignment (dest);
8970 if (dest_align < TYPE_ALIGN (desttype)
8971 || src_align < TYPE_ALIGN (srctype))
8972 return NULL_TREE;
8973
8974 if (!ignore)
8975 dest = builtin_save_expr (dest);
8976
8977 /* Build accesses at offset zero with a ref-all character type. */
8978 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8979 ptr_mode, true), 0);
8980
8981 destvar = dest;
8982 STRIP_NOPS (destvar);
8983 if (TREE_CODE (destvar) == ADDR_EXPR
8984 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8985 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8986 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8987 else
8988 destvar = NULL_TREE;
8989
8990 srcvar = src;
8991 STRIP_NOPS (srcvar);
8992 if (TREE_CODE (srcvar) == ADDR_EXPR
8993 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8994 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8995 {
8996 if (!destvar
8997 || src_align >= TYPE_ALIGN (desttype))
8998 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8999 srcvar, off0);
9000 else if (!STRICT_ALIGNMENT)
9001 {
9002 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
9003 src_align);
9004 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
9005 }
9006 else
9007 srcvar = NULL_TREE;
9008 }
9009 else
9010 srcvar = NULL_TREE;
9011
9012 if (srcvar == NULL_TREE && destvar == NULL_TREE)
9013 return NULL_TREE;
9014
9015 if (srcvar == NULL_TREE)
9016 {
9017 STRIP_NOPS (src);
9018 if (src_align >= TYPE_ALIGN (desttype))
9019 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
9020 else
9021 {
9022 if (STRICT_ALIGNMENT)
9023 return NULL_TREE;
9024 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
9025 src_align);
9026 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
9027 }
9028 }
9029 else if (destvar == NULL_TREE)
9030 {
9031 STRIP_NOPS (dest);
9032 if (dest_align >= TYPE_ALIGN (srctype))
9033 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
9034 else
9035 {
9036 if (STRICT_ALIGNMENT)
9037 return NULL_TREE;
9038 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
9039 dest_align);
9040 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
9041 }
9042 }
9043
9044 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
9045 }
9046
9047 if (ignore)
9048 return expr;
9049
9050 if (endp == 0 || endp == 3)
9051 return omit_one_operand_loc (loc, type, dest, expr);
9052
9053 if (expr == len)
9054 expr = NULL_TREE;
9055
9056 if (endp == 2)
9057 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9058 ssize_int (1));
9059
9060 dest = fold_build_pointer_plus_loc (loc, dest, len);
9061 dest = fold_convert_loc (loc, type, dest);
9062 if (expr)
9063 dest = omit_one_operand_loc (loc, type, dest, expr);
9064 return dest;
9065 }
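
/* A hedged illustration of the ENDP return-value conventions implemented
   above, written with plain library calls; not compiled.  */
#if 0
#include <string.h>

static void *endp_0 (void *d, const void *s, size_t n)
{ return memcpy (d, s, n); }			/* ENDP == 0: return D.  */

static void *endp_1 (void *d, const void *s, size_t n)
{ return (char *) memcpy (d, s, n) + n; }	/* ENDP == 1: D + N (mempcpy).  */

static void *endp_2 (void *d, const void *s, size_t n)
{ return (char *) memcpy (d, s, n) + n - 1; }	/* ENDP == 2: D + N - 1 (stpcpy).  */

static void *endp_3 (void *d, const void *s, size_t n)
{ return memmove (d, s, n); }			/* ENDP == 3: return D; may overlap.  */
#endif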
9066
9067 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9068 If LEN is not NULL, it represents the length of the string to be
9069 copied. Return NULL_TREE if no simplification can be made. */
9070
9071 tree
9072 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9073 {
9074 tree fn;
9075
9076 if (!validate_arg (dest, POINTER_TYPE)
9077 || !validate_arg (src, POINTER_TYPE))
9078 return NULL_TREE;
9079
9080 /* If SRC and DEST are the same (and not volatile), return DEST. */
9081 if (operand_equal_p (src, dest, 0))
9082 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9083
9084 if (optimize_function_for_size_p (cfun))
9085 return NULL_TREE;
9086
9087 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9088 if (!fn)
9089 return NULL_TREE;
9090
9091 if (!len)
9092 {
9093 len = c_strlen (src, 1);
9094 if (! len || TREE_SIDE_EFFECTS (len))
9095 return NULL_TREE;
9096 }
9097
9098 len = fold_convert_loc (loc, size_type_node, len);
9099 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9100 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9101 build_call_expr_loc (loc, fn, 3, dest, src, len));
9102 }
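
/* A hedged illustration of the strcpy fold above when the source length
   is a compile-time constant; not compiled.  */
#if 0
#include <string.h>

static char *before (char *d)
{
  return strcpy (d, "hello");
}

static char *after (char *d)
{
  /* c_strlen computed 5; the fold copies strlen + 1 bytes so the
     terminating NUL is included.  */
  return memcpy (d, "hello", 6);
}
#endif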
9103
9104 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9105 Return NULL_TREE if no simplification can be made. */
9106
9107 static tree
9108 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9109 {
9110 tree fn, len, lenp1, call, type;
9111
9112 if (!validate_arg (dest, POINTER_TYPE)
9113 || !validate_arg (src, POINTER_TYPE))
9114 return NULL_TREE;
9115
9116 len = c_strlen (src, 1);
9117 if (!len
9118 || TREE_CODE (len) != INTEGER_CST)
9119 return NULL_TREE;
9120
9121 if (optimize_function_for_size_p (cfun)
9122 /* If length is zero it's small enough. */
9123 && !integer_zerop (len))
9124 return NULL_TREE;
9125
9126 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9127 if (!fn)
9128 return NULL_TREE;
9129
9130 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9131 fold_convert_loc (loc, size_type_node, len),
9132 build_int_cst (size_type_node, 1));
9133 /* We use dest twice in building our expression. Save it from
9134 multiple expansions. */
9135 dest = builtin_save_expr (dest);
9136 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9137
9138 type = TREE_TYPE (TREE_TYPE (fndecl));
9139 dest = fold_build_pointer_plus_loc (loc, dest, len);
9140 dest = fold_convert_loc (loc, type, dest);
9141 dest = omit_one_operand_loc (loc, type, dest, call);
9142 return dest;
9143 }
9144
9145 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9146 If SLEN is not NULL, it represents the length of the source string.
9147 Return NULL_TREE if no simplification can be made. */
9148
9149 tree
9150 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9151 tree src, tree len, tree slen)
9152 {
9153 tree fn;
9154
9155 if (!validate_arg (dest, POINTER_TYPE)
9156 || !validate_arg (src, POINTER_TYPE)
9157 || !validate_arg (len, INTEGER_TYPE))
9158 return NULL_TREE;
9159
9160 /* If the LEN parameter is zero, return DEST. */
9161 if (integer_zerop (len))
9162 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9163
9164 /* We can't compare slen with len as constants below if len is not a
9165 constant. */
9166 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9167 return NULL_TREE;
9168
9169 if (!slen)
9170 slen = c_strlen (src, 1);
9171
9172 /* Now, we must be passed a constant src ptr parameter. */
9173 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9174 return NULL_TREE;
9175
9176 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9177
9178 /* We do not support simplification of this case, though we do
9179 support it when expanding trees into RTL. */
9180 /* FIXME: generate a call to __builtin_memset. */
9181 if (tree_int_cst_lt (slen, len))
9182 return NULL_TREE;
9183
9184 /* OK, transform into builtin memcpy. */
9185 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9186 if (!fn)
9187 return NULL_TREE;
9188
9189 len = fold_convert_loc (loc, size_type_node, len);
9190 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9191 build_call_expr_loc (loc, fn, 3, dest, src, len));
9192 }
9193
9194 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9195 arguments to the call, and TYPE is its return type.
9196 Return NULL_TREE if no simplification can be made. */
9197
9198 static tree
9199 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9200 {
9201 if (!validate_arg (arg1, POINTER_TYPE)
9202 || !validate_arg (arg2, INTEGER_TYPE)
9203 || !validate_arg (len, INTEGER_TYPE))
9204 return NULL_TREE;
9205 else
9206 {
9207 const char *p1;
9208
9209 if (TREE_CODE (arg2) != INTEGER_CST
9210 || !host_integerp (len, 1))
9211 return NULL_TREE;
9212
9213 p1 = c_getstr (arg1);
9214 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9215 {
9216 char c;
9217 const char *r;
9218 tree tem;
9219
9220 if (target_char_cast (arg2, &c))
9221 return NULL_TREE;
9222
9223 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9224
9225 if (r == NULL)
9226 return build_int_cst (TREE_TYPE (arg1), 0);
9227
9228 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9229 return fold_convert_loc (loc, type, tem);
9230 }
9231 return NULL_TREE;
9232 }
9233 }
9234
9235 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9236 Return NULL_TREE if no simplification can be made. */
9237
9238 static tree
9239 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9240 {
9241 const char *p1, *p2;
9242
9243 if (!validate_arg (arg1, POINTER_TYPE)
9244 || !validate_arg (arg2, POINTER_TYPE)
9245 || !validate_arg (len, INTEGER_TYPE))
9246 return NULL_TREE;
9247
9248 /* If the LEN parameter is zero, return zero. */
9249 if (integer_zerop (len))
9250 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9251 arg1, arg2);
9252
9253 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9254 if (operand_equal_p (arg1, arg2, 0))
9255 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9256
9257 p1 = c_getstr (arg1);
9258 p2 = c_getstr (arg2);
9259
9260 /* If all arguments are constant, and the value of len is not greater
9261 than the lengths of arg1 and arg2, evaluate at compile-time. */
9262 if (host_integerp (len, 1) && p1 && p2
9263 && compare_tree_int (len, strlen (p1) + 1) <= 0
9264 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9265 {
9266 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9267
9268 if (r > 0)
9269 return integer_one_node;
9270 else if (r < 0)
9271 return integer_minus_one_node;
9272 else
9273 return integer_zero_node;
9274 }
9275
9276 /* If the len parameter is one, return an expression corresponding to
9277 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9278 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9279 {
9280 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9281 tree cst_uchar_ptr_node
9282 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9283
9284 tree ind1
9285 = fold_convert_loc (loc, integer_type_node,
9286 build1 (INDIRECT_REF, cst_uchar_node,
9287 fold_convert_loc (loc,
9288 cst_uchar_ptr_node,
9289 arg1)));
9290 tree ind2
9291 = fold_convert_loc (loc, integer_type_node,
9292 build1 (INDIRECT_REF, cst_uchar_node,
9293 fold_convert_loc (loc,
9294 cst_uchar_ptr_node,
9295 arg2)));
9296 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9297 }
9298
9299 return NULL_TREE;
9300 }
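
/* A hedged illustration of the single-byte memcmp fold above; not
   compiled.  */
#if 0
#include <string.h>

static int before (const void *a, const void *b)
{
  return memcmp (a, b, 1);
}

static int after (const void *a, const void *b)
{
  /* A one-byte comparison is the difference of the two bytes read as
     unsigned char, matching the MINUS_EXPR built above.  */
  return *(const unsigned char *) a - *(const unsigned char *) b;
}
#endif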
9301
9302 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9303 Return NULL_TREE if no simplification can be made. */
9304
9305 static tree
9306 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9307 {
9308 const char *p1, *p2;
9309
9310 if (!validate_arg (arg1, POINTER_TYPE)
9311 || !validate_arg (arg2, POINTER_TYPE))
9312 return NULL_TREE;
9313
9314 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9315 if (operand_equal_p (arg1, arg2, 0))
9316 return integer_zero_node;
9317
9318 p1 = c_getstr (arg1);
9319 p2 = c_getstr (arg2);
9320
9321 if (p1 && p2)
9322 {
9323 const int i = strcmp (p1, p2);
9324 if (i < 0)
9325 return integer_minus_one_node;
9326 else if (i > 0)
9327 return integer_one_node;
9328 else
9329 return integer_zero_node;
9330 }
9331
9332 /* If the second arg is "", return *(const unsigned char*)arg1. */
9333 if (p2 && *p2 == '\0')
9334 {
9335 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9336 tree cst_uchar_ptr_node
9337 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9338
9339 return fold_convert_loc (loc, integer_type_node,
9340 build1 (INDIRECT_REF, cst_uchar_node,
9341 fold_convert_loc (loc,
9342 cst_uchar_ptr_node,
9343 arg1)));
9344 }
9345
9346 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9347 if (p1 && *p1 == '\0')
9348 {
9349 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9350 tree cst_uchar_ptr_node
9351 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9352
9353 tree temp
9354 = fold_convert_loc (loc, integer_type_node,
9355 build1 (INDIRECT_REF, cst_uchar_node,
9356 fold_convert_loc (loc,
9357 cst_uchar_ptr_node,
9358 arg2)));
9359 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9360 }
9361
9362 return NULL_TREE;
9363 }
9364
9365 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9366 Return NULL_TREE if no simplification can be made. */
9367
9368 static tree
9369 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9370 {
9371 const char *p1, *p2;
9372
9373 if (!validate_arg (arg1, POINTER_TYPE)
9374 || !validate_arg (arg2, POINTER_TYPE)
9375 || !validate_arg (len, INTEGER_TYPE))
9376 return NULL_TREE;
9377
9378 /* If the LEN parameter is zero, return zero. */
9379 if (integer_zerop (len))
9380 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9381 arg1, arg2);
9382
9383 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9384 if (operand_equal_p (arg1, arg2, 0))
9385 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9386
9387 p1 = c_getstr (arg1);
9388 p2 = c_getstr (arg2);
9389
9390 if (host_integerp (len, 1) && p1 && p2)
9391 {
9392 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9393 if (i > 0)
9394 return integer_one_node;
9395 else if (i < 0)
9396 return integer_minus_one_node;
9397 else
9398 return integer_zero_node;
9399 }
9400
9401 /* If the second arg is "", and the length is greater than zero,
9402 return *(const unsigned char*)arg1. */
9403 if (p2 && *p2 == '\0'
9404 && TREE_CODE (len) == INTEGER_CST
9405 && tree_int_cst_sgn (len) == 1)
9406 {
9407 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9408 tree cst_uchar_ptr_node
9409 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9410
9411 return fold_convert_loc (loc, integer_type_node,
9412 build1 (INDIRECT_REF, cst_uchar_node,
9413 fold_convert_loc (loc,
9414 cst_uchar_ptr_node,
9415 arg1)));
9416 }
9417
9418 /* If the first arg is "", and the length is greater than zero,
9419 return -*(const unsigned char*)arg2. */
9420 if (p1 && *p1 == '\0'
9421 && TREE_CODE (len) == INTEGER_CST
9422 && tree_int_cst_sgn (len) == 1)
9423 {
9424 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9425 tree cst_uchar_ptr_node
9426 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9427
9428 tree temp = fold_convert_loc (loc, integer_type_node,
9429 build1 (INDIRECT_REF, cst_uchar_node,
9430 fold_convert_loc (loc,
9431 cst_uchar_ptr_node,
9432 arg2)));
9433 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9434 }
9435
9436 /* If the len parameter is one, return an expression corresponding to
9437 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9438 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9439 {
9440 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9441 tree cst_uchar_ptr_node
9442 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9443
9444 tree ind1 = fold_convert_loc (loc, integer_type_node,
9445 build1 (INDIRECT_REF, cst_uchar_node,
9446 fold_convert_loc (loc,
9447 cst_uchar_ptr_node,
9448 arg1)));
9449 tree ind2 = fold_convert_loc (loc, integer_type_node,
9450 build1 (INDIRECT_REF, cst_uchar_node,
9451 fold_convert_loc (loc,
9452 cst_uchar_ptr_node,
9453 arg2)));
9454 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9455 }
9456
9457 return NULL_TREE;
9458 }
9459
9460 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9461 ARG. Return NULL_TREE if no simplification can be made. */
9462
9463 static tree
9464 fold_builtin_signbit (location_t loc, tree arg, tree type)
9465 {
9466 if (!validate_arg (arg, REAL_TYPE))
9467 return NULL_TREE;
9468
9469 /* If ARG is a compile-time constant, determine the result. */
9470 if (TREE_CODE (arg) == REAL_CST
9471 && !TREE_OVERFLOW (arg))
9472 {
9473 REAL_VALUE_TYPE c;
9474
9475 c = TREE_REAL_CST (arg);
9476 return (REAL_VALUE_NEGATIVE (c)
9477 ? build_one_cst (type)
9478 : build_zero_cst (type));
9479 }
9480
9481 /* If ARG is non-negative, the result is always zero. */
9482 if (tree_expr_nonnegative_p (arg))
9483 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9484
9485 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9486 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9487 return fold_convert (type,
9488 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9489 build_real (TREE_TYPE (arg), dconst0)));
9490
9491 return NULL_TREE;
9492 }
9493
9494 /* Fold function call to builtin copysign, copysignf or copysignl with
9495 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9496 be made. */
9497
9498 static tree
9499 fold_builtin_copysign (location_t loc, tree fndecl,
9500 tree arg1, tree arg2, tree type)
9501 {
9502 tree tem;
9503
9504 if (!validate_arg (arg1, REAL_TYPE)
9505 || !validate_arg (arg2, REAL_TYPE))
9506 return NULL_TREE;
9507
9508 /* copysign(X,X) is X. */
9509 if (operand_equal_p (arg1, arg2, 0))
9510 return fold_convert_loc (loc, type, arg1);
9511
9512 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9513 if (TREE_CODE (arg1) == REAL_CST
9514 && TREE_CODE (arg2) == REAL_CST
9515 && !TREE_OVERFLOW (arg1)
9516 && !TREE_OVERFLOW (arg2))
9517 {
9518 REAL_VALUE_TYPE c1, c2;
9519
9520 c1 = TREE_REAL_CST (arg1);
9521 c2 = TREE_REAL_CST (arg2);
9522 /* c1.sign := c2.sign. */
9523 real_copysign (&c1, &c2);
9524 return build_real (type, c1);
9525 }
9526
9527 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9528 Remember to evaluate Y for side-effects. */
9529 if (tree_expr_nonnegative_p (arg2))
9530 return omit_one_operand_loc (loc, type,
9531 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9532 arg2);
9533
9534 /* Strip sign changing operations for the first argument. */
9535 tem = fold_strip_sign_ops (arg1);
9536 if (tem)
9537 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9538
9539 return NULL_TREE;
9540 }
9541
9542 /* Fold a call to builtin isascii with argument ARG. */
9543
9544 static tree
9545 fold_builtin_isascii (location_t loc, tree arg)
9546 {
9547 if (!validate_arg (arg, INTEGER_TYPE))
9548 return NULL_TREE;
9549 else
9550 {
9551 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9552 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9553 build_int_cst (integer_type_node,
9554 ~ (unsigned HOST_WIDE_INT) 0x7f));
9555 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9556 arg, integer_zero_node);
9557 }
9558 }
9559
9560 /* Fold a call to builtin toascii with argument ARG. */
9561
9562 static tree
9563 fold_builtin_toascii (location_t loc, tree arg)
9564 {
9565 if (!validate_arg (arg, INTEGER_TYPE))
9566 return NULL_TREE;
9567
9568 /* Transform toascii(c) -> (c & 0x7f). */
9569 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9570 build_int_cst (integer_type_node, 0x7f));
9571 }
9572
9573 /* Fold a call to builtin isdigit with argument ARG. */
9574
9575 static tree
9576 fold_builtin_isdigit (location_t loc, tree arg)
9577 {
9578 if (!validate_arg (arg, INTEGER_TYPE))
9579 return NULL_TREE;
9580 else
9581 {
9582 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9583 /* According to the C standard, isdigit is unaffected by locale.
9584 However, it definitely is affected by the target character set. */
9585 unsigned HOST_WIDE_INT target_digit0
9586 = lang_hooks.to_target_charset ('0');
9587
9588 if (target_digit0 == 0)
9589 return NULL_TREE;
9590
9591 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9592 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9593 build_int_cst (unsigned_type_node, target_digit0));
9594 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9595 build_int_cst (unsigned_type_node, 9));
9596 }
9597 }
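
/* A hedged illustration of the single-comparison isdigit fold above.
   The unsigned subtraction wraps values below '0' around to large
   numbers, so one <= test covers both bounds.  Not compiled.  */
#if 0
static int isdigit_folded (int c)
{
  return (unsigned) c - '0' <= 9;
}
#endif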
9598
9599 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9600
9601 static tree
9602 fold_builtin_fabs (location_t loc, tree arg, tree type)
9603 {
9604 if (!validate_arg (arg, REAL_TYPE))
9605 return NULL_TREE;
9606
9607 arg = fold_convert_loc (loc, type, arg);
9608 if (TREE_CODE (arg) == REAL_CST)
9609 return fold_abs_const (arg, type);
9610 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9611 }
9612
9613 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9614
9615 static tree
9616 fold_builtin_abs (location_t loc, tree arg, tree type)
9617 {
9618 if (!validate_arg (arg, INTEGER_TYPE))
9619 return NULL_TREE;
9620
9621 arg = fold_convert_loc (loc, type, arg);
9622 if (TREE_CODE (arg) == INTEGER_CST)
9623 return fold_abs_const (arg, type);
9624 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9625 }
9626
9627 /* Fold a fma operation with arguments ARG[012]. */
9628
9629 tree
9630 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9631 tree type, tree arg0, tree arg1, tree arg2)
9632 {
9633 if (TREE_CODE (arg0) == REAL_CST
9634 && TREE_CODE (arg1) == REAL_CST
9635 && TREE_CODE (arg2) == REAL_CST)
9636 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9637
9638 return NULL_TREE;
9639 }
9640
9641 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9642
9643 static tree
9644 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9645 {
9646 if (validate_arg (arg0, REAL_TYPE)
9647 && validate_arg (arg1, REAL_TYPE)
9648 && validate_arg (arg2, REAL_TYPE))
9649 {
9650 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9651 if (tem)
9652 return tem;
9653
9654 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9655 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9656 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9657 }
9658 return NULL_TREE;
9659 }
9660
9661 /* Fold a call to builtin fmin or fmax. */
9662
9663 static tree
9664 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9665 tree type, bool max)
9666 {
9667 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9668 {
9669 /* Calculate the result when the argument is a constant. */
9670 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9671
9672 if (res)
9673 return res;
9674
9675 /* If either argument is NaN, return the other one. Avoid the
9676 transformation if we get (and honor) a signalling NaN. Using
9677 omit_one_operand() ensures we create a non-lvalue. */
9678 if (TREE_CODE (arg0) == REAL_CST
9679 && real_isnan (&TREE_REAL_CST (arg0))
9680 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9681 || ! TREE_REAL_CST (arg0).signalling))
9682 return omit_one_operand_loc (loc, type, arg1, arg0);
9683 if (TREE_CODE (arg1) == REAL_CST
9684 && real_isnan (&TREE_REAL_CST (arg1))
9685 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9686 || ! TREE_REAL_CST (arg1).signalling))
9687 return omit_one_operand_loc (loc, type, arg0, arg1);
9688
9689 /* Transform fmin/fmax(x,x) -> x. */
9690 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9691 return omit_one_operand_loc (loc, type, arg0, arg1);
9692
9693 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9694 functions to return the numeric arg if the other one is NaN.
9695 These tree codes don't honor that, so only transform if
9696 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9697 handled, so we don't have to worry about it either. */
9698 if (flag_finite_math_only)
9699 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9700 fold_convert_loc (loc, type, arg0),
9701 fold_convert_loc (loc, type, arg1));
9702 }
9703 return NULL_TREE;
9704 }
9705
9706 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9707
9708 static tree
9709 fold_builtin_carg (location_t loc, tree arg, tree type)
9710 {
9711 if (validate_arg (arg, COMPLEX_TYPE)
9712 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9713 {
9714 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9715
9716 if (atan2_fn)
9717 {
9718 tree new_arg = builtin_save_expr (arg);
9719 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9720 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9721 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9722 }
9723 }
9724
9725 return NULL_TREE;
9726 }
9727
9728 /* Fold a call to builtin logb/ilogb. */
9729
9730 static tree
9731 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9732 {
9733 if (! validate_arg (arg, REAL_TYPE))
9734 return NULL_TREE;
9735
9736 STRIP_NOPS (arg);
9737
9738 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9739 {
9740 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9741
9742 switch (value->cl)
9743 {
9744 case rvc_nan:
9745 case rvc_inf:
9746 /* If arg is Inf or NaN and we're logb, return it. */
9747 if (TREE_CODE (rettype) == REAL_TYPE)
9748 {
9749 /* For logb(-Inf) we have to return +Inf. */
9750 if (real_isinf (value) && real_isneg (value))
9751 {
9752 REAL_VALUE_TYPE tem;
9753 real_inf (&tem);
9754 return build_real (rettype, tem);
9755 }
9756 return fold_convert_loc (loc, rettype, arg);
9757 }
9758 /* Fall through... */
9759 case rvc_zero:
9760 /* Zero may set errno and/or raise an exception for logb; also,
9761 for ilogb we don't know FP_ILOGB0. */
9762 return NULL_TREE;
9763 case rvc_normal:
9764 /* For normal numbers, proceed iff radix == 2. In GCC,
9765 normalized significands are in the range [0.5, 1.0). We
9766 want the exponent as if they were [1.0, 2.0) so get the
9767 exponent and subtract 1. */
9768 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9769 return fold_convert_loc (loc, rettype,
9770 build_int_cst (integer_type_node,
9771 REAL_EXP (value)-1));
9772 break;
9773 }
9774 }
9775
9776 return NULL_TREE;
9777 }
9778
9779 /* Fold a call to builtin significand, if radix == 2. */
9780
9781 static tree
9782 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9783 {
9784 if (! validate_arg (arg, REAL_TYPE))
9785 return NULL_TREE;
9786
9787 STRIP_NOPS (arg);
9788
9789 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9790 {
9791 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9792
9793 switch (value->cl)
9794 {
9795 case rvc_zero:
9796 case rvc_nan:
9797 case rvc_inf:
9798 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9799 return fold_convert_loc (loc, rettype, arg);
9800 case rvc_normal:
9801 /* For normal numbers, proceed iff radix == 2. */
9802 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9803 {
9804 REAL_VALUE_TYPE result = *value;
9805 /* In GCC, normalized significands are in the range [0.5,
9806 1.0). We want them to be [1.0, 2.0) so set the
9807 exponent to 1. */
9808 SET_REAL_EXP (&result, 1);
9809 return build_real (rettype, result);
9810 }
9811 break;
9812 }
9813 }
9814
9815 return NULL_TREE;
9816 }
9817
9818 /* Fold a call to builtin frexp; we can assume the base is 2. */
9819
9820 static tree
9821 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9822 {
9823 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9824 return NULL_TREE;
9825
9826 STRIP_NOPS (arg0);
9827
9828 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9829 return NULL_TREE;
9830
9831 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9832
9833 /* Proceed if a valid pointer type was passed in. */
9834 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9835 {
9836 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9837 tree frac, exp;
9838
9839 switch (value->cl)
9840 {
9841 case rvc_zero:
9842 /* For +-0, return (*exp = 0, +-0). */
9843 exp = integer_zero_node;
9844 frac = arg0;
9845 break;
9846 case rvc_nan:
9847 case rvc_inf:
9848 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9849 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9850 case rvc_normal:
9851 {
9852 /* Since the frexp function always expects base 2, and in
9853 GCC normalized significands are already in the range
9854 [0.5, 1.0), we have exactly what frexp wants. */
9855 REAL_VALUE_TYPE frac_rvt = *value;
9856 SET_REAL_EXP (&frac_rvt, 0);
9857 frac = build_real (rettype, frac_rvt);
9858 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9859 }
9860 break;
9861 default:
9862 gcc_unreachable ();
9863 }
9864
9865 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9866 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9867 TREE_SIDE_EFFECTS (arg1) = 1;
9868 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9869 }
9870
9871 return NULL_TREE;
9872 }
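
/* A hedged illustration of the invariant the frexp fold above preserves
   for finite inputs: X == FRAC * 2**EXP, with 0.5 <= |FRAC| < 1 for
   nonzero X, which is exactly GCC's internal normalized-significand
   convention.  Not compiled.  */
#if 0
#include <math.h>
#include <assert.h>

static void frexp_identity (double x)	/* X assumed finite.  */
{
  int e;
  double f = frexp (x, &e);
  assert (x == ldexp (f, e));		/* Reconstruction is exact.  */
}
#endif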
9873
9874 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9875 then we can assume the base is two. If it's false, then we have to
9876 check the mode of the TYPE parameter in certain cases. */
9877
9878 static tree
9879 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9880 tree type, bool ldexp)
9881 {
9882 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9883 {
9884 STRIP_NOPS (arg0);
9885 STRIP_NOPS (arg1);
9886
9887 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9888 if (real_zerop (arg0) || integer_zerop (arg1)
9889 || (TREE_CODE (arg0) == REAL_CST
9890 && !real_isfinite (&TREE_REAL_CST (arg0))))
9891 return omit_one_operand_loc (loc, type, arg0, arg1);
9892
9893 /* If both arguments are constant, then try to evaluate it. */
9894 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9895 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9896 && host_integerp (arg1, 0))
9897 {
9898 /* Bound the maximum adjustment to twice the range of the
9899 mode's valid exponents. Use abs to ensure the range is
9900 positive as a sanity check. */
9901 const long max_exp_adj
9902 = 2 * labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9903 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9904
9905 /* Get the user-requested adjustment. */
9906 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9907
9908 /* The requested adjustment must be inside this range. This
9909 is a preliminary cap to avoid things like overflow; we
9910 may still fail to compute the result for other reasons. */
9911 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9912 {
9913 REAL_VALUE_TYPE initial_result;
9914
9915 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9916
9917 /* Ensure we didn't overflow. */
9918 if (! real_isinf (&initial_result))
9919 {
9920 const REAL_VALUE_TYPE trunc_result
9921 = real_value_truncate (TYPE_MODE (type), initial_result);
9922
9923 /* Only proceed if the target mode can hold the
9924 resulting value. */
9925 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9926 return build_real (type, trunc_result);
9927 }
9928 }
9929 }
9930 }
9931
9932 return NULL_TREE;
9933 }
9934
9935 /* Fold a call to builtin modf. */
9936
9937 static tree
9938 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9939 {
9940 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9941 return NULL_TREE;
9942
9943 STRIP_NOPS (arg0);
9944
9945 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9946 return NULL_TREE;
9947
9948 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9949
9950 /* Proceed if a valid pointer type was passed in. */
9951 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9952 {
9953 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9954 REAL_VALUE_TYPE trunc, frac;
9955
9956 switch (value->cl)
9957 {
9958 case rvc_nan:
9959 case rvc_zero:
9960 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9961 trunc = frac = *value;
9962 break;
9963 case rvc_inf:
9964 /* For +-Inf, return (*arg1 = arg0, +-0). */
9965 frac = dconst0;
9966 frac.sign = value->sign;
9967 trunc = *value;
9968 break;
9969 case rvc_normal:
9970 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9971 real_trunc (&trunc, VOIDmode, value);
9972 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9973 /* If the original number was negative and already
9974 integral, then the fractional part is -0.0. */
9975 if (value->sign && frac.cl == rvc_zero)
9976 frac.sign = value->sign;
9977 break;
9978 }
9979
9980 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9981 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9982 build_real (rettype, trunc));
9983 TREE_SIDE_EFFECTS (arg1) = 1;
9984 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9985 build_real (rettype, frac));
9986 }
9987
9988 return NULL_TREE;
9989 }
9990
9991 /* Given a location LOC, an interclass builtin function decl FNDECL
9992 and its single argument ARG, return a folded expression computing
9993 the same, or NULL_TREE if we either couldn't or didn't want to fold
9994 (the latter happens if there's an RTL instruction available). */
9995
9996 static tree
9997 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9998 {
9999 enum machine_mode mode;
10000
10001 if (!validate_arg (arg, REAL_TYPE))
10002 return NULL_TREE;
10003
10004 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
10005 return NULL_TREE;
10006
10007 mode = TYPE_MODE (TREE_TYPE (arg));
10008
10009 /* If there is no optab, try generic code. */
10010 switch (DECL_FUNCTION_CODE (fndecl))
10011 {
10012 tree result;
10013
10014 CASE_FLT_FN (BUILT_IN_ISINF):
10015 {
10016 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
10017 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
10018 tree const type = TREE_TYPE (arg);
10019 REAL_VALUE_TYPE r;
10020 char buf[128];
10021
10022 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10023 real_from_string (&r, buf);
10024 result = build_call_expr (isgr_fn, 2,
10025 fold_build1_loc (loc, ABS_EXPR, type, arg),
10026 build_real (type, r));
10027 return result;
10028 }
10029 CASE_FLT_FN (BUILT_IN_FINITE):
10030 case BUILT_IN_ISFINITE:
10031 {
10032 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
10033 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10034 tree const type = TREE_TYPE (arg);
10035 REAL_VALUE_TYPE r;
10036 char buf[128];
10037
10038 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10039 real_from_string (&r, buf);
10040 result = build_call_expr (isle_fn, 2,
10041 fold_build1_loc (loc, ABS_EXPR, type, arg),
10042 build_real (type, r));
10043 /*result = fold_build2_loc (loc, UNGT_EXPR,
10044 TREE_TYPE (TREE_TYPE (fndecl)),
10045 fold_build1_loc (loc, ABS_EXPR, type, arg),
10046 build_real (type, r));
10047 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10048 TREE_TYPE (TREE_TYPE (fndecl)),
10049 result);*/
10050 return result;
10051 }
10052 case BUILT_IN_ISNORMAL:
10053 {
10054 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10055 islessequal(fabs(x),DBL_MAX). */
10056 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10057 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10058 tree const type = TREE_TYPE (arg);
10059 REAL_VALUE_TYPE rmax, rmin;
10060 char buf[128];
10061
10062 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10063 real_from_string (&rmax, buf);
10064 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10065 real_from_string (&rmin, buf);
10066 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10067 result = build_call_expr (isle_fn, 2, arg,
10068 build_real (type, rmax));
10069 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
10070 build_call_expr (isge_fn, 2, arg,
10071 build_real (type, rmin)));
10072 return result;
10073 }
10074 default:
10075 break;
10076 }
10077
10078 return NULL_TREE;
10079 }
10080
10081 /* Fold a call to __builtin_isnan(), __builtin_isinf() or __builtin_finite().
10082 ARG is the argument for the call. */
10083
10084 static tree
10085 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10086 {
10087 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10088 REAL_VALUE_TYPE r;
10089
10090 if (!validate_arg (arg, REAL_TYPE))
10091 return NULL_TREE;
10092
10093 switch (builtin_index)
10094 {
10095 case BUILT_IN_ISINF:
10096 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10097 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10098
10099 if (TREE_CODE (arg) == REAL_CST)
10100 {
10101 r = TREE_REAL_CST (arg);
10102 if (real_isinf (&r))
10103 return real_compare (GT_EXPR, &r, &dconst0)
10104 ? integer_one_node : integer_minus_one_node;
10105 else
10106 return integer_zero_node;
10107 }
10108
10109 return NULL_TREE;
10110
10111 case BUILT_IN_ISINF_SIGN:
10112 {
10113 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10114 /* In a boolean context, GCC will fold the inner COND_EXPR to
10115 1. So e.g. "if (isinf_sign(x))" would be folded to just
10116 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10117 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10118 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10119 tree tmp = NULL_TREE;
10120
10121 arg = builtin_save_expr (arg);
10122
10123 if (signbit_fn && isinf_fn)
10124 {
10125 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10126 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10127
10128 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10129 signbit_call, integer_zero_node);
10130 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10131 isinf_call, integer_zero_node);
10132
10133 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10134 integer_minus_one_node, integer_one_node);
10135 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10136 isinf_call, tmp,
10137 integer_zero_node);
10138 }
10139
10140 return tmp;
10141 }
10142
10143 case BUILT_IN_ISFINITE:
10144 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10145 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10146 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10147
10148 if (TREE_CODE (arg) == REAL_CST)
10149 {
10150 r = TREE_REAL_CST (arg);
10151 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10152 }
10153
10154 return NULL_TREE;
10155
10156 case BUILT_IN_ISNAN:
10157 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10158 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10159
10160 if (TREE_CODE (arg) == REAL_CST)
10161 {
10162 r = TREE_REAL_CST (arg);
10163 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10164 }
10165
10166 arg = builtin_save_expr (arg);
10167 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10168
10169 default:
10170 gcc_unreachable ();
10171 }
10172 }
10173
10174 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10175 This builtin will generate code to return the appropriate floating
10176 point classification depending on the value of the floating point
10177 number passed in. The possible return values must be supplied as
10178 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10179 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10180 one floating point argument which is "type generic". */
10181
10182 static tree
10183 fold_builtin_fpclassify (location_t loc, tree exp)
10184 {
10185 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10186 arg, type, res, tmp;
10187 enum machine_mode mode;
10188 REAL_VALUE_TYPE r;
10189 char buf[128];
10190
10191 /* Verify the required arguments in the original call. */
10192 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10193 INTEGER_TYPE, INTEGER_TYPE,
10194 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10195 return NULL_TREE;
10196
10197 fp_nan = CALL_EXPR_ARG (exp, 0);
10198 fp_infinite = CALL_EXPR_ARG (exp, 1);
10199 fp_normal = CALL_EXPR_ARG (exp, 2);
10200 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10201 fp_zero = CALL_EXPR_ARG (exp, 4);
10202 arg = CALL_EXPR_ARG (exp, 5);
10203 type = TREE_TYPE (arg);
10204 mode = TYPE_MODE (type);
10205 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10206
10207 /* fpclassify(x) ->
10208 isnan(x) ? FP_NAN :
10209 (fabs(x) == Inf ? FP_INFINITE :
10210 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10211 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10212
10213 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10214 build_real (type, dconst0));
10215 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10216 tmp, fp_zero, fp_subnormal);
10217
10218 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10219 real_from_string (&r, buf);
10220 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10221 arg, build_real (type, r));
10222 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10223
10224 if (HONOR_INFINITIES (mode))
10225 {
10226 real_inf (&r);
10227 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10228 build_real (type, r));
10229 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10230 fp_infinite, res);
10231 }
10232
10233 if (HONOR_NANS (mode))
10234 {
10235 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10236 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10237 }
10238
10239 return res;
10240 }
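
/* A hedged illustration of the nested COND_EXPR chain built above,
   written as C for an IEEE double; 0x1p-1022 stands in for the smallest
   normal (DBL_MIN) and the fp_* parameters are the caller-supplied
   classification values.  Not compiled.  */
#if 0
static int fpclassify_folded (double x, int fp_nan, int fp_infinite,
			      int fp_normal, int fp_subnormal, int fp_zero)
{
  double ax = __builtin_fabs (x);
  return (x == x				/* Ordered, i.e. not NaN.  */
	  ? (ax == __builtin_inf () ? fp_infinite
	     : ax >= 0x1p-1022 ? fp_normal
	     : ax == 0.0 ? fp_zero
	     : fp_subnormal)
	  : fp_nan);
}
#endif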
10241
10242 /* Fold a call to an unordered comparison function such as
10243 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10244 being called and ARG0 and ARG1 are the arguments for the call.
10245 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10246 the opposite of the desired result. UNORDERED_CODE is used
10247 for modes that can hold NaNs and ORDERED_CODE is used for
10248 the rest. */
10249
10250 static tree
10251 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10252 enum tree_code unordered_code,
10253 enum tree_code ordered_code)
10254 {
10255 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10256 enum tree_code code;
10257 tree type0, type1;
10258 enum tree_code code0, code1;
10259 tree cmp_type = NULL_TREE;
10260
10261 type0 = TREE_TYPE (arg0);
10262 type1 = TREE_TYPE (arg1);
10263
10264 code0 = TREE_CODE (type0);
10265 code1 = TREE_CODE (type1);
10266
10267 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10268 /* Choose the wider of two real types. */
10269 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10270 ? type0 : type1;
10271 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10272 cmp_type = type0;
10273 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10274 cmp_type = type1;
10275
10276 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10277 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10278
10279 if (unordered_code == UNORDERED_EXPR)
10280 {
10281 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10282 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10283 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10284 }
10285
10286 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10287 : ordered_code;
10288 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10289 fold_build2_loc (loc, code, type, arg0, arg1));
10290 }
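
/* A hedged illustration of why the fold above negates the *opposite*
   comparison: isgreater must be quiet on NaNs, and "ordered and x > y"
   is the logical negation of "unordered or x <= y" (UNLE_EXPR).  Not
   compiled.  */
#if 0
#include <math.h>

static int isgreater_ref (double x, double y)
{
  /* The || short-circuits on unordered operands, so the raw x <= y is
     never evaluated on a NaN.  */
  return !(isunordered (x, y) || x <= y);
}
#endif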
10291
10292 /* Fold a call to built-in function FNDECL with 0 arguments.
10293 IGNORE is true if the result of the function call is ignored. This
10294 function returns NULL_TREE if no simplification was possible. */
10295
10296 static tree
10297 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10298 {
10299 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10300 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10301 switch (fcode)
10302 {
10303 CASE_FLT_FN (BUILT_IN_INF):
10304 case BUILT_IN_INFD32:
10305 case BUILT_IN_INFD64:
10306 case BUILT_IN_INFD128:
10307 return fold_builtin_inf (loc, type, true);
10308
10309 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10310 return fold_builtin_inf (loc, type, false);
10311
10312 case BUILT_IN_CLASSIFY_TYPE:
10313 return fold_builtin_classify_type (NULL_TREE);
10314
10315 case BUILT_IN_UNREACHABLE:
10316 if (flag_sanitize & SANITIZE_UNREACHABLE
10317 && (current_function_decl == NULL
10318 || !lookup_attribute ("no_sanitize_undefined",
10319 DECL_ATTRIBUTES (current_function_decl))))
10320 return ubsan_instrument_unreachable (loc);
10321 break;
10322
10323 default:
10324 break;
10325 }
10326 return NULL_TREE;
10327 }
10328
10329 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10330 IGNORE is true if the result of the function call is ignored. This
10331 function returns NULL_TREE if no simplification was possible. */
10332
10333 static tree
10334 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10335 {
10336 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10337 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10338 switch (fcode)
10339 {
10340 case BUILT_IN_CONSTANT_P:
10341 {
10342 tree val = fold_builtin_constant_p (arg0);
10343
10344 /* Gimplification will pull the CALL_EXPR for the builtin out of
10345 an if condition. When not optimizing, we'll not CSE it back.
10346 To avoid link-error regressions, return false now. */
10347 if (!val && !optimize)
10348 val = integer_zero_node;
10349
10350 return val;
10351 }
10352
10353 case BUILT_IN_CLASSIFY_TYPE:
10354 return fold_builtin_classify_type (arg0);
10355
10356 case BUILT_IN_STRLEN:
10357 return fold_builtin_strlen (loc, type, arg0);
10358
10359 CASE_FLT_FN (BUILT_IN_FABS):
10360 case BUILT_IN_FABSD32:
10361 case BUILT_IN_FABSD64:
10362 case BUILT_IN_FABSD128:
10363 return fold_builtin_fabs (loc, arg0, type);
10364
10365 case BUILT_IN_ABS:
10366 case BUILT_IN_LABS:
10367 case BUILT_IN_LLABS:
10368 case BUILT_IN_IMAXABS:
10369 return fold_builtin_abs (loc, arg0, type);
10370
10371 CASE_FLT_FN (BUILT_IN_CONJ):
10372 if (validate_arg (arg0, COMPLEX_TYPE)
10373 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10374 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10375 break;
10376
10377 CASE_FLT_FN (BUILT_IN_CREAL):
10378 if (validate_arg (arg0, COMPLEX_TYPE)
10379 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10380 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10381 break;
10382
10383 CASE_FLT_FN (BUILT_IN_CIMAG):
10384 if (validate_arg (arg0, COMPLEX_TYPE)
10385 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10386 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10387 break;
10388
10389 CASE_FLT_FN (BUILT_IN_CCOS):
10390 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10391
10392 CASE_FLT_FN (BUILT_IN_CCOSH):
10393 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10394
10395 CASE_FLT_FN (BUILT_IN_CPROJ):
10396 return fold_builtin_cproj (loc, arg0, type);
10397
10398 CASE_FLT_FN (BUILT_IN_CSIN):
10399 if (validate_arg (arg0, COMPLEX_TYPE)
10400 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10401 return do_mpc_arg1 (arg0, type, mpc_sin);
10402 break;
10403
10404 CASE_FLT_FN (BUILT_IN_CSINH):
10405 if (validate_arg (arg0, COMPLEX_TYPE)
10406 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10407 return do_mpc_arg1 (arg0, type, mpc_sinh);
10408 break;
10409
10410 CASE_FLT_FN (BUILT_IN_CTAN):
10411 if (validate_arg (arg0, COMPLEX_TYPE)
10412 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10413 return do_mpc_arg1 (arg0, type, mpc_tan);
10414 break;
10415
10416 CASE_FLT_FN (BUILT_IN_CTANH):
10417 if (validate_arg (arg0, COMPLEX_TYPE)
10418 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10419 return do_mpc_arg1 (arg0, type, mpc_tanh);
10420 break;
10421
10422 CASE_FLT_FN (BUILT_IN_CLOG):
10423 if (validate_arg (arg0, COMPLEX_TYPE)
10424 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10425 return do_mpc_arg1 (arg0, type, mpc_log);
10426 break;
10427
10428 CASE_FLT_FN (BUILT_IN_CSQRT):
10429 if (validate_arg (arg0, COMPLEX_TYPE)
10430 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10431 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10432 break;
10433
10434 CASE_FLT_FN (BUILT_IN_CASIN):
10435 if (validate_arg (arg0, COMPLEX_TYPE)
10436 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10437 return do_mpc_arg1 (arg0, type, mpc_asin);
10438 break;
10439
10440 CASE_FLT_FN (BUILT_IN_CACOS):
10441 if (validate_arg (arg0, COMPLEX_TYPE)
10442 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10443 return do_mpc_arg1 (arg0, type, mpc_acos);
10444 break;
10445
10446 CASE_FLT_FN (BUILT_IN_CATAN):
10447 if (validate_arg (arg0, COMPLEX_TYPE)
10448 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10449 return do_mpc_arg1 (arg0, type, mpc_atan);
10450 break;
10451
10452 CASE_FLT_FN (BUILT_IN_CASINH):
10453 if (validate_arg (arg0, COMPLEX_TYPE)
10454 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10455 return do_mpc_arg1 (arg0, type, mpc_asinh);
10456 break;
10457
10458 CASE_FLT_FN (BUILT_IN_CACOSH):
10459 if (validate_arg (arg0, COMPLEX_TYPE)
10460 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10461 return do_mpc_arg1 (arg0, type, mpc_acosh);
10462 break;
10463
10464 CASE_FLT_FN (BUILT_IN_CATANH):
10465 if (validate_arg (arg0, COMPLEX_TYPE)
10466 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10467 return do_mpc_arg1 (arg0, type, mpc_atanh);
10468 break;
10469
10470 CASE_FLT_FN (BUILT_IN_CABS):
10471 return fold_builtin_cabs (loc, arg0, type, fndecl);
10472
10473 CASE_FLT_FN (BUILT_IN_CARG):
10474 return fold_builtin_carg (loc, arg0, type);
10475
10476 CASE_FLT_FN (BUILT_IN_SQRT):
10477 return fold_builtin_sqrt (loc, arg0, type);
10478
10479 CASE_FLT_FN (BUILT_IN_CBRT):
10480 return fold_builtin_cbrt (loc, arg0, type);
10481
10482 CASE_FLT_FN (BUILT_IN_ASIN):
10483 if (validate_arg (arg0, REAL_TYPE))
10484 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10485 &dconstm1, &dconst1, true);
10486 break;
10487
10488 CASE_FLT_FN (BUILT_IN_ACOS):
10489 if (validate_arg (arg0, REAL_TYPE))
10490 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10491 &dconstm1, &dconst1, true);
10492 break;
10493
10494 CASE_FLT_FN (BUILT_IN_ATAN):
10495 if (validate_arg (arg0, REAL_TYPE))
10496 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10497 break;
10498
10499 CASE_FLT_FN (BUILT_IN_ASINH):
10500 if (validate_arg (arg0, REAL_TYPE))
10501 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10502 break;
10503
10504 CASE_FLT_FN (BUILT_IN_ACOSH):
10505 if (validate_arg (arg0, REAL_TYPE))
10506 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10507 &dconst1, NULL, true);
10508 break;
10509
10510 CASE_FLT_FN (BUILT_IN_ATANH):
10511 if (validate_arg (arg0, REAL_TYPE))
10512 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10513 &dconstm1, &dconst1, false);
10514 break;
10515
10516 CASE_FLT_FN (BUILT_IN_SIN):
10517 if (validate_arg (arg0, REAL_TYPE))
10518 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10519 break;
10520
10521 CASE_FLT_FN (BUILT_IN_COS):
10522 return fold_builtin_cos (loc, arg0, type, fndecl);
10523
10524 CASE_FLT_FN (BUILT_IN_TAN):
10525 return fold_builtin_tan (arg0, type);
10526
10527 CASE_FLT_FN (BUILT_IN_CEXP):
10528 return fold_builtin_cexp (loc, arg0, type);
10529
10530 CASE_FLT_FN (BUILT_IN_CEXPI):
10531 if (validate_arg (arg0, REAL_TYPE))
10532 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10533 break;
10534
10535 CASE_FLT_FN (BUILT_IN_SINH):
10536 if (validate_arg (arg0, REAL_TYPE))
10537 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10538 break;
10539
10540 CASE_FLT_FN (BUILT_IN_COSH):
10541 return fold_builtin_cosh (loc, arg0, type, fndecl);
10542
10543 CASE_FLT_FN (BUILT_IN_TANH):
10544 if (validate_arg (arg0, REAL_TYPE))
10545 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10546 break;
10547
10548 CASE_FLT_FN (BUILT_IN_ERF):
10549 if (validate_arg (arg0, REAL_TYPE))
10550 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10551 break;
10552
10553 CASE_FLT_FN (BUILT_IN_ERFC):
10554 if (validate_arg (arg0, REAL_TYPE))
10555 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10556 break;
10557
10558 CASE_FLT_FN (BUILT_IN_TGAMMA):
10559 if (validate_arg (arg0, REAL_TYPE))
10560 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10561 break;
10562
10563 CASE_FLT_FN (BUILT_IN_EXP):
10564 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10565
10566 CASE_FLT_FN (BUILT_IN_EXP2):
10567 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10568
10569 CASE_FLT_FN (BUILT_IN_EXP10):
10570 CASE_FLT_FN (BUILT_IN_POW10):
10571 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10572
10573 CASE_FLT_FN (BUILT_IN_EXPM1):
10574 if (validate_arg (arg0, REAL_TYPE))
10575 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10576 break;
10577
10578 CASE_FLT_FN (BUILT_IN_LOG):
10579 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10580
10581 CASE_FLT_FN (BUILT_IN_LOG2):
10582 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10583
10584 CASE_FLT_FN (BUILT_IN_LOG10):
10585 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10586
10587 CASE_FLT_FN (BUILT_IN_LOG1P):
10588 if (validate_arg (arg0, REAL_TYPE))
10589 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10590 &dconstm1, NULL, false);
10591 break;
10592
10593 CASE_FLT_FN (BUILT_IN_J0):
10594 if (validate_arg (arg0, REAL_TYPE))
10595 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10596 NULL, NULL, 0);
10597 break;
10598
10599 CASE_FLT_FN (BUILT_IN_J1):
10600 if (validate_arg (arg0, REAL_TYPE))
10601 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10602 NULL, NULL, 0);
10603 break;
10604
10605 CASE_FLT_FN (BUILT_IN_Y0):
10606 if (validate_arg (arg0, REAL_TYPE))
10607 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10608 &dconst0, NULL, false);
10609 break;
10610
10611 CASE_FLT_FN (BUILT_IN_Y1):
10612 if (validate_arg (arg0, REAL_TYPE))
10613 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10614 &dconst0, NULL, false);
10615 break;
10616
10617 CASE_FLT_FN (BUILT_IN_NAN):
10618 case BUILT_IN_NAND32:
10619 case BUILT_IN_NAND64:
10620 case BUILT_IN_NAND128:
10621 return fold_builtin_nan (arg0, type, true);
10622
10623 CASE_FLT_FN (BUILT_IN_NANS):
10624 return fold_builtin_nan (arg0, type, false);
10625
10626 CASE_FLT_FN (BUILT_IN_FLOOR):
10627 return fold_builtin_floor (loc, fndecl, arg0);
10628
10629 CASE_FLT_FN (BUILT_IN_CEIL):
10630 return fold_builtin_ceil (loc, fndecl, arg0);
10631
10632 CASE_FLT_FN (BUILT_IN_TRUNC):
10633 return fold_builtin_trunc (loc, fndecl, arg0);
10634
10635 CASE_FLT_FN (BUILT_IN_ROUND):
10636 return fold_builtin_round (loc, fndecl, arg0);
10637
10638 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10639 CASE_FLT_FN (BUILT_IN_RINT):
10640 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10641
10642 CASE_FLT_FN (BUILT_IN_ICEIL):
10643 CASE_FLT_FN (BUILT_IN_LCEIL):
10644 CASE_FLT_FN (BUILT_IN_LLCEIL):
10645 CASE_FLT_FN (BUILT_IN_LFLOOR):
10646 CASE_FLT_FN (BUILT_IN_IFLOOR):
10647 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10648 CASE_FLT_FN (BUILT_IN_IROUND):
10649 CASE_FLT_FN (BUILT_IN_LROUND):
10650 CASE_FLT_FN (BUILT_IN_LLROUND):
10651 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10652
10653 CASE_FLT_FN (BUILT_IN_IRINT):
10654 CASE_FLT_FN (BUILT_IN_LRINT):
10655 CASE_FLT_FN (BUILT_IN_LLRINT):
10656 return fold_fixed_mathfn (loc, fndecl, arg0);
10657
10658 case BUILT_IN_BSWAP16:
10659 case BUILT_IN_BSWAP32:
10660 case BUILT_IN_BSWAP64:
10661 return fold_builtin_bswap (fndecl, arg0);
10662
10663 CASE_INT_FN (BUILT_IN_FFS):
10664 CASE_INT_FN (BUILT_IN_CLZ):
10665 CASE_INT_FN (BUILT_IN_CTZ):
10666 CASE_INT_FN (BUILT_IN_CLRSB):
10667 CASE_INT_FN (BUILT_IN_POPCOUNT):
10668 CASE_INT_FN (BUILT_IN_PARITY):
10669 return fold_builtin_bitop (fndecl, arg0);
10670
10671 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10672 return fold_builtin_signbit (loc, arg0, type);
10673
10674 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10675 return fold_builtin_significand (loc, arg0, type);
10676
10677 CASE_FLT_FN (BUILT_IN_ILOGB):
10678 CASE_FLT_FN (BUILT_IN_LOGB):
10679 return fold_builtin_logb (loc, arg0, type);
10680
10681 case BUILT_IN_ISASCII:
10682 return fold_builtin_isascii (loc, arg0);
10683
10684 case BUILT_IN_TOASCII:
10685 return fold_builtin_toascii (loc, arg0);
10686
10687 case BUILT_IN_ISDIGIT:
10688 return fold_builtin_isdigit (loc, arg0);
10689
10690 CASE_FLT_FN (BUILT_IN_FINITE):
10691 case BUILT_IN_FINITED32:
10692 case BUILT_IN_FINITED64:
10693 case BUILT_IN_FINITED128:
10694 case BUILT_IN_ISFINITE:
10695 {
10696 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10697 if (ret)
10698 return ret;
10699 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10700 }
10701
10702 CASE_FLT_FN (BUILT_IN_ISINF):
10703 case BUILT_IN_ISINFD32:
10704 case BUILT_IN_ISINFD64:
10705 case BUILT_IN_ISINFD128:
10706 {
10707 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10708 if (ret)
10709 return ret;
10710 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10711 }
10712
10713 case BUILT_IN_ISNORMAL:
10714 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10715
10716 case BUILT_IN_ISINF_SIGN:
10717 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10718
10719 CASE_FLT_FN (BUILT_IN_ISNAN):
10720 case BUILT_IN_ISNAND32:
10721 case BUILT_IN_ISNAND64:
10722 case BUILT_IN_ISNAND128:
10723 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10724
10725 case BUILT_IN_PRINTF:
10726 case BUILT_IN_PRINTF_UNLOCKED:
10727 case BUILT_IN_VPRINTF:
10728 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10729
10730 case BUILT_IN_FREE:
10731 if (integer_zerop (arg0))
10732 return build_empty_stmt (loc);
10733 break;
10734
10735 default:
10736 break;
10737 }
10738
10739 return NULL_TREE;
10740
10741 }
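
/* A few illustrative instances (not from the original sources) of the
   one-argument folds above:

     fabs (-4.0)  ->  4.0                   (fold_builtin_fabs)
     cimag (z)    ->  IMAGPART_EXPR <z>     (complex-float argument)
     sin (x)      ->  an MPFR-computed REAL_CST when X is a constant
     free (0)     ->  an empty statement

   Non-constant arguments generally do not simplify; NULL_TREE is
   returned and the call is left alone.  */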
10742
10743 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10744 IGNORE is true if the result of the function call is ignored. This
10745 function returns NULL_TREE if no simplification was possible. */
10746
10747 static tree
10748 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10749 {
10750 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10751 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10752
10753 switch (fcode)
10754 {
10755 CASE_FLT_FN (BUILT_IN_JN):
10756 if (validate_arg (arg0, INTEGER_TYPE)
10757 && validate_arg (arg1, REAL_TYPE))
10758 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10759 break;
10760
10761 CASE_FLT_FN (BUILT_IN_YN):
10762 if (validate_arg (arg0, INTEGER_TYPE)
10763 && validate_arg (arg1, REAL_TYPE))
10764 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10765 &dconst0, false);
10766 break;
10767
10768 CASE_FLT_FN (BUILT_IN_DREM):
10769 CASE_FLT_FN (BUILT_IN_REMAINDER):
10770 if (validate_arg (arg0, REAL_TYPE)
10771 && validate_arg (arg1, REAL_TYPE))
10772 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10773 break;
10774
10775 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10776 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10777 if (validate_arg (arg0, REAL_TYPE)
10778 && validate_arg (arg1, POINTER_TYPE))
10779 return do_mpfr_lgamma_r (arg0, arg1, type);
10780 break;
10781
10782 CASE_FLT_FN (BUILT_IN_ATAN2):
10783 if (validate_arg (arg0, REAL_TYPE)
10784 && validate_arg (arg1, REAL_TYPE))
10785 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10786 break;
10787
10788 CASE_FLT_FN (BUILT_IN_FDIM):
10789 if (validate_arg (arg0, REAL_TYPE)
10790 && validate_arg (arg1, REAL_TYPE))
10791 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10792 break;
10793
10794 CASE_FLT_FN (BUILT_IN_HYPOT):
10795 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10796
10797 CASE_FLT_FN (BUILT_IN_CPOW):
10798 if (validate_arg (arg0, COMPLEX_TYPE)
10799 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10800 && validate_arg (arg1, COMPLEX_TYPE)
10801 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10802 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10803 break;
10804
10805 CASE_FLT_FN (BUILT_IN_LDEXP):
10806 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10807 CASE_FLT_FN (BUILT_IN_SCALBN):
10808 CASE_FLT_FN (BUILT_IN_SCALBLN):
10809 return fold_builtin_load_exponent (loc, arg0, arg1,
10810 type, /*ldexp=*/false);
10811
10812 CASE_FLT_FN (BUILT_IN_FREXP):
10813 return fold_builtin_frexp (loc, arg0, arg1, type);
10814
10815 CASE_FLT_FN (BUILT_IN_MODF):
10816 return fold_builtin_modf (loc, arg0, arg1, type);
10817
10818 case BUILT_IN_BZERO:
10819 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10820
10821 case BUILT_IN_FPUTS:
10822 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10823
10824 case BUILT_IN_FPUTS_UNLOCKED:
10825 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10826
10827 case BUILT_IN_STRSTR:
10828 return fold_builtin_strstr (loc, arg0, arg1, type);
10829
10830 case BUILT_IN_STRCAT:
10831 return fold_builtin_strcat (loc, arg0, arg1);
10832
10833 case BUILT_IN_STRSPN:
10834 return fold_builtin_strspn (loc, arg0, arg1);
10835
10836 case BUILT_IN_STRCSPN:
10837 return fold_builtin_strcspn (loc, arg0, arg1);
10838
10839 case BUILT_IN_STRCHR:
10840 case BUILT_IN_INDEX:
10841 return fold_builtin_strchr (loc, arg0, arg1, type);
10842
10843 case BUILT_IN_STRRCHR:
10844 case BUILT_IN_RINDEX:
10845 return fold_builtin_strrchr (loc, arg0, arg1, type);
10846
10847 case BUILT_IN_STRCPY:
10848 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10849
10850 case BUILT_IN_STPCPY:
10851 if (ignore)
10852 {
10853 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10854 if (!fn)
10855 break;
10856
10857 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10858 }
10859 else
10860 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10861 break;
10862
10863 case BUILT_IN_STRCMP:
10864 return fold_builtin_strcmp (loc, arg0, arg1);
10865
10866 case BUILT_IN_STRPBRK:
10867 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10868
10869 case BUILT_IN_EXPECT:
10870 return fold_builtin_expect (loc, arg0, arg1);
10871
10872 CASE_FLT_FN (BUILT_IN_POW):
10873 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10874
10875 CASE_FLT_FN (BUILT_IN_POWI):
10876 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10877
10878 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10879 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10880
10881 CASE_FLT_FN (BUILT_IN_FMIN):
10882 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10883
10884 CASE_FLT_FN (BUILT_IN_FMAX):
10885 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10886
10887 case BUILT_IN_ISGREATER:
10888 return fold_builtin_unordered_cmp (loc, fndecl,
10889 arg0, arg1, UNLE_EXPR, LE_EXPR);
10890 case BUILT_IN_ISGREATEREQUAL:
10891 return fold_builtin_unordered_cmp (loc, fndecl,
10892 arg0, arg1, UNLT_EXPR, LT_EXPR);
10893 case BUILT_IN_ISLESS:
10894 return fold_builtin_unordered_cmp (loc, fndecl,
10895 arg0, arg1, UNGE_EXPR, GE_EXPR);
10896 case BUILT_IN_ISLESSEQUAL:
10897 return fold_builtin_unordered_cmp (loc, fndecl,
10898 arg0, arg1, UNGT_EXPR, GT_EXPR);
10899 case BUILT_IN_ISLESSGREATER:
10900 return fold_builtin_unordered_cmp (loc, fndecl,
10901 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10902 case BUILT_IN_ISUNORDERED:
10903 return fold_builtin_unordered_cmp (loc, fndecl,
10904 arg0, arg1, UNORDERED_EXPR,
10905 NOP_EXPR);
10906
10907 /* We do the folding for va_start in the expander. */
10908 case BUILT_IN_VA_START:
10909 break;
10910
10911 case BUILT_IN_SPRINTF:
10912 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10913
10914 case BUILT_IN_OBJECT_SIZE:
10915 return fold_builtin_object_size (arg0, arg1);
10916
10917 case BUILT_IN_PRINTF:
10918 case BUILT_IN_PRINTF_UNLOCKED:
10919 case BUILT_IN_VPRINTF:
10920 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10921
10922 case BUILT_IN_PRINTF_CHK:
10923 case BUILT_IN_VPRINTF_CHK:
10924 if (!validate_arg (arg0, INTEGER_TYPE)
10925 || TREE_SIDE_EFFECTS (arg0))
10926 return NULL_TREE;
10927 else
10928 return fold_builtin_printf (loc, fndecl,
10929 arg1, NULL_TREE, ignore, fcode);
10930 break;
10931
10932 case BUILT_IN_FPRINTF:
10933 case BUILT_IN_FPRINTF_UNLOCKED:
10934 case BUILT_IN_VFPRINTF:
10935 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10936 ignore, fcode);
10937
10938 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10939 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10940
10941 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10942 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10943
10944 default:
10945 break;
10946 }
10947 return NULL_TREE;
10948 }
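
/* Sketch (not from the original sources) of the unordered-comparison
   folds dispatched above: fold_builtin_unordered_cmp rewrites each
   macro as the negation of the matching "unordered or ..." comparison
   when NaNs may be involved, e.g.

     isgreater (x, y)    ->  !(x UNLE y)
     isunordered (x, y)  ->  x UNORDERED y

   so that the result is false (respectively true) when either operand
   is a NaN, without raising an invalid-operand exception.  */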
10949
10950 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10951 and ARG2. IGNORE is true if the result of the function call is ignored.
10952 This function returns NULL_TREE if no simplification was possible. */
10953
10954 static tree
10955 fold_builtin_3 (location_t loc, tree fndecl,
10956 tree arg0, tree arg1, tree arg2, bool ignore)
10957 {
10958 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10959 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10960 switch (fcode)
10961 {
10962
10963 CASE_FLT_FN (BUILT_IN_SINCOS):
10964 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10965
10966 CASE_FLT_FN (BUILT_IN_FMA):
10967       return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10969
10970 CASE_FLT_FN (BUILT_IN_REMQUO):
10971 if (validate_arg (arg0, REAL_TYPE)
10972 && validate_arg (arg1, REAL_TYPE)
10973 && validate_arg (arg2, POINTER_TYPE))
10974 return do_mpfr_remquo (arg0, arg1, arg2);
10975 break;
10976
10977 case BUILT_IN_MEMSET:
10978 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10979
10980 case BUILT_IN_BCOPY:
10981 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10982 void_type_node, true, /*endp=*/3);
10983
10984 case BUILT_IN_MEMCPY:
10985 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10986 type, ignore, /*endp=*/0);
10987
10988 case BUILT_IN_MEMPCPY:
10989 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10990 type, ignore, /*endp=*/1);
10991
10992 case BUILT_IN_MEMMOVE:
10993 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10994 type, ignore, /*endp=*/3);
10995
10996 case BUILT_IN_STRNCAT:
10997 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10998
10999 case BUILT_IN_STRNCPY:
11000 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
11001
11002 case BUILT_IN_STRNCMP:
11003 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
11004
11005 case BUILT_IN_MEMCHR:
11006 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
11007
11008 case BUILT_IN_BCMP:
11009 case BUILT_IN_MEMCMP:
11010       return fold_builtin_memcmp (loc, arg0, arg1, arg2);
11011
11012 case BUILT_IN_SPRINTF:
11013 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
11014
11015 case BUILT_IN_SNPRINTF:
11016 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
11017
11018 case BUILT_IN_STRCPY_CHK:
11019 case BUILT_IN_STPCPY_CHK:
11020 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
11021 ignore, fcode);
11022
11023 case BUILT_IN_STRCAT_CHK:
11024 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
11025
11026 case BUILT_IN_PRINTF_CHK:
11027 case BUILT_IN_VPRINTF_CHK:
11028 if (!validate_arg (arg0, INTEGER_TYPE)
11029 || TREE_SIDE_EFFECTS (arg0))
11030 return NULL_TREE;
11031 else
11032 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
11033 break;
11034
11035 case BUILT_IN_FPRINTF:
11036 case BUILT_IN_FPRINTF_UNLOCKED:
11037 case BUILT_IN_VFPRINTF:
11038 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
11039 ignore, fcode);
11040
11041 case BUILT_IN_FPRINTF_CHK:
11042 case BUILT_IN_VFPRINTF_CHK:
11043 if (!validate_arg (arg1, INTEGER_TYPE)
11044 || TREE_SIDE_EFFECTS (arg1))
11045 return NULL_TREE;
11046 else
11047 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11048 ignore, fcode);
11049
11050 default:
11051 break;
11052 }
11053 return NULL_TREE;
11054 }
11055
11056 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11057 ARG2, and ARG3. IGNORE is true if the result of the function call is
11058 ignored. This function returns NULL_TREE if no simplification was
11059 possible. */
11060
11061 static tree
11062 fold_builtin_4 (location_t loc, tree fndecl,
11063 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
11064 {
11065 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11066
11067 switch (fcode)
11068 {
11069 case BUILT_IN_MEMCPY_CHK:
11070 case BUILT_IN_MEMPCPY_CHK:
11071 case BUILT_IN_MEMMOVE_CHK:
11072 case BUILT_IN_MEMSET_CHK:
11073 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11074 NULL_TREE, ignore,
11075 DECL_FUNCTION_CODE (fndecl));
11076
11077 case BUILT_IN_STRNCPY_CHK:
11078 case BUILT_IN_STPNCPY_CHK:
11079 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
11080 ignore, fcode);
11081
11082 case BUILT_IN_STRNCAT_CHK:
11083 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11084
11085 case BUILT_IN_SNPRINTF:
11086 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
11087
11088 case BUILT_IN_FPRINTF_CHK:
11089 case BUILT_IN_VFPRINTF_CHK:
11090 if (!validate_arg (arg1, INTEGER_TYPE)
11091 || TREE_SIDE_EFFECTS (arg1))
11092 return NULL_TREE;
11093 else
11094 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11095 ignore, fcode);
11096 break;
11097
11098 default:
11099 break;
11100 }
11101 return NULL_TREE;
11102 }
11103
11104 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11105 arguments, where NARGS <= 4. IGNORE is true if the result of the
11106 function call is ignored. This function returns NULL_TREE if no
11107 simplification was possible. Note that this only folds builtins with
11108 fixed argument patterns. Foldings that do varargs-to-varargs
11109 transformations, or that match calls with more than 4 arguments,
11110 need to be handled with fold_builtin_varargs instead. */
11111
11112 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11113
11114 static tree
11115 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11116 {
11117 tree ret = NULL_TREE;
11118
11119 switch (nargs)
11120 {
11121 case 0:
11122 ret = fold_builtin_0 (loc, fndecl, ignore);
11123 break;
11124 case 1:
11125 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11126 break;
11127 case 2:
11128 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11129 break;
11130 case 3:
11131 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11132 break;
11133 case 4:
11134 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11135 ignore);
11136 break;
11137 default:
11138 break;
11139 }
11140 if (ret)
11141 {
11142 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11143 SET_EXPR_LOCATION (ret, loc);
11144 TREE_NO_WARNING (ret) = 1;
11145 return ret;
11146 }
11147 return NULL_TREE;
11148 }
11149
11150 /* Builtins with folding operations that operate on "..." arguments
11151 need special handling; we need to store the arguments in a convenient
11152 data structure before attempting any folding. Fortunately there are
11153 only a few builtins that fall into this category. FNDECL is the
11154 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11155 result of the function call is ignored. */
11156
11157 static tree
11158 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11159 bool ignore ATTRIBUTE_UNUSED)
11160 {
11161 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11162 tree ret = NULL_TREE;
11163
11164 switch (fcode)
11165 {
11166 case BUILT_IN_SPRINTF_CHK:
11167 case BUILT_IN_VSPRINTF_CHK:
11168 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11169 break;
11170
11171 case BUILT_IN_SNPRINTF_CHK:
11172 case BUILT_IN_VSNPRINTF_CHK:
11173 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11174 break;
11175
11176 case BUILT_IN_FPCLASSIFY:
11177 ret = fold_builtin_fpclassify (loc, exp);
11178 break;
11179
11180 default:
11181 break;
11182 }
11183 if (ret)
11184 {
11185 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11186 SET_EXPR_LOCATION (ret, loc);
11187 TREE_NO_WARNING (ret) = 1;
11188 return ret;
11189 }
11190 return NULL_TREE;
11191 }
11192
11193 /* Return true if FNDECL shouldn't be folded right now.
11194 If a built-in function has an inline attribute always_inline
11195 wrapper, defer folding it after always_inline functions have
11196 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11197 might not be performed. */
11198
11199 bool
11200 avoid_folding_inline_builtin (tree fndecl)
11201 {
11202 return (DECL_DECLARED_INLINE_P (fndecl)
11203 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11204 && cfun
11205 && !cfun->always_inline_functions_inlined
11206 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11207 }
11208
11209 /* A wrapper function for builtin folding that prevents warnings for
11210 "statement without effect" and the like, caused by removing the
11211 call node earlier than the warning is generated. */
11212
11213 tree
11214 fold_call_expr (location_t loc, tree exp, bool ignore)
11215 {
11216 tree ret = NULL_TREE;
11217 tree fndecl = get_callee_fndecl (exp);
11218 if (fndecl
11219 && TREE_CODE (fndecl) == FUNCTION_DECL
11220 && DECL_BUILT_IN (fndecl)
11221 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11222 yet. Defer folding until we see all the arguments
11223 (after inlining). */
11224 && !CALL_EXPR_VA_ARG_PACK (exp))
11225 {
11226 int nargs = call_expr_nargs (exp);
11227
11228 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11229 	 instead the last argument is __builtin_va_arg_pack ().  Defer folding
11230 even in that case, until arguments are finalized. */
11231 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11232 {
11233 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11234 if (fndecl2
11235 && TREE_CODE (fndecl2) == FUNCTION_DECL
11236 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11237 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11238 return NULL_TREE;
11239 }
11240
11241 if (avoid_folding_inline_builtin (fndecl))
11242 return NULL_TREE;
11243
11244 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11245 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11246 CALL_EXPR_ARGP (exp), ignore);
11247 else
11248 {
11249 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11250 {
11251 tree *args = CALL_EXPR_ARGP (exp);
11252 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11253 }
11254 if (!ret)
11255 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11256 if (ret)
11257 return ret;
11258 }
11259 }
11260 return NULL_TREE;
11261 }
11262
11263 /* Conveniently construct a function call expression. FNDECL names the
11264 function to be called and N arguments are passed in the array
11265 ARGARRAY. */
11266
11267 tree
11268 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11269 {
11270 tree fntype = TREE_TYPE (fndecl);
11271 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11272
11273 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11274 }
11275
11276 /* Conveniently construct a function call expression. FNDECL names the
11277 function to be called and the arguments are passed in the vector
11278 VEC. */
11279
11280 tree
11281 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11282 {
11283 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11284 vec_safe_address (vec));
11285 }
11286
11287
11288 /* Conveniently construct a function call expression. FNDECL names the
11289 function to be called, N is the number of arguments, and the "..."
11290 parameters are the argument expressions. */
11291
11292 tree
11293 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11294 {
11295 va_list ap;
11296 tree *argarray = XALLOCAVEC (tree, n);
11297 int i;
11298
11299 va_start (ap, n);
11300 for (i = 0; i < n; i++)
11301 argarray[i] = va_arg (ap, tree);
11302 va_end (ap);
11303 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11304 }
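
/* Usage sketch (mirroring calls made elsewhere in this file): building
   a two-argument call to an implicitly declared builtin:

     tree fn = builtin_decl_implicit (BUILT_IN_STRCHR);
     if (fn)
       call = build_call_expr_loc (loc, fn, 2, s1,
				   build_int_cst (integer_type_node, c));

   which yields the CALL_EXPR strchr (s1, c) carrying location LOC.  */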
11305
11306 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11307 varargs macros aren't supported by all bootstrap compilers. */
11308
11309 tree
11310 build_call_expr (tree fndecl, int n, ...)
11311 {
11312 va_list ap;
11313 tree *argarray = XALLOCAVEC (tree, n);
11314 int i;
11315
11316 va_start (ap, n);
11317 for (i = 0; i < n; i++)
11318 argarray[i] = va_arg (ap, tree);
11319 va_end (ap);
11320 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11321 }
11322
11323 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11324 N arguments are passed in the array ARGARRAY. */
11325
11326 tree
11327 fold_builtin_call_array (location_t loc, tree type,
11328 tree fn,
11329 int n,
11330 tree *argarray)
11331 {
11332 tree ret = NULL_TREE;
11333 tree exp;
11334
11335 if (TREE_CODE (fn) == ADDR_EXPR)
11336 {
11337 tree fndecl = TREE_OPERAND (fn, 0);
11338 if (TREE_CODE (fndecl) == FUNCTION_DECL
11339 && DECL_BUILT_IN (fndecl))
11340 {
11341 /* If last argument is __builtin_va_arg_pack (), arguments to this
11342 function are not finalized yet. Defer folding until they are. */
11343 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11344 {
11345 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11346 if (fndecl2
11347 && TREE_CODE (fndecl2) == FUNCTION_DECL
11348 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11349 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11350 return build_call_array_loc (loc, type, fn, n, argarray);
11351 }
11352 if (avoid_folding_inline_builtin (fndecl))
11353 return build_call_array_loc (loc, type, fn, n, argarray);
11354 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11355 {
11356 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11357 if (ret)
11358 return ret;
11359
11360 return build_call_array_loc (loc, type, fn, n, argarray);
11361 }
11362 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11363 {
11364 /* First try the transformations that don't require consing up
11365 an exp. */
11366 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11367 if (ret)
11368 return ret;
11369 }
11370
11371 /* If we got this far, we need to build an exp. */
11372 exp = build_call_array_loc (loc, type, fn, n, argarray);
11373 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11374 return ret ? ret : exp;
11375 }
11376 }
11377
11378 return build_call_array_loc (loc, type, fn, n, argarray);
11379 }
11380
11381 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11382 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11383 of arguments in ARGS to be omitted. OLDNARGS is the number of
11384 elements in ARGS. */
11385
11386 static tree
11387 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11388 int skip, tree fndecl, int n, va_list newargs)
11389 {
11390 int nargs = oldnargs - skip + n;
11391 tree *buffer;
11392
11393 if (n > 0)
11394 {
11395 int i, j;
11396
11397 buffer = XALLOCAVEC (tree, nargs);
11398 for (i = 0; i < n; i++)
11399 buffer[i] = va_arg (newargs, tree);
11400 for (j = skip; j < oldnargs; j++, i++)
11401 buffer[i] = args[j];
11402 }
11403 else
11404 buffer = args + skip;
11405
11406 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11407 }
11408
11409 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11410 list ARGS along with N new arguments specified as the "..."
11411 parameters. SKIP is the number of arguments in ARGS to be omitted.
11412 OLDNARGS is the number of elements in ARGS. */
11413
11414 static tree
11415 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11416 int skip, tree fndecl, int n, ...)
11417 {
11418 va_list ap;
11419 tree t;
11420
11421 va_start (ap, n);
11422 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11423 va_end (ap);
11424
11425 return t;
11426 }
11427
11428 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11429 along with N new arguments specified as the "..." parameters. SKIP
11430 is the number of arguments in EXP to be omitted. This function is used
11431 to do varargs-to-varargs transformations. */
11432
11433 static tree
11434 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11435 {
11436 va_list ap;
11437 tree t;
11438
11439 va_start (ap, n);
11440 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11441 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11442 va_end (ap);
11443
11444 return t;
11445 }
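
/* Worked example (hypothetical, not from the original sources): if EXP
   represents the call f (a, b, c, d), then

     rewrite_call_expr (loc, exp, /*skip=*/2, g_decl, /*n=*/1, x)

   builds the call g (x, c, d): the first two arguments of EXP are
   omitted, the new argument X is prepended, and the remaining tail
   C, D is reused unchanged.  */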
11446
11447 /* Validate a single argument ARG against a tree code CODE representing
11448 a type. */
11449
11450 static bool
11451 validate_arg (const_tree arg, enum tree_code code)
11452 {
11453 if (!arg)
11454 return false;
11455 else if (code == POINTER_TYPE)
11456 return POINTER_TYPE_P (TREE_TYPE (arg));
11457 else if (code == INTEGER_TYPE)
11458 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11459 return code == TREE_CODE (TREE_TYPE (arg));
11460 }
11461
11462 /* This function validates the types of a function call argument list
11463 against a specified list of tree_codes. If the last specifier is a 0,
11464    that represents an ellipsis, otherwise the last specifier must be a
11465 VOID_TYPE.
11466
11467 This is the GIMPLE version of validate_arglist. Eventually we want to
11468 completely convert builtins.c to work from GIMPLEs and the tree based
11469 validate_arglist will then be removed. */
11470
11471 bool
11472 validate_gimple_arglist (const_gimple call, ...)
11473 {
11474 enum tree_code code;
11475   bool res = false;
11476 va_list ap;
11477 const_tree arg;
11478 size_t i;
11479
11480 va_start (ap, call);
11481 i = 0;
11482
11483 do
11484 {
11485 code = (enum tree_code) va_arg (ap, int);
11486 switch (code)
11487 {
11488 case 0:
11489 	/* This signifies an ellipsis; any further arguments are all ok.  */
11490 res = true;
11491 goto end;
11492 case VOID_TYPE:
11493 	  /* This signifies an endlink; if no arguments remain, return
11494 	     true, otherwise return false.  */
11495 res = (i == gimple_call_num_args (call));
11496 goto end;
11497 default:
11498 /* If no parameters remain or the parameter's code does not
11499 match the specified code, return false. Otherwise continue
11500 checking any remaining arguments. */
11501 arg = gimple_call_arg (call, i++);
11502 if (!validate_arg (arg, code))
11503 goto end;
11504 break;
11505 }
11506 }
11507 while (1);
11508
11509 /* We need gotos here since we can only have one VA_CLOSE in a
11510 function. */
11511 end: ;
11512 va_end (ap);
11513
11514 return res;
11515 }
11516
11517 /* This function validates the types of a function call argument list
11518 against a specified list of tree_codes. If the last specifier is a 0,
11519    that represents an ellipsis, otherwise the last specifier must be a
11520 VOID_TYPE. */
11521
11522 bool
11523 validate_arglist (const_tree callexpr, ...)
11524 {
11525 enum tree_code code;
11526   bool res = false;
11527 va_list ap;
11528 const_call_expr_arg_iterator iter;
11529 const_tree arg;
11530
11531 va_start (ap, callexpr);
11532 init_const_call_expr_arg_iterator (callexpr, &iter);
11533
11534 do
11535 {
11536 code = (enum tree_code) va_arg (ap, int);
11537 switch (code)
11538 {
11539 case 0:
11540 	/* This signifies an ellipsis; any further arguments are all ok.  */
11541 res = true;
11542 goto end;
11543 case VOID_TYPE:
11544 	  /* This signifies an endlink; if no arguments remain, return
11545 	     true, otherwise return false.  */
11546 res = !more_const_call_expr_args_p (&iter);
11547 goto end;
11548 default:
11549 /* If no parameters remain or the parameter's code does not
11550 match the specified code, return false. Otherwise continue
11551 checking any remaining arguments. */
11552 arg = next_const_call_expr_arg (&iter);
11553 if (!validate_arg (arg, code))
11554 goto end;
11555 break;
11556 }
11557 }
11558 while (1);
11559
11560 /* We need gotos here since we can only have one VA_CLOSE in a
11561 function. */
11562 end: ;
11563 va_end (ap);
11564
11565 return res;
11566 }
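
/* Usage sketch (not from the original sources): checking that a
   strncpy-style call really takes (pointer, pointer, integer) and
   nothing more:

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			    INTEGER_TYPE, VOID_TYPE))
       return NULL_TREE;

   The trailing VOID_TYPE requires that no further arguments remain;
   ending the list with 0 instead would accept any extra arguments.  */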
11567
11568 /* Default target-specific builtin expander that does nothing. */
11569
11570 rtx
11571 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11572 rtx target ATTRIBUTE_UNUSED,
11573 rtx subtarget ATTRIBUTE_UNUSED,
11574 enum machine_mode mode ATTRIBUTE_UNUSED,
11575 int ignore ATTRIBUTE_UNUSED)
11576 {
11577 return NULL_RTX;
11578 }
11579
11580 /* Returns true if EXP represents data that would potentially reside
11581 in a readonly section. */
11582
11583 static bool
11584 readonly_data_expr (tree exp)
11585 {
11586 STRIP_NOPS (exp);
11587
11588 if (TREE_CODE (exp) != ADDR_EXPR)
11589 return false;
11590
11591 exp = get_base_address (TREE_OPERAND (exp, 0));
11592 if (!exp)
11593 return false;
11594
11595 /* Make sure we call decl_readonly_section only for trees it
11596 can handle (since it returns true for everything it doesn't
11597 understand). */
11598 if (TREE_CODE (exp) == STRING_CST
11599 || TREE_CODE (exp) == CONSTRUCTOR
11600 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11601 return decl_readonly_section (exp, 0);
11602 else
11603 return false;
11604 }
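
/* For illustration (not from the original sources): the address of a
   string literal, of a CONSTRUCTOR, or of a TREE_STATIC variable that
   decl_readonly_section places in a read-only section makes
   readonly_data_expr return true; the address of an automatic variable
   yields false.  */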
11605
11606 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11607 to the call, and TYPE is its return type.
11608
11609 Return NULL_TREE if no simplification was possible, otherwise return the
11610 simplified form of the call as a tree.
11611
11612 The simplified form may be a constant or other expression which
11613 computes the same value, but in a more efficient manner (including
11614 calls to other builtin functions).
11615
11616 The call may contain arguments which need to be evaluated, but
11617 which are not useful to determine the result of the call. In
11618 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11619 COMPOUND_EXPR will be an argument which must be evaluated.
11620 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11621 COMPOUND_EXPR in the chain will contain the tree for the simplified
11622 form of the builtin function call. */
11623
11624 static tree
11625 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11626 {
11627 if (!validate_arg (s1, POINTER_TYPE)
11628 || !validate_arg (s2, POINTER_TYPE))
11629 return NULL_TREE;
11630 else
11631 {
11632 tree fn;
11633 const char *p1, *p2;
11634
11635 p2 = c_getstr (s2);
11636 if (p2 == NULL)
11637 return NULL_TREE;
11638
11639 p1 = c_getstr (s1);
11640 if (p1 != NULL)
11641 {
11642 const char *r = strstr (p1, p2);
11643 tree tem;
11644
11645 if (r == NULL)
11646 return build_int_cst (TREE_TYPE (s1), 0);
11647
11648 /* Return an offset into the constant string argument. */
11649 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11650 return fold_convert_loc (loc, type, tem);
11651 }
11652
11653 /* The argument is const char *, and the result is char *, so we need
11654 a type conversion here to avoid a warning. */
11655 if (p2[0] == '\0')
11656 return fold_convert_loc (loc, type, s1);
11657
11658 if (p2[1] != '\0')
11659 return NULL_TREE;
11660
11661 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11662 if (!fn)
11663 return NULL_TREE;
11664
11665 /* New argument list transforming strstr(s1, s2) to
11666 strchr(s1, s2[0]). */
11667 return build_call_expr_loc (loc, fn, 2, s1,
11668 build_int_cst (integer_type_node, p2[0]));
11669 }
11670 }
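
/* Illustrative folds (not from the original sources), with S an
   arbitrary char * expression:

     strstr ("abcde", "cd")  ->  &"abcde"[2]
     strstr (S, "")          ->  (char *) S
     strstr (S, "c")         ->  strchr (S, 'c')

   A needle of two or more characters with a non-constant haystack is
   left for the library call.  */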
11671
11672 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11673 the call, and TYPE is its return type.
11674
11675 Return NULL_TREE if no simplification was possible, otherwise return the
11676 simplified form of the call as a tree.
11677
11678 The simplified form may be a constant or other expression which
11679 computes the same value, but in a more efficient manner (including
11680 calls to other builtin functions).
11681
11682 The call may contain arguments which need to be evaluated, but
11683 which are not useful to determine the result of the call. In
11684 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11685 COMPOUND_EXPR will be an argument which must be evaluated.
11686 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11687 COMPOUND_EXPR in the chain will contain the tree for the simplified
11688 form of the builtin function call. */
11689
11690 static tree
11691 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11692 {
11693 if (!validate_arg (s1, POINTER_TYPE)
11694 || !validate_arg (s2, INTEGER_TYPE))
11695 return NULL_TREE;
11696 else
11697 {
11698 const char *p1;
11699
11700 if (TREE_CODE (s2) != INTEGER_CST)
11701 return NULL_TREE;
11702
11703 p1 = c_getstr (s1);
11704 if (p1 != NULL)
11705 {
11706 char c;
11707 const char *r;
11708 tree tem;
11709
11710 if (target_char_cast (s2, &c))
11711 return NULL_TREE;
11712
11713 r = strchr (p1, c);
11714
11715 if (r == NULL)
11716 return build_int_cst (TREE_TYPE (s1), 0);
11717
11718 /* Return an offset into the constant string argument. */
11719 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11720 return fold_convert_loc (loc, type, tem);
11721 }
11722 return NULL_TREE;
11723 }
11724 }
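
/* Illustrative folds (not from the original sources); only a constant
   first argument simplifies here:

     strchr ("hello", 'l')  ->  &"hello"[2]
     strchr ("hello", 'z')  ->  (char *) 0  */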
11725
11726 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11727 the call, and TYPE is its return type.
11728
11729 Return NULL_TREE if no simplification was possible, otherwise return the
11730 simplified form of the call as a tree.
11731
11732 The simplified form may be a constant or other expression which
11733 computes the same value, but in a more efficient manner (including
11734 calls to other builtin functions).
11735
11736 The call may contain arguments which need to be evaluated, but
11737 which are not useful to determine the result of the call. In
11738 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11739 COMPOUND_EXPR will be an argument which must be evaluated.
11740 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11741 COMPOUND_EXPR in the chain will contain the tree for the simplified
11742 form of the builtin function call. */
11743
11744 static tree
11745 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11746 {
11747 if (!validate_arg (s1, POINTER_TYPE)
11748 || !validate_arg (s2, INTEGER_TYPE))
11749 return NULL_TREE;
11750 else
11751 {
11752 tree fn;
11753 const char *p1;
11754
11755 if (TREE_CODE (s2) != INTEGER_CST)
11756 return NULL_TREE;
11757
11758 p1 = c_getstr (s1);
11759 if (p1 != NULL)
11760 {
11761 char c;
11762 const char *r;
11763 tree tem;
11764
11765 if (target_char_cast (s2, &c))
11766 return NULL_TREE;
11767
11768 r = strrchr (p1, c);
11769
11770 if (r == NULL)
11771 return build_int_cst (TREE_TYPE (s1), 0);
11772
11773 /* Return an offset into the constant string argument. */
11774 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11775 return fold_convert_loc (loc, type, tem);
11776 }
11777
11778 if (! integer_zerop (s2))
11779 return NULL_TREE;
11780
11781 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11782 if (!fn)
11783 return NULL_TREE;
11784
11785 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11786 return build_call_expr_loc (loc, fn, 2, s1, s2);
11787 }
11788 }
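
/* Illustrative folds (not from the original sources), with S an
   arbitrary char * expression:

     strrchr ("hello", 'l')  ->  &"hello"[3]
     strrchr (S, '\0')       ->  strchr (S, '\0')

   The second rewrite is safe because the terminating NUL occurs exactly
   once, so searching from either end finds the same position.  */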
11789
11790 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11791 to the call, and TYPE is its return type.
11792
11793 Return NULL_TREE if no simplification was possible, otherwise return the
11794 simplified form of the call as a tree.
11795
11796 The simplified form may be a constant or other expression which
11797 computes the same value, but in a more efficient manner (including
11798 calls to other builtin functions).
11799
11800 The call may contain arguments which need to be evaluated, but
11801 which are not useful to determine the result of the call. In
11802 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11803 COMPOUND_EXPR will be an argument which must be evaluated.
11804 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11805 COMPOUND_EXPR in the chain will contain the tree for the simplified
11806 form of the builtin function call. */
11807
11808 static tree
11809 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11810 {
11811 if (!validate_arg (s1, POINTER_TYPE)
11812 || !validate_arg (s2, POINTER_TYPE))
11813 return NULL_TREE;
11814 else
11815 {
11816 tree fn;
11817 const char *p1, *p2;
11818
11819 p2 = c_getstr (s2);
11820 if (p2 == NULL)
11821 return NULL_TREE;
11822
11823 p1 = c_getstr (s1);
11824 if (p1 != NULL)
11825 {
11826 const char *r = strpbrk (p1, p2);
11827 tree tem;
11828
11829 if (r == NULL)
11830 return build_int_cst (TREE_TYPE (s1), 0);
11831
11832 /* Return an offset into the constant string argument. */
11833 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11834 return fold_convert_loc (loc, type, tem);
11835 }
11836
11837 if (p2[0] == '\0')
11838 /* strpbrk(x, "") == NULL.
11839 Evaluate and ignore s1 in case it had side-effects. */
11840 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11841
11842 if (p2[1] != '\0')
11843 return NULL_TREE; /* Really call strpbrk. */
11844
11845 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11846 if (!fn)
11847 return NULL_TREE;
11848
11849 /* New argument list transforming strpbrk(s1, s2) to
11850 strchr(s1, s2[0]). */
11851 return build_call_expr_loc (loc, fn, 2, s1,
11852 build_int_cst (integer_type_node, p2[0]));
11853 }
11854 }
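
/* Illustrative folds (not from the original sources), with S an
   arbitrary char * expression:

     strpbrk ("abcde", "dc")  ->  &"abcde"[2]
     strpbrk (S, "")          ->  (char *) 0, with S still evaluated
     strpbrk (S, "c")         ->  strchr (S, 'c')  */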
11855
11856 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11857 to the call.
11858
11859 Return NULL_TREE if no simplification was possible, otherwise return the
11860 simplified form of the call as a tree.
11861
11862 The simplified form may be a constant or other expression which
11863 computes the same value, but in a more efficient manner (including
11864 calls to other builtin functions).
11865
11866 The call may contain arguments which need to be evaluated, but
11867 which are not useful to determine the result of the call. In
11868 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11869 COMPOUND_EXPR will be an argument which must be evaluated.
11870 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11871 COMPOUND_EXPR in the chain will contain the tree for the simplified
11872 form of the builtin function call. */
11873
11874 static tree
11875 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11876 {
11877 if (!validate_arg (dst, POINTER_TYPE)
11878 || !validate_arg (src, POINTER_TYPE))
11879 return NULL_TREE;
11880 else
11881 {
11882 const char *p = c_getstr (src);
11883
11884 /* If the string length is zero, return the dst parameter. */
11885 if (p && *p == '\0')
11886 return dst;
11887
11888 if (optimize_insn_for_speed_p ())
11889 {
11890 /* See if we can store by pieces into (dst + strlen(dst)). */
11891 tree newdst, call;
11892 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11893 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11894
11895 if (!strlen_fn || !strcpy_fn)
11896 return NULL_TREE;
11897
11898 	  /* If the target has no movstr pattern, we only want to emit the
11899 	     strcpy call when the length of the source string is computable;
11900 	     the call can then be turned into a memcpy, which is probably
11901 	     expanded later into a sequence of mov instructions.  If movstr
11902 	     instructions are available, emitting strcpy calls is fine.  */
11903 if (!HAVE_movstr)
11904 {
11905 tree len = c_strlen (src, 1);
11906 if (! len || TREE_SIDE_EFFECTS (len))
11907 return NULL_TREE;
11908 }
11909
11910 /* Stabilize the argument list. */
11911 dst = builtin_save_expr (dst);
11912
11913 /* Create strlen (dst). */
11914 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11915 /* Create (dst p+ strlen (dst)). */
11916
11917 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11918 newdst = builtin_save_expr (newdst);
11919
11920 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11921 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11922 }
11923 return NULL_TREE;
11924 }
11925 }
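
/* Sketch (not from the original sources) of the speed-optimized rewrite
   performed above:

     strcat (d, s)  ->  (strcpy (d + strlen (d), s), d)

   with D wrapped in builtin_save_expr so that it is evaluated only
   once.  */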
11926
11927 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11928 arguments to the call.
11929
11930 Return NULL_TREE if no simplification was possible, otherwise return the
11931 simplified form of the call as a tree.
11932
11933 The simplified form may be a constant or other expression which
11934 computes the same value, but in a more efficient manner (including
11935 calls to other builtin functions).
11936
11937 The call may contain arguments which need to be evaluated, but
11938 which are not useful to determine the result of the call. In
11939 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11940 COMPOUND_EXPR will be an argument which must be evaluated.
11941 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11942 COMPOUND_EXPR in the chain will contain the tree for the simplified
11943 form of the builtin function call. */
11944
11945 static tree
11946 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11947 {
11948 if (!validate_arg (dst, POINTER_TYPE)
11949 || !validate_arg (src, POINTER_TYPE)
11950 || !validate_arg (len, INTEGER_TYPE))
11951 return NULL_TREE;
11952 else
11953 {
11954 const char *p = c_getstr (src);
11955
11956 /* If the requested length is zero, or the src parameter string
11957 length is zero, return the dst parameter. */
11958 if (integer_zerop (len) || (p && *p == '\0'))
11959 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11960
11961 /* If the requested len is greater than or equal to the string
11962 length, call strcat. */
11963 if (TREE_CODE (len) == INTEGER_CST && p
11964 && compare_tree_int (len, strlen (p)) >= 0)
11965 {
11966 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11967
11968 /* If the replacement _DECL isn't initialized, don't do the
11969 transformation. */
11970 if (!fn)
11971 return NULL_TREE;
11972
11973 return build_call_expr_loc (loc, fn, 2, dst, src);
11974 }
11975 return NULL_TREE;
11976 }
11977 }
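
/* Illustrative folds (not from the original sources):

     strncat (d, s, 0)      ->  d, with S and the length still evaluated
     strncat (d, "abc", 5)  ->  strcat (d, "abc")

   since a bound no smaller than strlen (src) makes strncat behave
   exactly like strcat.  */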
11978
11979 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11980 to the call.
11981
11982 Return NULL_TREE if no simplification was possible, otherwise return the
11983 simplified form of the call as a tree.
11984
11985 The simplified form may be a constant or other expression which
11986 computes the same value, but in a more efficient manner (including
11987 calls to other builtin functions).
11988
11989 The call may contain arguments which need to be evaluated, but
11990 which are not useful to determine the result of the call. In
11991 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11992 COMPOUND_EXPR will be an argument which must be evaluated.
11993 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11994 COMPOUND_EXPR in the chain will contain the tree for the simplified
11995 form of the builtin function call. */
11996
11997 static tree
11998 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11999 {
12000 if (!validate_arg (s1, POINTER_TYPE)
12001 || !validate_arg (s2, POINTER_TYPE))
12002 return NULL_TREE;
12003 else
12004 {
12005 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
12006
12007 /* If both arguments are constants, evaluate at compile-time. */
12008 if (p1 && p2)
12009 {
12010 const size_t r = strspn (p1, p2);
12011 return build_int_cst (size_type_node, r);
12012 }
12013
12014       /* If either argument is "", the result is zero.  */
12015 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
12016 /* Evaluate and ignore both arguments in case either one has
12017 side-effects. */
12018 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
12019 s1, s2);
12020 return NULL_TREE;
12021 }
12022 }
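
/* Illustrative folds (not from the original sources):

     strspn ("aab", "ab")  ->  3, computed at compile time
     strspn (s1, "")       ->  0, with both operands still evaluated  */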
12023
12024 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
12025 to the call.
12026
12027 Return NULL_TREE if no simplification was possible, otherwise return the
12028 simplified form of the call as a tree.
12029
12030 The simplified form may be a constant or other expression which
12031 computes the same value, but in a more efficient manner (including
12032 calls to other builtin functions).
12033
12034 The call may contain arguments which need to be evaluated, but
12035 which are not useful to determine the result of the call. In
12036 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12037 COMPOUND_EXPR will be an argument which must be evaluated.
12038 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12039 COMPOUND_EXPR in the chain will contain the tree for the simplified
12040 form of the builtin function call. */
12041
12042 static tree
12043 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
12044 {
12045 if (!validate_arg (s1, POINTER_TYPE)
12046 || !validate_arg (s2, POINTER_TYPE))
12047 return NULL_TREE;
12048 else
12049 {
12050 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
12051
12052 /* If both arguments are constants, evaluate at compile-time. */
12053 if (p1 && p2)
12054 {
12055 const size_t r = strcspn (p1, p2);
12056 return build_int_cst (size_type_node, r);
12057 }
12058
12059       /* If the first argument is "", the result is zero.  */
12060 if (p1 && *p1 == '\0')
12061 {
12062 /* Evaluate and ignore argument s2 in case it has
12063 side-effects. */
12064 return omit_one_operand_loc (loc, size_type_node,
12065 size_zero_node, s2);
12066 }
12067
12068 /* If the second argument is "", return __builtin_strlen(s1). */
12069 if (p2 && *p2 == '\0')
12070 {
12071 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
12072
12073 /* If the replacement _DECL isn't initialized, don't do the
12074 transformation. */
12075 if (!fn)
12076 return NULL_TREE;
12077
12078 return build_call_expr_loc (loc, fn, 1, s1);
12079 }
12080 return NULL_TREE;
12081 }
12082 }
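
/* Illustrative folds (not from the original sources):

     strcspn ("abcde", "dc")  ->  2, computed at compile time
     strcspn ("", s2)         ->  0, with S2 still evaluated
     strcspn (s1, "")         ->  strlen (s1)  */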
12083
12084 /* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
12085    to the call.  IGNORE is true if the value returned
12086    by the builtin will be ignored.  UNLOCKED is true if this is
12087    actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
12088    the known length of the string.  Return NULL_TREE if no simplification
12089    was possible.  */
12090
12091 tree
12092 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
12093 bool ignore, bool unlocked, tree len)
12094 {
12095 /* If we're using an unlocked function, assume the other unlocked
12096 functions exist explicitly. */
12097 tree const fn_fputc = (unlocked
12098 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
12099 : builtin_decl_implicit (BUILT_IN_FPUTC));
12100 tree const fn_fwrite = (unlocked
12101 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
12102 : builtin_decl_implicit (BUILT_IN_FWRITE));
12103
12104 /* If the return value is used, don't do the transformation. */
12105 if (!ignore)
12106 return NULL_TREE;
12107
12108 /* Verify the arguments in the original call. */
12109 if (!validate_arg (arg0, POINTER_TYPE)
12110 || !validate_arg (arg1, POINTER_TYPE))
12111 return NULL_TREE;
12112
12113 if (! len)
12114 len = c_strlen (arg0, 0);
12115
12116 /* Get the length of the string passed to fputs. If the length
12117 can't be determined, punt. */
12118 if (!len
12119 || TREE_CODE (len) != INTEGER_CST)
12120 return NULL_TREE;
12121
12122 switch (compare_tree_int (len, 1))
12123 {
12124     case -1: /* length is 0, delete the call entirely.  */
12125       return omit_one_operand_loc (loc, integer_type_node,
12126 				   integer_zero_node, arg1);
12127
12128 case 0: /* length is 1, call fputc. */
12129 {
12130 const char *p = c_getstr (arg0);
12131
12132 if (p != NULL)
12133 {
12134 if (fn_fputc)
12135 return build_call_expr_loc (loc, fn_fputc, 2,
12136 build_int_cst
12137 (integer_type_node, p[0]), arg1);
12138 else
12139 return NULL_TREE;
12140 }
12141 }
12142 /* FALLTHROUGH */
12143 case 1: /* length is greater than 1, call fwrite. */
12144 {
12145 /* If optimizing for size, keep fputs. */
12146 if (optimize_function_for_size_p (cfun))
12147 return NULL_TREE;
12148 /* New argument list transforming fputs (string, stream) to
12149 fwrite (string, 1, len, stream). */
12150 if (fn_fwrite)
12151 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12152 size_one_node, len, arg1);
12153 else
12154 return NULL_TREE;
12155 }
12156 default:
12157 gcc_unreachable ();
12158 }
12159 return NULL_TREE;
12160 }
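
/* Editorial sketch (not GCC source) of the fputs transformations above,
   which apply only when the return value is ignored; hypothetical
   function name, kept under "#if 0". */
#if 0
#include <stdio.h>

void
example_fputs_fold (FILE *fp)
{
  fputs ("a", fp);   /* length 1: becomes fputc ('a', fp). */
  fputs ("abc", fp); /* length > 1: becomes fwrite ("abc", 1, 3, fp),
			unless optimizing for size. */
  fputs ("", fp);    /* length 0: call deleted, FP still evaluated. */
}
#endif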
12161
12162 /* Fold the next_arg or va_start call EXP. Returns true if an error was
12163 produced, false otherwise. This is done so that we don't output the error
12164 or warning twice or three times. */
12165
12166 bool
12167 fold_builtin_next_arg (tree exp, bool va_start_p)
12168 {
12169 tree fntype = TREE_TYPE (current_function_decl);
12170 int nargs = call_expr_nargs (exp);
12171 tree arg;
12172 /* There is a good chance the current input_location points inside the
12173 definition of the va_start macro (perhaps on the token for the
12174 builtin) in a system header, so warnings will not be emitted.
12175 Use the location in real source code. */
12176 source_location current_location =
12177 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12178 NULL);
12179
12180 if (!stdarg_p (fntype))
12181 {
12182 error ("%<va_start%> used in function with fixed args");
12183 return true;
12184 }
12185
12186 if (va_start_p)
12187 {
12188 if (nargs != 2)
12189 {
12190 error ("wrong number of arguments to function %<va_start%>");
12191 return true;
12192 }
12193 arg = CALL_EXPR_ARG (exp, 1);
12194 }
12195 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
12196 when we checked the arguments and if needed issued a warning. */
12197 else
12198 {
12199 if (nargs == 0)
12200 {
12201 /* Evidently an out of date version of <stdarg.h>; can't validate
12202 va_start's second argument, but can still work as intended. */
12203 warning_at (current_location,
12204 OPT_Wvarargs,
12205 "%<__builtin_next_arg%> called without an argument");
12206 return true;
12207 }
12208 else if (nargs > 1)
12209 {
12210 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12211 return true;
12212 }
12213 arg = CALL_EXPR_ARG (exp, 0);
12214 }
12215
12216 if (TREE_CODE (arg) == SSA_NAME)
12217 arg = SSA_NAME_VAR (arg);
12218
12219 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12220 or __builtin_next_arg (0) the first time we see it, after checking
12221 the arguments and if needed issuing a warning. */
12222 if (!integer_zerop (arg))
12223 {
12224 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12225
12226 /* Strip off all nops for the sake of the comparison. This
12227 is not quite the same as STRIP_NOPS. It does more.
12228 We must also strip off INDIRECT_EXPR for C++ reference
12229 parameters. */
12230 while (CONVERT_EXPR_P (arg)
12231 || TREE_CODE (arg) == INDIRECT_REF)
12232 arg = TREE_OPERAND (arg, 0);
12233 if (arg != last_parm)
12234 {
12235 /* FIXME: Sometimes with the tree optimizers we can end up with
12236 something other than the last argument even though the user
12237 used the last argument. We just warn and treat the arg as the
12238 last argument so that we will not get wrong code because of
12239 it. */
12240 warning_at (current_location,
12241 OPT_Wvarargs,
12242 "second parameter of %<va_start%> not last named argument");
12243 }
12244
12245 /* Undefined by C99 7.15.1.4p4 (va_start):
12246 "If the parameter parmN is declared with the register storage
12247 class, with a function or array type, or with a type that is
12248 not compatible with the type that results after application of
12249 the default argument promotions, the behavior is undefined."
12250 */
12251 else if (DECL_REGISTER (arg))
12252 {
12253 warning_at (current_location,
12254 OPT_Wvarargs,
12255 "undefined behaviour when second parameter of "
12256 "%<va_start%> is declared with %<register%> storage");
12257 }
12258
12259 /* We want to verify the second parameter just once before the tree
12260 optimizers are run and then avoid keeping it in the tree,
12261 as otherwise we could warn even for correct code like:
12262 void foo (int i, ...)
12263 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12264 if (va_start_p)
12265 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12266 else
12267 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12268 }
12269 return false;
12270 }
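
/* Editorial sketch (not GCC source) of the user-level cases the checker
   above diagnoses; hypothetical functions, kept under "#if 0". */
#if 0
#include <stdarg.h>

int
sum (int n, ...) /* correct: N is the last named parameter. */
{
  va_list ap;
  int i, total = 0;
  va_start (ap, n);
  for (i = 0; i < n; i++)
    total += va_arg (ap, int);
  va_end (ap);
  return total;
}

int
bad (int a, int b, ...) /* A is not the last named parameter. */
{
  va_list ap;
  va_start (ap, a); /* -Wvarargs: "second parameter of 'va_start'
		       not last named argument". */
  va_end (ap);
  return b;
}
#endif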
12271
12272
12273 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12274 ORIG may be null if this is a 2-argument call. We don't attempt to
12275 simplify calls with more than 3 arguments.
12276
12277 Return NULL_TREE if no simplification was possible, otherwise return the
12278 simplified form of the call as a tree. If IGNORED is true, it means that
12279 the caller does not use the returned value of the function. */
12280
12281 static tree
12282 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12283 tree orig, int ignored)
12284 {
12285 tree call, retval;
12286 const char *fmt_str = NULL;
12287
12288 /* Verify the required arguments in the original call. We deal with two
12289 types of sprintf() calls: 'sprintf (str, fmt)' and
12290 'sprintf (dest, "%s", orig)'. */
12291 if (!validate_arg (dest, POINTER_TYPE)
12292 || !validate_arg (fmt, POINTER_TYPE))
12293 return NULL_TREE;
12294 if (orig && !validate_arg (orig, POINTER_TYPE))
12295 return NULL_TREE;
12296
12297 /* Check whether the format is a literal string constant. */
12298 fmt_str = c_getstr (fmt);
12299 if (fmt_str == NULL)
12300 return NULL_TREE;
12301
12302 call = NULL_TREE;
12303 retval = NULL_TREE;
12304
12305 if (!init_target_chars ())
12306 return NULL_TREE;
12307
12308 /* If the format doesn't contain % args or %%, use strcpy. */
12309 if (strchr (fmt_str, target_percent) == NULL)
12310 {
12311 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12312
12313 if (!fn)
12314 return NULL_TREE;
12315
12316 /* Don't optimize sprintf (buf, "abc", ptr++). */
12317 if (orig)
12318 return NULL_TREE;
12319
12320 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12321 'format' is known to contain no % formats. */
12322 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12323 if (!ignored)
12324 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12325 }
12326
12327 /* If the format is "%s", use strcpy if the result isn't used. */
12328 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12329 {
12330 tree fn;
12331 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12332
12333 if (!fn)
12334 return NULL_TREE;
12335
12336 /* Don't crash on sprintf (str1, "%s"). */
12337 if (!orig)
12338 return NULL_TREE;
12339
12340 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12341 if (!ignored)
12342 {
12343 retval = c_strlen (orig, 1);
12344 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12345 return NULL_TREE;
12346 }
12347 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12348 }
12349
12350 if (call && retval)
12351 {
12352 retval = fold_convert_loc
12353 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12354 retval);
12355 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12356 }
12357 else
12358 return call;
12359 }
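
/* Editorial sketch (not GCC source) of the two sprintf rewrites above;
   hypothetical function, kept under "#if 0". */
#if 0
#include <stdio.h>

void
example_sprintf_fold (char *dst, const char *src)
{
  sprintf (dst, "abc");     /* no '%': becomes strcpy (dst, "abc");
			       a used result would fold to the constant 3. */
  sprintf (dst, "%s", src); /* becomes strcpy (dst, src); here the
			       result is unused, so no length is needed. */
}
#endif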
12360
12361 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12362 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12363 attempt to simplify calls with more than 4 arguments.
12364
12365 Return NULL_TREE if no simplification was possible, otherwise return the
12366 simplified form of the call as a tree. If IGNORED is true, it means that
12367 the caller does not use the returned value of the function. */
12368
12369 static tree
12370 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12371 tree orig, int ignored)
12372 {
12373 tree call, retval;
12374 const char *fmt_str = NULL;
12375 unsigned HOST_WIDE_INT destlen;
12376
12377 /* Verify the required arguments in the original call. We deal with two
12378 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12379 'snprintf (dest, cst, "%s", orig)'. */
12380 if (!validate_arg (dest, POINTER_TYPE)
12381 || !validate_arg (destsize, INTEGER_TYPE)
12382 || !validate_arg (fmt, POINTER_TYPE))
12383 return NULL_TREE;
12384 if (orig && !validate_arg (orig, POINTER_TYPE))
12385 return NULL_TREE;
12386
12387 if (!host_integerp (destsize, 1))
12388 return NULL_TREE;
12389
12390 /* Check whether the format is a literal string constant. */
12391 fmt_str = c_getstr (fmt);
12392 if (fmt_str == NULL)
12393 return NULL_TREE;
12394
12395 call = NULL_TREE;
12396 retval = NULL_TREE;
12397
12398 if (!init_target_chars ())
12399 return NULL_TREE;
12400
12401 destlen = tree_low_cst (destsize, 1);
12402
12403 /* If the format doesn't contain % args or %%, use strcpy. */
12404 if (strchr (fmt_str, target_percent) == NULL)
12405 {
12406 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12407 size_t len = strlen (fmt_str);
12408
12409 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12410 if (orig)
12411 return NULL_TREE;
12412
12413 /* We could expand this as
12414 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12415 or to
12416 memcpy (str, fmt_with_nul_at_cstm1, cst);
12417 but in the former case that might increase code size
12418 and in the latter case grow .rodata section too much.
12419 So punt for now. */
12420 if (len >= destlen)
12421 return NULL_TREE;
12422
12423 if (!fn)
12424 return NULL_TREE;
12425
12426 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12427 'format' is known to contain no % formats and
12428 strlen (fmt) < cst. */
12429 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12430
12431 if (!ignored)
12432 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12433 }
12434
12435 /* If the format is "%s", use strcpy if the result isn't used. */
12436 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12437 {
12438 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12439 unsigned HOST_WIDE_INT origlen;
12440
12441 /* Don't crash on snprintf (str1, cst, "%s"). */
12442 if (!orig)
12443 return NULL_TREE;
12444
12445 retval = c_strlen (orig, 1);
12446 if (!retval || !host_integerp (retval, 1))
12447 return NULL_TREE;
12448
12449 origlen = tree_low_cst (retval, 1);
12450 /* We could expand this as
12451 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12452 or to
12453 memcpy (str1, str2_with_nul_at_cstm1, cst);
12454 but in the former case that might increase code size
12455 and in the latter case grow .rodata section too much.
12456 So punt for now. */
12457 if (origlen >= destlen)
12458 return NULL_TREE;
12459
12460 /* Convert snprintf (str1, cst, "%s", str2) into
12461 strcpy (str1, str2) if strlen (str2) < cst. */
12462 if (!fn)
12463 return NULL_TREE;
12464
12465 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12466
12467 if (ignored)
12468 retval = NULL_TREE;
12469 }
12470
12471 if (call && retval)
12472 {
12473 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12474 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12475 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12476 }
12477 else
12478 return call;
12479 }
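
/* Editorial sketch (not GCC source) of when the snprintf folding above
   fires: only if the known length is strictly smaller than DESTSIZE.
   Hypothetical function, kept under "#if 0". */
#if 0
#include <stdio.h>

void
example_snprintf_fold (char buf[8])
{
  snprintf (buf, 8, "abc");       /* strlen ("abc") == 3 < 8: becomes
				     strcpy (buf, "abc"). */
  snprintf (buf, 8, "truncated"); /* strlen () == 9 >= 8: kept as is,
				     so run-time truncation happens. */
}
#endif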
12480
12481 /* Expand a call EXP to __builtin_object_size. */
12482
12483 rtx
12484 expand_builtin_object_size (tree exp)
12485 {
12486 tree ost;
12487 int object_size_type;
12488 tree fndecl = get_callee_fndecl (exp);
12489
12490 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12491 {
12492 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12493 exp, fndecl);
12494 expand_builtin_trap ();
12495 return const0_rtx;
12496 }
12497
12498 ost = CALL_EXPR_ARG (exp, 1);
12499 STRIP_NOPS (ost);
12500
12501 if (TREE_CODE (ost) != INTEGER_CST
12502 || tree_int_cst_sgn (ost) < 0
12503 || compare_tree_int (ost, 3) > 0)
12504 {
12505 error ("%Klast argument of %D is not integer constant between 0 and 3",
12506 exp, fndecl);
12507 expand_builtin_trap ();
12508 return const0_rtx;
12509 }
12510
12511 object_size_type = tree_low_cst (ost, 0);
12512
12513 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12514 }
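
/* Editorial sketch (not GCC source) of the fallback values expanded
   above when the pointed-to object could not be determined earlier;
   hypothetical function, kept under "#if 0". */
#if 0
#include <stddef.h>

size_t
example_object_size (void *p)
{
  size_t max_est = __builtin_object_size (p, 0); /* (size_t) -1 */
  size_t min_est = __builtin_object_size (p, 2); /* (size_t) 0 */
  return max_est - min_est;
}
#endif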
12515
12516 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12517 FCODE is the BUILT_IN_* to use.
12518 Return NULL_RTX if we failed; the caller should emit a normal call,
12519 otherwise try to get the result in TARGET, if convenient (and in
12520 mode MODE if that's convenient). */
12521
12522 static rtx
12523 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12524 enum built_in_function fcode)
12525 {
12526 tree dest, src, len, size;
12527
12528 if (!validate_arglist (exp,
12529 POINTER_TYPE,
12530 fcode == BUILT_IN_MEMSET_CHK
12531 ? INTEGER_TYPE : POINTER_TYPE,
12532 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12533 return NULL_RTX;
12534
12535 dest = CALL_EXPR_ARG (exp, 0);
12536 src = CALL_EXPR_ARG (exp, 1);
12537 len = CALL_EXPR_ARG (exp, 2);
12538 size = CALL_EXPR_ARG (exp, 3);
12539
12540 if (! host_integerp (size, 1))
12541 return NULL_RTX;
12542
12543 if (host_integerp (len, 1) || integer_all_onesp (size))
12544 {
12545 tree fn;
12546
12547 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12548 {
12549 warning_at (tree_nonartificial_location (exp),
12550 0, "%Kcall to %D will always overflow destination buffer",
12551 exp, get_callee_fndecl (exp));
12552 return NULL_RTX;
12553 }
12554
12555 fn = NULL_TREE;
12556 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12557 mem{cpy,pcpy,move,set} is available. */
12558 switch (fcode)
12559 {
12560 case BUILT_IN_MEMCPY_CHK:
12561 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12562 break;
12563 case BUILT_IN_MEMPCPY_CHK:
12564 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12565 break;
12566 case BUILT_IN_MEMMOVE_CHK:
12567 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12568 break;
12569 case BUILT_IN_MEMSET_CHK:
12570 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12571 break;
12572 default:
12573 break;
12574 }
12575
12576 if (! fn)
12577 return NULL_RTX;
12578
12579 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12580 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12581 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12582 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12583 }
12584 else if (fcode == BUILT_IN_MEMSET_CHK)
12585 return NULL_RTX;
12586 else
12587 {
12588 unsigned int dest_align = get_pointer_alignment (dest);
12589
12590 /* If DEST is not a pointer type, call the normal function. */
12591 if (dest_align == 0)
12592 return NULL_RTX;
12593
12594 /* If SRC and DEST are the same (and not volatile), do nothing. */
12595 if (operand_equal_p (src, dest, 0))
12596 {
12597 tree expr;
12598
12599 if (fcode != BUILT_IN_MEMPCPY_CHK)
12600 {
12601 /* Evaluate and ignore LEN in case it has side-effects. */
12602 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12603 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12604 }
12605
12606 expr = fold_build_pointer_plus (dest, len);
12607 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12608 }
12609
12610 /* __memmove_chk special case. */
12611 if (fcode == BUILT_IN_MEMMOVE_CHK)
12612 {
12613 unsigned int src_align = get_pointer_alignment (src);
12614
12615 if (src_align == 0)
12616 return NULL_RTX;
12617
12618 /* If src is categorized for a read-only section, we can use
12619 the normal __memcpy_chk. */
12620 if (readonly_data_expr (src))
12621 {
12622 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12623 if (!fn)
12624 return NULL_RTX;
12625 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12626 dest, src, len, size);
12627 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12628 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12629 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12630 }
12631 }
12632 return NULL_RTX;
12633 }
12634 }
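
/* Editorial sketch (not GCC source) of how the _chk expansion above
   behaves for a known destination; hypothetical function, kept under
   "#if 0". */
#if 0
#include <string.h>

void
example_memcpy_chk (const char *src)
{
  char buf[8];
  /* LEN (4) is constant and <= SIZE (8): expanded as plain
     memcpy (buf, src, 4). */
  __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0));
  /* LEN (16) > SIZE (8): "will always overflow" warning, call kept. */
  __builtin___memcpy_chk (buf, src, 16, __builtin_object_size (buf, 0));
}
#endif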
12635
12636 /* Emit warning if a buffer overflow is detected at compile time. */
12637
12638 static void
12639 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12640 {
12641 int is_strlen = 0;
12642 tree len, size;
12643 location_t loc = tree_nonartificial_location (exp);
12644
12645 switch (fcode)
12646 {
12647 case BUILT_IN_STRCPY_CHK:
12648 case BUILT_IN_STPCPY_CHK:
12649 /* For __strcat_chk the warning will be emitted only if overflowing
12650 by at least strlen (dest) + 1 bytes. */
12651 case BUILT_IN_STRCAT_CHK:
12652 len = CALL_EXPR_ARG (exp, 1);
12653 size = CALL_EXPR_ARG (exp, 2);
12654 is_strlen = 1;
12655 break;
12656 case BUILT_IN_STRNCAT_CHK:
12657 case BUILT_IN_STRNCPY_CHK:
12658 case BUILT_IN_STPNCPY_CHK:
12659 len = CALL_EXPR_ARG (exp, 2);
12660 size = CALL_EXPR_ARG (exp, 3);
12661 break;
12662 case BUILT_IN_SNPRINTF_CHK:
12663 case BUILT_IN_VSNPRINTF_CHK:
12664 len = CALL_EXPR_ARG (exp, 1);
12665 size = CALL_EXPR_ARG (exp, 3);
12666 break;
12667 default:
12668 gcc_unreachable ();
12669 }
12670
12671 if (!len || !size)
12672 return;
12673
12674 if (! host_integerp (size, 1) || integer_all_onesp (size))
12675 return;
12676
12677 if (is_strlen)
12678 {
12679 len = c_strlen (len, 1);
12680 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12681 return;
12682 }
12683 else if (fcode == BUILT_IN_STRNCAT_CHK)
12684 {
12685 tree src = CALL_EXPR_ARG (exp, 1);
12686 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12687 return;
12688 src = c_strlen (src, 1);
12689 if (! src || ! host_integerp (src, 1))
12690 {
12691 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12692 exp, get_callee_fndecl (exp));
12693 return;
12694 }
12695 else if (tree_int_cst_lt (src, size))
12696 return;
12697 }
12698 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12699 return;
12700
12701 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12702 exp, get_callee_fndecl (exp));
12703 }
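
/* Editorial sketch (not GCC source): a call that trips the warning
   above, written with the explicit builtin that a fortified strcpy
   (e.g. glibc with _FORTIFY_SOURCE) would produce. Hypothetical
   function, kept under "#if 0". */
#if 0
#include <string.h>

void
example_chk_warning (void)
{
  char buf[4];
  /* SIZE is 4 and c_strlen of the source is 11, so this emits
     "call to ... will always overflow destination buffer". */
  __builtin___strcpy_chk (buf, "hello world", __builtin_object_size (buf, 0));
}
#endif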
12704
12705 /* Emit warning if a buffer overflow is detected at compile time
12706 in __sprintf_chk/__vsprintf_chk calls. */
12707
12708 static void
12709 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12710 {
12711 tree size, len, fmt;
12712 const char *fmt_str;
12713 int nargs = call_expr_nargs (exp);
12714
12715 /* Verify the required arguments in the original call. */
12716
12717 if (nargs < 4)
12718 return;
12719 size = CALL_EXPR_ARG (exp, 2);
12720 fmt = CALL_EXPR_ARG (exp, 3);
12721
12722 if (! host_integerp (size, 1) || integer_all_onesp (size))
12723 return;
12724
12725 /* Check whether the format is a literal string constant. */
12726 fmt_str = c_getstr (fmt);
12727 if (fmt_str == NULL)
12728 return;
12729
12730 if (!init_target_chars ())
12731 return;
12732
12733 /* If the format doesn't contain % args or %%, we know its size. */
12734 if (strchr (fmt_str, target_percent) == 0)
12735 len = build_int_cstu (size_type_node, strlen (fmt_str));
12736 /* If the format is "%s" and the first ... argument is a string literal,
12737 we know the size too. */
12738 else if (fcode == BUILT_IN_SPRINTF_CHK
12739 && strcmp (fmt_str, target_percent_s) == 0)
12740 {
12741 tree arg;
12742
12743 if (nargs < 5)
12744 return;
12745 arg = CALL_EXPR_ARG (exp, 4);
12746 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12747 return;
12748
12749 len = c_strlen (arg, 1);
12750 if (!len || ! host_integerp (len, 1))
12751 return;
12752 }
12753 else
12754 return;
12755
12756 if (! tree_int_cst_lt (len, size))
12757 warning_at (tree_nonartificial_location (exp),
12758 0, "%Kcall to %D will always overflow destination buffer",
12759 exp, get_callee_fndecl (exp));
12760 }
12761
12762 /* Emit warning if a free is called with address of a variable. */
12763
12764 static void
12765 maybe_emit_free_warning (tree exp)
12766 {
12767 tree arg = CALL_EXPR_ARG (exp, 0);
12768
12769 STRIP_NOPS (arg);
12770 if (TREE_CODE (arg) != ADDR_EXPR)
12771 return;
12772
12773 arg = get_base_address (TREE_OPERAND (arg, 0));
12774 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12775 return;
12776
12777 if (SSA_VAR_P (arg))
12778 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12779 "%Kattempt to free a non-heap object %qD", exp, arg);
12780 else
12781 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12782 "%Kattempt to free a non-heap object", exp);
12783 }
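
/* Editorial sketch (not GCC source) of code the warning above catches;
   hypothetical function, kept under "#if 0". */
#if 0
#include <stdlib.h>

void
example_free_warning (void)
{
  int local;
  free (&local); /* -Wfree-nonheap-object: "attempt to free a
		    non-heap object 'local'". */
}
#endif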
12784
12785 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12786 if possible. */
12787
12788 tree
12789 fold_builtin_object_size (tree ptr, tree ost)
12790 {
12791 unsigned HOST_WIDE_INT bytes;
12792 int object_size_type;
12793
12794 if (!validate_arg (ptr, POINTER_TYPE)
12795 || !validate_arg (ost, INTEGER_TYPE))
12796 return NULL_TREE;
12797
12798 STRIP_NOPS (ost);
12799
12800 if (TREE_CODE (ost) != INTEGER_CST
12801 || tree_int_cst_sgn (ost) < 0
12802 || compare_tree_int (ost, 3) > 0)
12803 return NULL_TREE;
12804
12805 object_size_type = tree_low_cst (ost, 0);
12806
12807 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12808 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12809 and (size_t) 0 for types 2 and 3. */
12810 if (TREE_SIDE_EFFECTS (ptr))
12811 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12812
12813 if (TREE_CODE (ptr) == ADDR_EXPR)
12814 {
12815 bytes = compute_builtin_object_size (ptr, object_size_type);
12816 if (double_int_fits_to_tree_p (size_type_node,
12817 double_int::from_uhwi (bytes)))
12818 return build_int_cstu (size_type_node, bytes);
12819 }
12820 else if (TREE_CODE (ptr) == SSA_NAME)
12821 {
12822 /* If object size is not known yet, delay folding until
12823 later. Maybe subsequent passes will help determining
12824 it. */
12825 bytes = compute_builtin_object_size (ptr, object_size_type);
12826 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12827 && double_int_fits_to_tree_p (size_type_node,
12828 double_int::from_uhwi (bytes)))
12829 return build_int_cstu (size_type_node, bytes);
12830 }
12831
12832 return NULL_TREE;
12833 }
12834
12835 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12836 DEST, SRC, LEN, and SIZE are the arguments to the call.
12837 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
12838 code of the builtin. If MAXLEN is not NULL, it is the maximum length
12839 passed as the third argument. */
12840
12841 tree
12842 fold_builtin_memory_chk (location_t loc, tree fndecl,
12843 tree dest, tree src, tree len, tree size,
12844 tree maxlen, bool ignore,
12845 enum built_in_function fcode)
12846 {
12847 tree fn;
12848
12849 if (!validate_arg (dest, POINTER_TYPE)
12850 || !validate_arg (src,
12851 (fcode == BUILT_IN_MEMSET_CHK
12852 ? INTEGER_TYPE : POINTER_TYPE))
12853 || !validate_arg (len, INTEGER_TYPE)
12854 || !validate_arg (size, INTEGER_TYPE))
12855 return NULL_TREE;
12856
12857 /* If SRC and DEST are the same (and not volatile), return DEST
12858 (resp. DEST+LEN for __mempcpy_chk). */
12859 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12860 {
12861 if (fcode != BUILT_IN_MEMPCPY_CHK)
12862 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12863 dest, len);
12864 else
12865 {
12866 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12867 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12868 }
12869 }
12870
12871 if (! host_integerp (size, 1))
12872 return NULL_TREE;
12873
12874 if (! integer_all_onesp (size))
12875 {
12876 if (! host_integerp (len, 1))
12877 {
12878 /* If LEN is not constant, try MAXLEN too.
12879 For MAXLEN only allow optimizing into non-_ocs function
12880 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12881 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12882 {
12883 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12884 {
12885 /* (void) __mempcpy_chk () can be optimized into
12886 (void) __memcpy_chk (). */
12887 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12888 if (!fn)
12889 return NULL_TREE;
12890
12891 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12892 }
12893 return NULL_TREE;
12894 }
12895 }
12896 else
12897 maxlen = len;
12898
12899 if (tree_int_cst_lt (size, maxlen))
12900 return NULL_TREE;
12901 }
12902
12903 fn = NULL_TREE;
12904 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12905 mem{cpy,pcpy,move,set} is available. */
12906 switch (fcode)
12907 {
12908 case BUILT_IN_MEMCPY_CHK:
12909 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12910 break;
12911 case BUILT_IN_MEMPCPY_CHK:
12912 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12913 break;
12914 case BUILT_IN_MEMMOVE_CHK:
12915 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12916 break;
12917 case BUILT_IN_MEMSET_CHK:
12918 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12919 break;
12920 default:
12921 break;
12922 }
12923
12924 if (!fn)
12925 return NULL_TREE;
12926
12927 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12928 }
12929
12930 /* Fold a call to the __st[rp]cpy_chk builtin.
12931 DEST, SRC, and SIZE are the arguments to the call.
12932 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
12933 code of the builtin. If MAXLEN is not NULL, it is the maximum length of
12934 the string passed as the second argument. */
12935
12936 tree
12937 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12938 tree src, tree size,
12939 tree maxlen, bool ignore,
12940 enum built_in_function fcode)
12941 {
12942 tree len, fn;
12943
12944 if (!validate_arg (dest, POINTER_TYPE)
12945 || !validate_arg (src, POINTER_TYPE)
12946 || !validate_arg (size, INTEGER_TYPE))
12947 return NULL_TREE;
12948
12949 /* If SRC and DEST are the same (and not volatile), return DEST. */
12950 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12951 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12952
12953 if (! host_integerp (size, 1))
12954 return NULL_TREE;
12955
12956 if (! integer_all_onesp (size))
12957 {
12958 len = c_strlen (src, 1);
12959 if (! len || ! host_integerp (len, 1))
12960 {
12961 /* If LEN is not constant, try MAXLEN too.
12962 For MAXLEN only allow optimizing into non-_ocs function
12963 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12964 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12965 {
12966 if (fcode == BUILT_IN_STPCPY_CHK)
12967 {
12968 if (! ignore)
12969 return NULL_TREE;
12970
12971 /* If return value of __stpcpy_chk is ignored,
12972 optimize into __strcpy_chk. */
12973 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12974 if (!fn)
12975 return NULL_TREE;
12976
12977 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12978 }
12979
12980 if (! len || TREE_SIDE_EFFECTS (len))
12981 return NULL_TREE;
12982
12983 /* If c_strlen returned something, but not a constant,
12984 transform __strcpy_chk into __memcpy_chk. */
12985 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12986 if (!fn)
12987 return NULL_TREE;
12988
12989 len = fold_convert_loc (loc, size_type_node, len);
12990 len = size_binop_loc (loc, PLUS_EXPR, len,
12991 build_int_cst (size_type_node, 1));
12992 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12993 build_call_expr_loc (loc, fn, 4,
12994 dest, src, len, size));
12995 }
12996 }
12997 else
12998 maxlen = len;
12999
13000 if (! tree_int_cst_lt (maxlen, size))
13001 return NULL_TREE;
13002 }
13003
13004 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
13005 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
13006 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
13007 if (!fn)
13008 return NULL_TREE;
13009
13010 return build_call_expr_loc (loc, fn, 2, dest, src);
13011 }
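
/* Editorial sketch (not GCC source) of the __strcpy_chk folding above;
   hypothetical function, kept under "#if 0". */
#if 0
#include <string.h>

void
example_strcpy_chk_fold (char *dst, const char *src)
{
  char buf[16];
  /* Constant source of length 5 < SIZE 16: folded to
     strcpy (buf, "hello"). */
  __builtin___strcpy_chk (buf, "hello", __builtin_object_size (buf, 0));
  /* SIZE is all-ones (unknown object): also folded to plain strcpy. */
  __builtin___strcpy_chk (dst, src, (size_t) -1);
}
#endif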
13012
13013 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
13014 are the arguments to the call. If MAXLEN is not NULL, it is the maximum
13015 length passed as the third argument. IGNORE is true if the return value
13016 can be ignored. FCODE is the BUILT_IN_* code of the builtin. */
13017
13018 tree
13019 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
13020 tree len, tree size, tree maxlen, bool ignore,
13021 enum built_in_function fcode)
13022 {
13023 tree fn;
13024
13025 if (!validate_arg (dest, POINTER_TYPE)
13026 || !validate_arg (src, POINTER_TYPE)
13027 || !validate_arg (len, INTEGER_TYPE)
13028 || !validate_arg (size, INTEGER_TYPE))
13029 return NULL_TREE;
13030
13031 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
13032 {
13033 /* If return value of __stpncpy_chk is ignored,
13034 optimize into __strncpy_chk. */
13035 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
13036 if (fn)
13037 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
13038 }
13039
13040 if (! host_integerp (size, 1))
13041 return NULL_TREE;
13042
13043 if (! integer_all_onesp (size))
13044 {
13045 if (! host_integerp (len, 1))
13046 {
13047 /* If LEN is not constant, try MAXLEN too.
13048 For MAXLEN only allow optimizing into non-_ocs function
13049 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13050 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13051 return NULL_TREE;
13052 }
13053 else
13054 maxlen = len;
13055
13056 if (tree_int_cst_lt (size, maxlen))
13057 return NULL_TREE;
13058 }
13059
13060 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
13061 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
13062 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
13063 if (!fn)
13064 return NULL_TREE;
13065
13066 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13067 }
13068
13069 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
13070 are the arguments to the call. */
13071
13072 static tree
13073 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
13074 tree src, tree size)
13075 {
13076 tree fn;
13077 const char *p;
13078
13079 if (!validate_arg (dest, POINTER_TYPE)
13080 || !validate_arg (src, POINTER_TYPE)
13081 || !validate_arg (size, INTEGER_TYPE))
13082 return NULL_TREE;
13083
13084 p = c_getstr (src);
13085 /* If the SRC parameter is "", return DEST. */
13086 if (p && *p == '\0')
13087 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13088
13089 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
13090 return NULL_TREE;
13091
13092 /* If __builtin_strcat_chk is used, assume strcat is available. */
13093 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
13094 if (!fn)
13095 return NULL_TREE;
13096
13097 return build_call_expr_loc (loc, fn, 2, dest, src);
13098 }
13099
13100 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
13101 LEN, and SIZE. */
13102
13103 static tree
13104 fold_builtin_strncat_chk (location_t loc, tree fndecl,
13105 tree dest, tree src, tree len, tree size)
13106 {
13107 tree fn;
13108 const char *p;
13109
13110 if (!validate_arg (dest, POINTER_TYPE)
13111 || !validate_arg (src, POINTER_TYPE)
13112 || !validate_arg (len, INTEGER_TYPE)
13113 || !validate_arg (size, INTEGER_TYPE))
13114 return NULL_TREE;
13115
13116 p = c_getstr (src);
13117 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13118 if (p && *p == '\0')
13119 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13120 else if (integer_zerop (len))
13121 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13122
13123 if (! host_integerp (size, 1))
13124 return NULL_TREE;
13125
13126 if (! integer_all_onesp (size))
13127 {
13128 tree src_len = c_strlen (src, 1);
13129 if (src_len
13130 && host_integerp (src_len, 1)
13131 && host_integerp (len, 1)
13132 && ! tree_int_cst_lt (len, src_len))
13133 {
13134 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13135 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13136 if (!fn)
13137 return NULL_TREE;
13138
13139 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13140 }
13141 return NULL_TREE;
13142 }
13143
13144 /* If __builtin_strncat_chk is used, assume strncat is available. */
13145 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13146 if (!fn)
13147 return NULL_TREE;
13148
13149 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13150 }
13151
13152 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13153 Return NULL_TREE if a normal call should be emitted rather than
13154 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13155 or BUILT_IN_VSPRINTF_CHK. */
13156
13157 static tree
13158 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13159 enum built_in_function fcode)
13160 {
13161 tree dest, size, len, fn, fmt, flag;
13162 const char *fmt_str;
13163
13164 /* Verify the required arguments in the original call. */
13165 if (nargs < 4)
13166 return NULL_TREE;
13167 dest = args[0];
13168 if (!validate_arg (dest, POINTER_TYPE))
13169 return NULL_TREE;
13170 flag = args[1];
13171 if (!validate_arg (flag, INTEGER_TYPE))
13172 return NULL_TREE;
13173 size = args[2];
13174 if (!validate_arg (size, INTEGER_TYPE))
13175 return NULL_TREE;
13176 fmt = args[3];
13177 if (!validate_arg (fmt, POINTER_TYPE))
13178 return NULL_TREE;
13179
13180 if (! host_integerp (size, 1))
13181 return NULL_TREE;
13182
13183 len = NULL_TREE;
13184
13185 if (!init_target_chars ())
13186 return NULL_TREE;
13187
13188 /* Check whether the format is a literal string constant. */
13189 fmt_str = c_getstr (fmt);
13190 if (fmt_str != NULL)
13191 {
13192 /* If the format doesn't contain % args or %%, we know the size. */
13193 if (strchr (fmt_str, target_percent) == 0)
13194 {
13195 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13196 len = build_int_cstu (size_type_node, strlen (fmt_str));
13197 }
13198 /* If the format is "%s" and first ... argument is a string literal,
13199 we know the size too. */
13200 else if (fcode == BUILT_IN_SPRINTF_CHK
13201 && strcmp (fmt_str, target_percent_s) == 0)
13202 {
13203 tree arg;
13204
13205 if (nargs == 5)
13206 {
13207 arg = args[4];
13208 if (validate_arg (arg, POINTER_TYPE))
13209 {
13210 len = c_strlen (arg, 1);
13211 if (! len || ! host_integerp (len, 1))
13212 len = NULL_TREE;
13213 }
13214 }
13215 }
13216 }
13217
13218 if (! integer_all_onesp (size))
13219 {
13220 if (! len || ! tree_int_cst_lt (len, size))
13221 return NULL_TREE;
13222 }
13223
13224 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13225 or if format doesn't contain % chars or is "%s". */
13226 if (! integer_zerop (flag))
13227 {
13228 if (fmt_str == NULL)
13229 return NULL_TREE;
13230 if (strchr (fmt_str, target_percent) != NULL
13231 && strcmp (fmt_str, target_percent_s))
13232 return NULL_TREE;
13233 }
13234
13235 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13236 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13237 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13238 if (!fn)
13239 return NULL_TREE;
13240
13241 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13242 }
13243
13244 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13245 a normal call should be emitted rather than expanding the function
13246 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13247
13248 static tree
13249 fold_builtin_sprintf_chk (location_t loc, tree exp,
13250 enum built_in_function fcode)
13251 {
13252 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13253 CALL_EXPR_ARGP (exp), fcode);
13254 }
13255
13256 /* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS.
13257 Return NULL_TREE if a normal call should be emitted rather than expanding
13258 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13259 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
13260 passed as the second argument. */
13261
13262 static tree
13263 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13264 tree maxlen, enum built_in_function fcode)
13265 {
13266 tree dest, size, len, fn, fmt, flag;
13267 const char *fmt_str;
13268
13269 /* Verify the required arguments in the original call. */
13270 if (nargs < 5)
13271 return NULL_TREE;
13272 dest = args[0];
13273 if (!validate_arg (dest, POINTER_TYPE))
13274 return NULL_TREE;
13275 len = args[1];
13276 if (!validate_arg (len, INTEGER_TYPE))
13277 return NULL_TREE;
13278 flag = args[2];
13279 if (!validate_arg (flag, INTEGER_TYPE))
13280 return NULL_TREE;
13281 size = args[3];
13282 if (!validate_arg (size, INTEGER_TYPE))
13283 return NULL_TREE;
13284 fmt = args[4];
13285 if (!validate_arg (fmt, POINTER_TYPE))
13286 return NULL_TREE;
13287
13288 if (! host_integerp (size, 1))
13289 return NULL_TREE;
13290
13291 if (! integer_all_onesp (size))
13292 {
13293 if (! host_integerp (len, 1))
13294 {
13295 /* If LEN is not constant, try MAXLEN too.
13296 For MAXLEN only allow optimizing into non-_ocs function
13297 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13298 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13299 return NULL_TREE;
13300 }
13301 else
13302 maxlen = len;
13303
13304 if (tree_int_cst_lt (size, maxlen))
13305 return NULL_TREE;
13306 }
13307
13308 if (!init_target_chars ())
13309 return NULL_TREE;
13310
13311 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13312 or if format doesn't contain % chars or is "%s". */
13313 if (! integer_zerop (flag))
13314 {
13315 fmt_str = c_getstr (fmt);
13316 if (fmt_str == NULL)
13317 return NULL_TREE;
13318 if (strchr (fmt_str, target_percent) != NULL
13319 && strcmp (fmt_str, target_percent_s))
13320 return NULL_TREE;
13321 }
13322
13323 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13324 available. */
13325 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13326 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13327 if (!fn)
13328 return NULL_TREE;
13329
13330 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13331 }
13332
13333 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
13334 a normal call should be emitted rather than expanding the function
13335 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13336 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
13337 passed as the second argument. */
13338
13339 tree
13340 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13341 enum built_in_function fcode)
13342 {
13343 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13344 CALL_EXPR_ARGP (exp), maxlen, fcode);
13345 }
13346
13347 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13348 FMT and ARG are the arguments to the call; we don't fold cases with
13349 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13350
13351 Return NULL_TREE if no simplification was possible, otherwise return the
13352 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13353 code of the function to be simplified. */
13354
13355 static tree
13356 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13357 tree arg, bool ignore,
13358 enum built_in_function fcode)
13359 {
13360 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13361 const char *fmt_str = NULL;
13362
13363 /* If the return value is used, don't do the transformation. */
13364 if (! ignore)
13365 return NULL_TREE;
13366
13367 /* Verify the required arguments in the original call. */
13368 if (!validate_arg (fmt, POINTER_TYPE))
13369 return NULL_TREE;
13370
13371 /* Check whether the format is a literal string constant. */
13372 fmt_str = c_getstr (fmt);
13373 if (fmt_str == NULL)
13374 return NULL_TREE;
13375
13376 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13377 {
13378 /* If we're using an unlocked function, assume the other
13379 unlocked functions exist explicitly. */
13380 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13381 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13382 }
13383 else
13384 {
13385 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13386 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13387 }
13388
13389 if (!init_target_chars ())
13390 return NULL_TREE;
13391
13392 if (strcmp (fmt_str, target_percent_s) == 0
13393 || strchr (fmt_str, target_percent) == NULL)
13394 {
13395 const char *str;
13396
13397 if (strcmp (fmt_str, target_percent_s) == 0)
13398 {
13399 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13400 return NULL_TREE;
13401
13402 if (!arg || !validate_arg (arg, POINTER_TYPE))
13403 return NULL_TREE;
13404
13405 str = c_getstr (arg);
13406 if (str == NULL)
13407 return NULL_TREE;
13408 }
13409 else
13410 {
13411 /* The format specifier doesn't contain any '%' characters. */
13412 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13413 && arg)
13414 return NULL_TREE;
13415 str = fmt_str;
13416 }
13417
13418 /* If the string was "", printf does nothing. */
13419 if (str[0] == '\0')
13420 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13421
13422 /* If the string has length of 1, call putchar. */
13423 if (str[1] == '\0')
13424 {
13425 /* Given printf ("c") (where c is any single character),
13426 convert "c"[0] to an int and pass that to the replacement
13427 function. */
13428 newarg = build_int_cst (integer_type_node, str[0]);
13429 if (fn_putchar)
13430 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13431 }
13432 else
13433 {
13434 /* If the string was "string\n", call puts("string"). */
13435 size_t len = strlen (str);
13436 if ((unsigned char)str[len - 1] == target_newline
13437 && (size_t) (int) len == len
13438 && (int) len > 0)
13439 {
13440 char *newstr;
13441 tree offset_node, string_cst;
13442
13443 /* Create a NUL-terminated string that's one char shorter
13444 than the original, stripping off the trailing '\n'. */
13445 newarg = build_string_literal (len, str);
13446 string_cst = string_constant (newarg, &offset_node);
13447 gcc_checking_assert (string_cst
13448 && (TREE_STRING_LENGTH (string_cst)
13449 == (int) len)
13450 && integer_zerop (offset_node)
13451 && (unsigned char)
13452 TREE_STRING_POINTER (string_cst)[len - 1]
13453 == target_newline);
13454 /* build_string_literal creates a new STRING_CST,
13455 modify it in place to avoid double copying. */
13456 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13457 newstr[len - 1] = '\0';
13458 if (fn_puts)
13459 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13460 }
13461 else
13462 /* We'd like to arrange to call fputs (string, stdout) here,
13463 but we need stdout and don't have a way to get it yet. */
13464 return NULL_TREE;
13465 }
13466 }
13467
13468 /* The other optimizations can be done only on the non-va_list variants. */
13469 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13470 return NULL_TREE;
13471
13472 /* If the format specifier was "%s\n", call __builtin_puts (arg). */
13473 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13474 {
13475 if (!arg || !validate_arg (arg, POINTER_TYPE))
13476 return NULL_TREE;
13477 if (fn_puts)
13478 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13479 }
13480
13481 /* If the format specifier was "%c", call __builtin_putchar (arg). */
13482 else if (strcmp (fmt_str, target_percent_c) == 0)
13483 {
13484 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13485 return NULL_TREE;
13486 if (fn_putchar)
13487 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13488 }
13489
13490 if (!call)
13491 return NULL_TREE;
13492
13493 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13494 }
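
/* Editorial sketch (not GCC source) of the printf rewrites above, all
   of which require the return value to be ignored; hypothetical
   function, kept under "#if 0". */
#if 0
#include <stdio.h>

void
example_printf_fold (const char *s, int c)
{
  printf ("x");       /* single char: becomes putchar ('x'). */
  printf ("hi\n");    /* trailing newline: becomes puts ("hi"). */
  printf ("%s\n", s); /* becomes puts (s). */
  printf ("%c", c);   /* becomes putchar (c). */
  printf ("");        /* folded away entirely. */
}
#endif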
13495
13496 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13497 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13498 more than 3 arguments, and ARG may be null in the 2-argument case.
13499
13500 Return NULL_TREE if no simplification was possible, otherwise return the
13501 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13502 code of the function to be simplified. */
13503
13504 static tree
13505 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13506 tree fmt, tree arg, bool ignore,
13507 enum built_in_function fcode)
13508 {
13509 tree fn_fputc, fn_fputs, call = NULL_TREE;
13510 const char *fmt_str = NULL;
13511
13512 /* If the return value is used, don't do the transformation. */
13513 if (! ignore)
13514 return NULL_TREE;
13515
13516 /* Verify the required arguments in the original call. */
13517 if (!validate_arg (fp, POINTER_TYPE))
13518 return NULL_TREE;
13519 if (!validate_arg (fmt, POINTER_TYPE))
13520 return NULL_TREE;
13521
13522 /* Check whether the format is a literal string constant. */
13523 fmt_str = c_getstr (fmt);
13524 if (fmt_str == NULL)
13525 return NULL_TREE;
13526
13527 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13528 {
13529 /* If we're using an unlocked function, assume the other
13530 unlocked functions exist explicitly. */
13531 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13532 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13533 }
13534 else
13535 {
13536 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13537 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13538 }
13539
13540 if (!init_target_chars ())
13541 return NULL_TREE;
13542
13543 /* If the format doesn't contain % args or %%, use fputs. */
13544 if (strchr (fmt_str, target_percent) == NULL)
13545 {
13546 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13547 && arg)
13548 return NULL_TREE;
13549
13550 /* If the format specifier was "", fprintf does nothing. */
13551 if (fmt_str[0] == '\0')
13552 {
13553 /* If FP has side-effects, just wait until gimplification is
13554 done. */
13555 if (TREE_SIDE_EFFECTS (fp))
13556 return NULL_TREE;
13557
13558 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13559 }
13560
13561 /* When "string" doesn't contain %, replace all cases of
13562 fprintf (fp, string) with fputs (string, fp). The fputs
13563 builtin will take care of special cases like length == 1. */
13564 if (fn_fputs)
13565 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13566 }
13567
13568 /* The other optimizations can be done only on the non-va_list variants. */
13569 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13570 return NULL_TREE;
13571
13572 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13573 else if (strcmp (fmt_str, target_percent_s) == 0)
13574 {
13575 if (!arg || !validate_arg (arg, POINTER_TYPE))
13576 return NULL_TREE;
13577 if (fn_fputs)
13578 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13579 }
13580
13581 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13582 else if (strcmp (fmt_str, target_percent_c) == 0)
13583 {
13584 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13585 return NULL_TREE;
13586 if (fn_fputc)
13587 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13588 }
13589
13590 if (!call)
13591 return NULL_TREE;
13592 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13593 }
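
/* Editorial sketch (not GCC source) of the corresponding fprintf
   rewrites; hypothetical function, kept under "#if 0". */
#if 0
#include <stdio.h>

void
example_fprintf_fold (FILE *fp, const char *s, int c)
{
  fprintf (fp, "hello");  /* no '%': becomes fputs ("hello", fp). */
  fprintf (fp, "%s", s);  /* becomes fputs (s, fp). */
  fprintf (fp, "%c", c);  /* becomes fputc (c, fp). */
}
#endif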
13594
13595 /* Initialize format string characters in the target charset. */
13596
13597 static bool
13598 init_target_chars (void)
13599 {
13600 static bool init;
13601 if (!init)
13602 {
13603 target_newline = lang_hooks.to_target_charset ('\n');
13604 target_percent = lang_hooks.to_target_charset ('%');
13605 target_c = lang_hooks.to_target_charset ('c');
13606 target_s = lang_hooks.to_target_charset ('s');
13607 if (target_newline == 0 || target_percent == 0 || target_c == 0
13608 || target_s == 0)
13609 return false;
13610
13611 target_percent_c[0] = target_percent;
13612 target_percent_c[1] = target_c;
13613 target_percent_c[2] = '\0';
13614
13615 target_percent_s[0] = target_percent;
13616 target_percent_s[1] = target_s;
13617 target_percent_s[2] = '\0';
13618
13619 target_percent_s_newline[0] = target_percent;
13620 target_percent_s_newline[1] = target_s;
13621 target_percent_s_newline[2] = target_newline;
13622 target_percent_s_newline[3] = '\0';
13623
13624 init = true;
13625 }
13626 return true;
13627 }
13628
13629 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13630 and no overflow/underflow occurred. INEXACT is true if M was not
13631 exactly calculated. TYPE is the tree type for the result. This
13632 function assumes that you cleared the MPFR flags and then
13633 calculated M to see if anything subsequently set a flag prior to
13634 entering this function. Return NULL_TREE if any checks fail. */
13635
13636 static tree
13637 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13638 {
13639 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13640 overflow/underflow occurred. If -frounding-math, proceed iff the
13641 result of calling FUNC was exact. */
13642 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13643 && (!flag_rounding_math || !inexact))
13644 {
13645 REAL_VALUE_TYPE rr;
13646
13647 real_from_mpfr (&rr, m, type, GMP_RNDN);
13648 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13649 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13650 but the mpfr_t is not, then we underflowed in the
13651 conversion. */
13652 if (real_isfinite (&rr)
13653 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13654 {
13655 REAL_VALUE_TYPE rmode;
13656
13657 real_convert (&rmode, TYPE_MODE (type), &rr);
13658 /* Proceed iff the specified mode can hold the value. */
13659 if (real_identical (&rmode, &rr))
13660 return build_real (type, rmode);
13661 }
13662 }
13663 return NULL_TREE;
13664 }
13665
13666 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13667 number and no overflow/underflow occurred. INEXACT is true if M
13668 was not exactly calculated. TYPE is the tree type for the result.
13669 This function assumes that you cleared the MPFR flags and then
13670 calculated M to see if anything subsequently set a flag prior to
13671 entering this function. Return NULL_TREE if any checks fail; if
13672 FORCE_CONVERT is true, then the checks are bypassed. */
13673
13674 static tree
13675 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13676 {
13677 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13678 overflow/underflow occurred. If -frounding-math, proceed iff the
13679 result of calling FUNC was exact. */
13680 if (force_convert
13681 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13682 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13683 && (!flag_rounding_math || !inexact)))
13684 {
13685 REAL_VALUE_TYPE re, im;
13686
13687 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13688 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13689 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13690 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13691 but the mpfr_t is not, then we underflowed in the
13692 conversion. */
13693 if (force_convert
13694 || (real_isfinite (&re) && real_isfinite (&im)
13695 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13696 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13697 {
13698 REAL_VALUE_TYPE re_mode, im_mode;
13699
13700 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13701 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13702 /* Proceed iff the specified mode can hold the value. */
13703 if (force_convert
13704 || (real_identical (&re_mode, &re)
13705 && real_identical (&im_mode, &im)))
13706 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13707 build_real (TREE_TYPE (type), im_mode));
13708 }
13709 }
13710 return NULL_TREE;
13711 }
13712
13713 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13714 FUNC on it and return the resulting value as a tree with type TYPE.
13715 If MIN and/or MAX are not NULL, then the supplied ARG must be
13716 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13717 acceptable values, otherwise they are not. The mpfr precision is
13718 set to the precision of TYPE. We assume that function FUNC returns
13719 zero if the result could be calculated exactly within the requested
13720 precision. */
13721
13722 static tree
13723 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13724 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13725 bool inclusive)
13726 {
13727 tree result = NULL_TREE;
13728
13729 STRIP_NOPS (arg);
13730
13731 /* To proceed, MPFR must exactly represent the target floating point
13732 format, which only happens when the target base equals two. */
13733 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13734 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13735 {
13736 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13737
13738 if (real_isfinite (ra)
13739 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
13740 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
13741 {
13742 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13743 const int prec = fmt->p;
13744 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13745 int inexact;
13746 mpfr_t m;
13747
13748 mpfr_init2 (m, prec);
13749 mpfr_from_real (m, ra, GMP_RNDN);
13750 mpfr_clear_flags ();
13751 inexact = func (m, m, rnd);
13752 result = do_mpfr_ckconv (m, type, inexact);
13753 mpfr_clear (m);
13754 }
13755 }
13756
13757 return result;
13758 }
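
/* Editorial sketch (not GCC source): the MPFR sequence above reduced to
   a standalone host program, folding sin (1.0) the way do_mpfr_arg1
   would; do_mpfr_ckconv stands in for the exactness/overflow checks.
   Kept under "#if 0". */
#if 0
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m;
  int inexact;

  mpfr_init2 (m, 53); /* precision of an IEEE double */
  mpfr_set_d (m, 1.0, GMP_RNDN);
  mpfr_clear_flags ();
  inexact = mpfr_sin (m, m, GMP_RNDN); /* nonzero if result was rounded */
  printf ("sin (1.0) = %.17g (inexact = %d)\n",
	  mpfr_get_d (m, GMP_RNDN), inexact);
  mpfr_clear (m);
  return 0;
}
#endif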
13759
13760 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13761 FUNC on it and return the resulting value as a tree with type TYPE.
13762 The mpfr precision is set to the precision of TYPE. We assume that
13763 function FUNC returns zero if the result could be calculated
13764 exactly within the requested precision. */
13765
13766 static tree
13767 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13768 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13769 {
13770 tree result = NULL_TREE;
13771
13772 STRIP_NOPS (arg1);
13773 STRIP_NOPS (arg2);
13774
13775 /* To proceed, MPFR must exactly represent the target floating point
13776 format, which only happens when the target base equals two. */
13777 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13778 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13779 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13780 {
13781 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13782 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13783
13784 if (real_isfinite (ra1) && real_isfinite (ra2))
13785 {
13786 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13787 const int prec = fmt->p;
13788 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13789 int inexact;
13790 mpfr_t m1, m2;
13791
13792 mpfr_inits2 (prec, m1, m2, NULL);
13793 mpfr_from_real (m1, ra1, GMP_RNDN);
13794 mpfr_from_real (m2, ra2, GMP_RNDN);
13795 mpfr_clear_flags ();
13796 inexact = func (m1, m1, m2, rnd);
13797 result = do_mpfr_ckconv (m1, type, inexact);
13798 mpfr_clears (m1, m2, NULL);
13799 }
13800 }
13801
13802 return result;
13803 }
13804
13805 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13806 FUNC on it and return the resulting value as a tree with type TYPE.
13807 The mpfr precision is set to the precision of TYPE. We assume that
13808 function FUNC returns zero if the result could be calculated
13809 exactly within the requested precision. */
13810
13811 static tree
13812 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13813 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13814 {
13815 tree result = NULL_TREE;
13816
13817 STRIP_NOPS (arg1);
13818 STRIP_NOPS (arg2);
13819 STRIP_NOPS (arg3);
13820
13821 /* To proceed, MPFR must exactly represent the target floating point
13822 format, which only happens when the target base equals two. */
13823 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13824 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13825 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13826 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13827 {
13828 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13829 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13830 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13831
13832 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13833 {
13834 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13835 const int prec = fmt->p;
13836 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13837 int inexact;
13838 mpfr_t m1, m2, m3;
13839
13840 mpfr_inits2 (prec, m1, m2, m3, NULL);
13841 mpfr_from_real (m1, ra1, GMP_RNDN);
13842 mpfr_from_real (m2, ra2, GMP_RNDN);
13843 mpfr_from_real (m3, ra3, GMP_RNDN);
13844 mpfr_clear_flags ();
13845 inexact = func (m1, m1, m2, m3, rnd);
13846 result = do_mpfr_ckconv (m1, type, inexact);
13847 mpfr_clears (m1, m2, m3, NULL);
13848 }
13849 }
13850
13851 return result;
13852 }
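
/* Illustrative sketch (not a real call site): the natural client of
   this helper is fma folding, along the lines of

     tree folded = do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

   mpfr_fma computes arg0 * arg1 + arg2 with a single rounding and
   matches the FUNC signature above.  */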
13853
13854 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13855 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13856 If ARG_SINP and ARG_COSP are NULL, the result is returned
13857 as a complex value.
13858 The type is taken from the type of ARG and is used for setting the
13859 precision of the calculation and results. */
13860
13861 static tree
13862 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13863 {
13864 tree const type = TREE_TYPE (arg);
13865 tree result = NULL_TREE;
13866
13867 STRIP_NOPS (arg);
13868
13869 /* To proceed, MPFR must exactly represent the target floating point
13870 format, which only happens when the target base equals two. */
13871 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13872 && TREE_CODE (arg) == REAL_CST
13873 && !TREE_OVERFLOW (arg))
13874 {
13875 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13876
13877 if (real_isfinite (ra))
13878 {
13879 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13880 const int prec = fmt->p;
13881 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13882 tree result_s, result_c;
13883 int inexact;
13884 mpfr_t m, ms, mc;
13885
13886 mpfr_inits2 (prec, m, ms, mc, NULL);
13887 mpfr_from_real (m, ra, GMP_RNDN);
13888 mpfr_clear_flags ();
13889 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13890 result_s = do_mpfr_ckconv (ms, type, inexact);
13891 result_c = do_mpfr_ckconv (mc, type, inexact);
13892 mpfr_clears (m, ms, mc, NULL);
13893 if (result_s && result_c)
13894 {
13895 /* If we are to return the result as a complex value, do so. */
13896 if (!arg_sinp && !arg_cosp)
13897 return build_complex (build_complex_type (type),
13898 result_c, result_s);
13899
13900 /* Dereference the sin/cos pointer arguments. */
13901 arg_sinp = build_fold_indirect_ref (arg_sinp);
13902 arg_cosp = build_fold_indirect_ref (arg_cosp);
13903 /* Proceed iff valid pointer types were passed in. */
13904 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13905 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13906 {
13907 /* Set the values. */
13908 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13909 result_s);
13910 TREE_SIDE_EFFECTS (result_s) = 1;
13911 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13912 result_c);
13913 TREE_SIDE_EFFECTS (result_c) = 1;
13914 /* Combine the assignments into a compound expr. */
13915 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13916 result_s, result_c));
13917 }
13918 }
13919 }
13920 }
13921 return result;
13922 }
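
/* Illustrative call shapes (hedged sketch; the real call sites are
   the sincos and cexpi folders):

     do_mpfr_sincos (arg, arg_sinp, arg_cosp);    for sincos (x, &s, &c)
     do_mpfr_sincos (arg, NULL_TREE, NULL_TREE);  for cexpi (x)

   In the second form the COMPLEX_CST built above carries cos (x) in
   the real part and sin (x) in the imaginary part, i.e. cexpi (x).  */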
13923
13924 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13925 two-argument mpfr order N Bessel function FUNC on them and return
13926 the resulting value as a tree with type TYPE. The mpfr precision
13927 is set to the precision of TYPE. We assume that function FUNC
13928 returns zero if the result could be calculated exactly within the
13929 requested precision. */
13930 static tree
13931 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13932 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13933 const REAL_VALUE_TYPE *min, bool inclusive)
13934 {
13935 tree result = NULL_TREE;
13936
13937 STRIP_NOPS (arg1);
13938 STRIP_NOPS (arg2);
13939
13940 /* To proceed, MPFR must exactly represent the target floating point
13941 format, which only happens when the target base equals two. */
13942 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13943 && host_integerp (arg1, 0)
13944 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13945 {
13946 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
13947 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13948
13949 if (n == (long) n
13950 && real_isfinite (ra)
13951 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
13952 {
13953 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13954 const int prec = fmt->p;
13955 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13956 int inexact;
13957 mpfr_t m;
13958
13959 mpfr_init2 (m, prec);
13960 mpfr_from_real (m, ra, GMP_RNDN);
13961 mpfr_clear_flags ();
13962 inexact = func (m, n, m, rnd);
13963 result = do_mpfr_ckconv (m, type, inexact);
13964 mpfr_clear (m);
13965 }
13966 }
13967
13968 return result;
13969 }
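
/* Illustrative sketch (hedged): jn is defined for all finite
   arguments, while yn requires a strictly positive argument, so the
   respective folders would call this helper along the lines of

     do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, false);
     do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn, &dconst0, false);

   mpfr_jn and mpfr_yn take the order as a host long, matching the
   FUNC signature above.  */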
13970
13971 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13972 the pointer *(ARG_QUO) and return the result. The type is taken
13973 from the type of ARG0 and is used for setting the precision of the
13974 calculation and results. */
13975
13976 static tree
13977 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13978 {
13979 tree const type = TREE_TYPE (arg0);
13980 tree result = NULL_TREE;
13981
13982 STRIP_NOPS (arg0);
13983 STRIP_NOPS (arg1);
13984
13985 /* To proceed, MPFR must exactly represent the target floating point
13986 format, which only happens when the target base equals two. */
13987 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13988 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13989 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13990 {
13991 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13992 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13993
13994 if (real_isfinite (ra0) && real_isfinite (ra1))
13995 {
13996 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13997 const int prec = fmt->p;
13998 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13999 tree result_rem;
14000 long integer_quo;
14001 mpfr_t m0, m1;
14002
14003 mpfr_inits2 (prec, m0, m1, NULL);
14004 mpfr_from_real (m0, ra0, GMP_RNDN);
14005 mpfr_from_real (m1, ra1, GMP_RNDN);
14006 mpfr_clear_flags ();
14007 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
14008 /* Remquo is independent of the rounding mode, so pass
14009 inexact=0 to do_mpfr_ckconv(). */
14010 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
14011 mpfr_clears (m0, m1, NULL);
14012 if (result_rem)
14013 {
14014 /* MPFR calculates quo in the host's long, so it may
14015 return more bits in quo than the target int can hold
14016 if sizeof (host long) > sizeof (target int). This can
14017 happen even for native compilers in LP64 mode. In
14018 these cases, reduce the quo value modulo the largest
14019 number that the target int can hold, leaving one bit
14020 for the sign. */
14021 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
14022 integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));
14023
14024 /* Dereference the quo pointer argument. */
14025 arg_quo = build_fold_indirect_ref (arg_quo);
14026 /* Proceed iff a valid pointer type was passed in. */
14027 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
14028 {
14029 /* Set the value. */
14030 tree result_quo
14031 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
14032 build_int_cst (TREE_TYPE (arg_quo),
14033 integer_quo));
14034 TREE_SIDE_EFFECTS (result_quo) = 1;
14035 /* Combine the quo assignment with the rem. */
14036 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14037 result_quo, result_rem));
14038 }
14039 }
14040 }
14041 }
14042 return result;
14043 }
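
/* Worked example of the quo clamping above (illustrative): on an
   LP64 host targeting a 32-bit int, sizeof (integer_quo) * CHAR_BIT
   is 64 and INT_TYPE_SIZE is 32, so

     integer_quo %= (long) (1UL << 31);

   leaves a value in (-2^31, 2^31), which fits in the target int.
   C99 "%" truncates toward zero, so the sign of quo is preserved.  */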
14044
14045 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
14046 resulting value as a tree with type TYPE. The mpfr precision is
14047 set to the precision of TYPE. We assume that this mpfr function
14048 returns zero if the result could be calculated exactly within the
14049 requested precision. In addition, the integer pointer represented
14050 by ARG_SG will be dereferenced and set to the appropriate signgam
14051 (-1,1) value. */
14052
14053 static tree
14054 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
14055 {
14056 tree result = NULL_TREE;
14057
14058 STRIP_NOPS (arg);
14059
14060 /* To proceed, MPFR must exactly represent the target floating point
14061 format, which only happens when the target base equals two. Also
14062 verify ARG is a constant and that ARG_SG is an int pointer. */
14063 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
14064 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
14065 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
14066 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
14067 {
14068 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
14069
14070 /* In addition to NaN and Inf, the argument cannot be zero or a
14071 negative integer. */
14072 if (real_isfinite (ra)
14073 && ra->cl != rvc_zero
14074 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
14075 {
14076 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
14077 const int prec = fmt->p;
14078 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14079 int inexact, sg;
14080 mpfr_t m;
14081 tree result_lg;
14082
14083 mpfr_init2 (m, prec);
14084 mpfr_from_real (m, ra, GMP_RNDN);
14085 mpfr_clear_flags ();
14086 inexact = mpfr_lgamma (m, &sg, m, rnd);
14087 result_lg = do_mpfr_ckconv (m, type, inexact);
14088 mpfr_clear (m);
14089 if (result_lg)
14090 {
14091 tree result_sg;
14092
14093 /* Dereference the arg_sg pointer argument. */
14094 arg_sg = build_fold_indirect_ref (arg_sg);
14095 /* Assign the signgam value into *arg_sg. */
14096 result_sg = fold_build2 (MODIFY_EXPR,
14097 TREE_TYPE (arg_sg), arg_sg,
14098 build_int_cst (TREE_TYPE (arg_sg), sg));
14099 TREE_SIDE_EFFECTS (result_sg) = 1;
14100 /* Combine the signgam assignment with the lgamma result. */
14101 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14102 result_sg, result_lg));
14103 }
14104 }
14105 }
14106
14107 return result;
14108 }
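
/* Illustrative call shape (hedged): folding lgamma_r (x, &sg) for
   constant x reduces to

     tree folded = do_mpfr_lgamma_r (arg0, arg1, type);

   which, on success, yields a COMPOUND_EXPR that first stores the
   sign of gamma (x) through the pointer and then evaluates to the
   lgamma value itself.  */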
14109
14110 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14111 function FUNC on it and return the resulting value as a tree with
14112 type TYPE. The mpfr precision is set to the precision of TYPE. We
14113 assume that function FUNC returns zero if the result could be
14114 calculated exactly within the requested precision. */
14115
14116 static tree
14117 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
14118 {
14119 tree result = NULL_TREE;
14120
14121 STRIP_NOPS (arg);
14122
14123 /* To proceed, MPFR must exactly represent the target floating point
14124 format, which only happens when the target base equals two. */
14125 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
14126 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
14127 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
14128 {
14129 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
14130 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
14131
14132 if (real_isfinite (re) && real_isfinite (im))
14133 {
14134 const struct real_format *const fmt =
14135 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14136 const int prec = fmt->p;
14137 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14138 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14139 int inexact;
14140 mpc_t m;
14141
14142 mpc_init2 (m, prec);
14143 mpfr_from_real (mpc_realref (m), re, rnd);
14144 mpfr_from_real (mpc_imagref (m), im, rnd);
14145 mpfr_clear_flags ();
14146 inexact = func (m, m, crnd);
14147 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
14148 mpc_clear (m);
14149 }
14150 }
14151
14152 return result;
14153 }
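
/* Illustrative sketch (not a real call site): folding a constant
   ccos call reduces to

     tree folded = do_mpc_arg1 (arg, type, mpc_cos);

   mpc_cos matches the FUNC signature above; its return value is zero
   iff both the real and imaginary results are exact.  */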
14154
14155 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
14156 mpc function FUNC on them and return the resulting value as a tree
14157 with type TYPE. The mpfr precision is set to the precision of
14158 TYPE. We assume that function FUNC returns zero if the result
14159 could be calculated exactly within the requested precision. If
14160 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14161 in the arguments and/or results. */
14162
14163 tree
14164 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14165 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14166 {
14167 tree result = NULL_TREE;
14168
14169 STRIP_NOPS (arg0);
14170 STRIP_NOPS (arg1);
14171
14172 /* To proceed, MPFR must exactly represent the target floating point
14173 format, which only happens when the target base equals two. */
14174 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14175 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14176 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14177 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14178 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14179 {
14180 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14181 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14182 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14183 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14184
14185 if (do_nonfinite
14186 || (real_isfinite (re0) && real_isfinite (im0)
14187 && real_isfinite (re1) && real_isfinite (im1)))
14188 {
14189 const struct real_format *const fmt =
14190 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14191 const int prec = fmt->p;
14192 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14193 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14194 int inexact;
14195 mpc_t m0, m1;
14196
14197 mpc_init2 (m0, prec);
14198 mpc_init2 (m1, prec);
14199 mpfr_from_real (mpc_realref (m0), re0, rnd);
14200 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14201 mpfr_from_real (mpc_realref (m1), re1, rnd);
14202 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14203 mpfr_clear_flags ();
14204 inexact = func (m0, m0, m1, crnd);
14205 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14206 mpc_clear (m0);
14207 mpc_clear (m1);
14208 }
14209 }
14210
14211 return result;
14212 }
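
/* Illustrative sketch (hedged): folding a constant cpow call reduces
   to something along the lines of

     tree folded = do_mpc_arg2 (arg0, arg1, type, folding_initializer,
				mpc_pow);

   mpc_pow matches the FUNC signature above and returns zero iff both
   parts of the result are exact.  A nonzero DO_NONFINITE permits
   folding even when Inf or NaN appears, which is useful when
   evaluating static initializers.  */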
14213
14214 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14215 a normal call should be emitted rather than expanding the function
14216 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14217
14218 static tree
14219 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14220 {
14221 int nargs = gimple_call_num_args (stmt);
14222
14223 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14224 (nargs > 0
14225 ? gimple_call_arg_ptr (stmt, 0)
14226 : &error_mark_node), fcode);
14227 }
14228
14229 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
14230 a normal call should be emitted rather than expanding the function
14231 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14232 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum
14233 length passed as the second argument. */
14234
14235 tree
14236 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14237 enum built_in_function fcode)
14238 {
14239 int nargs = gimple_call_num_args (stmt);
14240
14241 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14242 (nargs > 0
14243 ? gimple_call_arg_ptr (stmt, 0)
14244 : &error_mark_node), maxlen, fcode);
14245 }
14246
14247 /* Builtins with folding operations that operate on "..." arguments
14248 need special handling; we need to store the arguments in a convenient
14249 data structure before attempting any folding. Fortunately there are
14250 only a few builtins that fall into this category. FNDECL is the
14251 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14252 result of the function call is ignored. */
14253
14254 static tree
14255 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14256 bool ignore ATTRIBUTE_UNUSED)
14257 {
14258 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14259 tree ret = NULL_TREE;
14260
14261 switch (fcode)
14262 {
14263 case BUILT_IN_SPRINTF_CHK:
14264 case BUILT_IN_VSPRINTF_CHK:
14265 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14266 break;
14267
14268 case BUILT_IN_SNPRINTF_CHK:
14269 case BUILT_IN_VSNPRINTF_CHK:
14270 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
break;
14271
14272 default:
14273 break;
14274 }
14275 if (ret)
14276 {
14277 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14278 TREE_NO_WARNING (ret) = 1;
14279 return ret;
14280 }
14281 return NULL_TREE;
14282 }
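
/* Illustrative example of why the NOP_EXPR wrapping above matters:
   given a call such as

     sprintf (buf, "hello");

   whose value is ignored, folding replaces the call with an
   expression computing its constant result.  Without TREE_NO_WARNING
   on the wrapper, that replacement could draw a "statement with no
   effect" style warning even though the user wrote an ordinary call.  */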
14283
14284 /* A wrapper function for builtin folding that prevents warnings for
14285 "statement without effect" and the like, caused by removing the
14286 call node before the warning is generated. */
14287
14288 tree
14289 fold_call_stmt (gimple stmt, bool ignore)
14290 {
14291 tree ret = NULL_TREE;
14292 tree fndecl = gimple_call_fndecl (stmt);
14293 location_t loc = gimple_location (stmt);
14294 if (fndecl
14295 && TREE_CODE (fndecl) == FUNCTION_DECL
14296 && DECL_BUILT_IN (fndecl)
14297 && !gimple_call_va_arg_pack_p (stmt))
14298 {
14299 int nargs = gimple_call_num_args (stmt);
14300 tree *args = (nargs > 0
14301 ? gimple_call_arg_ptr (stmt, 0)
14302 : &error_mark_node);
14303
14304 if (avoid_folding_inline_builtin (fndecl))
14305 return NULL_TREE;
14306 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14307 {
14308 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14309 }
14310 else
14311 {
14312 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14313 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14314 if (!ret)
14315 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14316 if (ret)
14317 {
14318 /* Propagate location information from original call to
14319 expansion of builtin. Otherwise things like
14320 maybe_emit_chk_warning, that operate on the expansion
14321 of a builtin, will use the wrong location information. */
14322 if (gimple_has_location (stmt))
14323 {
14324 tree realret = ret;
14325 if (TREE_CODE (ret) == NOP_EXPR)
14326 realret = TREE_OPERAND (ret, 0);
14327 if (CAN_HAVE_LOCATION_P (realret)
14328 && !EXPR_HAS_LOCATION (realret))
14329 SET_EXPR_LOCATION (realret, loc);
14330 return realret;
14331 }
14332 return ret;
14333 }
14334 }
14335 }
14336 return NULL_TREE;
14337 }
14338
14339 /* Look up the function in builtin_decl that corresponds to DECL
14340 and set ASMSPEC as its user assembler name. DECL must be a
14341 function decl that declares a builtin. */
14342
14343 void
14344 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14345 {
14346 tree builtin;
14347 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14348 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14349 && asmspec != 0);
14350
14351 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14352 set_user_assembler_name (builtin, asmspec);
14353 switch (DECL_FUNCTION_CODE (decl))
14354 {
14355 case BUILT_IN_MEMCPY:
14356 init_block_move_fn (asmspec);
14357 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14358 break;
14359 case BUILT_IN_MEMSET:
14360 init_block_clear_fn (asmspec);
14361 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14362 break;
14363 case BUILT_IN_MEMMOVE:
14364 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14365 break;
14366 case BUILT_IN_MEMCMP:
14367 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14368 break;
14369 case BUILT_IN_ABORT:
14370 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14371 break;
14372 case BUILT_IN_FFS:
14373 if (INT_TYPE_SIZE < BITS_PER_WORD)
14374 {
14375 set_user_assembler_libfunc ("ffs", asmspec);
14376 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14377 MODE_INT, 0), "ffs");
14378 }
14379 break;
14380 default:
14381 break;
14382 }
14383 }
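
/* For instance (illustrative), a translation unit containing

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("__my_memcpy");

   ends up here, so that both the builtin expansion and the block-move
   libcall emit references to "__my_memcpy" instead of "memcpy".  */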
14384
14385 /* Return true if DECL is a builtin that expands to a constant or similarly
14386 simple code. */
14387 bool
14388 is_simple_builtin (tree decl)
14389 {
14390 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14391 switch (DECL_FUNCTION_CODE (decl))
14392 {
14393 /* Builtins that expand to constants. */
14394 case BUILT_IN_CONSTANT_P:
14395 case BUILT_IN_EXPECT:
14396 case BUILT_IN_OBJECT_SIZE:
14397 case BUILT_IN_UNREACHABLE:
14398 /* Simple register moves or loads from stack. */
14399 case BUILT_IN_ASSUME_ALIGNED:
14400 case BUILT_IN_RETURN_ADDRESS:
14401 case BUILT_IN_EXTRACT_RETURN_ADDR:
14402 case BUILT_IN_FROB_RETURN_ADDR:
14403 case BUILT_IN_RETURN:
14404 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14405 case BUILT_IN_FRAME_ADDRESS:
14406 case BUILT_IN_VA_END:
14407 case BUILT_IN_STACK_SAVE:
14408 case BUILT_IN_STACK_RESTORE:
14409 /* Exception state returns or moves registers around. */
14410 case BUILT_IN_EH_FILTER:
14411 case BUILT_IN_EH_POINTER:
14412 case BUILT_IN_EH_COPY_VALUES:
14413 return true;
14414
14415 default:
14416 return false;
14417 }
14418
14419 return false;
14420 }
14421
14422 /* Return true if DECL is a builtin that is not expensive, i.e., one that
14423 is most probably expanded inline into reasonably simple code. This is
14424 a superset of is_simple_builtin. */
14425 bool
14426 is_inexpensive_builtin (tree decl)
14427 {
14428 if (!decl)
14429 return false;
14430 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14431 return true;
14432 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14433 switch (DECL_FUNCTION_CODE (decl))
14434 {
14435 case BUILT_IN_ABS:
14436 case BUILT_IN_ALLOCA:
14437 case BUILT_IN_ALLOCA_WITH_ALIGN:
14438 case BUILT_IN_BSWAP16:
14439 case BUILT_IN_BSWAP32:
14440 case BUILT_IN_BSWAP64:
14441 case BUILT_IN_CLZ:
14442 case BUILT_IN_CLZIMAX:
14443 case BUILT_IN_CLZL:
14444 case BUILT_IN_CLZLL:
14445 case BUILT_IN_CTZ:
14446 case BUILT_IN_CTZIMAX:
14447 case BUILT_IN_CTZL:
14448 case BUILT_IN_CTZLL:
14449 case BUILT_IN_FFS:
14450 case BUILT_IN_FFSIMAX:
14451 case BUILT_IN_FFSL:
14452 case BUILT_IN_FFSLL:
14453 case BUILT_IN_IMAXABS:
14454 case BUILT_IN_FINITE:
14455 case BUILT_IN_FINITEF:
14456 case BUILT_IN_FINITEL:
14457 case BUILT_IN_FINITED32:
14458 case BUILT_IN_FINITED64:
14459 case BUILT_IN_FINITED128:
14460 case BUILT_IN_FPCLASSIFY:
14461 case BUILT_IN_ISFINITE:
14462 case BUILT_IN_ISINF_SIGN:
14463 case BUILT_IN_ISINF:
14464 case BUILT_IN_ISINFF:
14465 case BUILT_IN_ISINFL:
14466 case BUILT_IN_ISINFD32:
14467 case BUILT_IN_ISINFD64:
14468 case BUILT_IN_ISINFD128:
14469 case BUILT_IN_ISNAN:
14470 case BUILT_IN_ISNANF:
14471 case BUILT_IN_ISNANL:
14472 case BUILT_IN_ISNAND32:
14473 case BUILT_IN_ISNAND64:
14474 case BUILT_IN_ISNAND128:
14475 case BUILT_IN_ISNORMAL:
14476 case BUILT_IN_ISGREATER:
14477 case BUILT_IN_ISGREATEREQUAL:
14478 case BUILT_IN_ISLESS:
14479 case BUILT_IN_ISLESSEQUAL:
14480 case BUILT_IN_ISLESSGREATER:
14481 case BUILT_IN_ISUNORDERED:
14482 case BUILT_IN_VA_ARG_PACK:
14483 case BUILT_IN_VA_ARG_PACK_LEN:
14484 case BUILT_IN_VA_COPY:
14485 case BUILT_IN_TRAP:
14486 case BUILT_IN_SAVEREGS:
14487 case BUILT_IN_POPCOUNTL:
14488 case BUILT_IN_POPCOUNTLL:
14489 case BUILT_IN_POPCOUNTIMAX:
14490 case BUILT_IN_POPCOUNT:
14491 case BUILT_IN_PARITYL:
14492 case BUILT_IN_PARITYLL:
14493 case BUILT_IN_PARITYIMAX:
14494 case BUILT_IN_PARITY:
14495 case BUILT_IN_LABS:
14496 case BUILT_IN_LLABS:
14497 case BUILT_IN_PREFETCH:
14498 return true;
14499
14500 default:
14501 return is_simple_builtin (decl);
14502 }
14503
14504 return false;
14505 }
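
/* Illustrative use (hedged): an inlining heuristic can treat calls
   to such builtins as very cheap, e.g.

     if (is_inexpensive_builtin (gimple_call_fndecl (stmt)))
       cost = 1;

   rather than charging a full call's worth of size and time.  */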