/* Source: gcc.git / gcc / builtins.c.
   Related ChangeLog entry: builtin-types.def (BT_FN_VOID_CONST_PTR): New.  */
1 /* Expand builtin functions.
2 Copyright (C) 1988-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "realmpfr.h"
28 #include "gimple.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "predict.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "langhooks.h"
45 #include "basic-block.h"
46 #include "tree-ssanames.h"
47 #include "tree-dfa.h"
48 #include "value-prof.h"
49 #include "diagnostic-core.h"
50 #include "builtins.h"
51 #include "ubsan.h"
52 #include "cilk.h"
53
54
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

/* Per-target builtin state; with SWITCHABLE_TARGET there may be several.  */
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Stringize every builtin's enumerator to build the name table.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

/* Forward declarations for the helpers, expanders and folders defined
   later in this file.  */
static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strcat (location_t, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);

/* _chk (buffer-overflow-checking) variants and object-size support.  */
static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
				  enum built_in_function);
static bool init_target_chars (void);

/* Cached target representations of characters used when folding
   printf-family format strings; filled in by init_target_chars.  */
static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];

/* MPFR-based constant folding of math builtins.  */
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
227
228 /* Return true if NAME starts with __builtin_ or __sync_. */
229
230 static bool
231 is_builtin_name (const char *name)
232 {
233 if (strncmp (name, "__builtin_", 10) == 0)
234 return true;
235 if (strncmp (name, "__sync_", 7) == 0)
236 return true;
237 if (strncmp (name, "__atomic_", 9) == 0)
238 return true;
239 if (flag_enable_cilkplus
240 && (!strcmp (name, "__cilkrts_detach")
241 || !strcmp (name, "__cilkrts_pop_frame")))
242 return true;
243 return false;
244 }
245
246
247 /* Return true if DECL is a function symbol representing a built-in. */
248
249 bool
250 is_builtin_fn (tree decl)
251 {
252 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
253 }
254
255 /* By default we assume that c99 functions are present at the runtime,
256 but sincos is not. */
257 bool
258 default_libc_has_function (enum function_class fn_class)
259 {
260 if (fn_class == function_c94
261 || fn_class == function_c99_misc
262 || fn_class == function_c99_math_complex)
263 return true;
264
265 return false;
266 }
267
/* libc_has_function hook for glibc: every function class GCC queries
   is reported as available.  */
bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}
273
/* libc_has_function hook for C libraries without C99 support: no
   queried function class is reported as available.  */
bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}
279
280 /* Return true if NODE should be considered for inline expansion regardless
281 of the optimization level. This means whenever a function is invoked with
282 its "internal" name, which normally contains the prefix "__builtin". */
283
284 static bool
285 called_as_built_in (tree node)
286 {
287 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
288 we want the name used to call the function, not the name it
289 will have. */
290 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
291 return is_builtin_name (name);
292 }
293
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int inner, align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    /* Labels contribute no alignment knowledge beyond BITS_PER_UNIT.  */
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      /* A BIT_AND_EXPR against a constant mask encodes an alignment
	 guarantee: the cleared low bits of the mask are known zero in
	 the address.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  inner = ~0U;
  while (offset)
    {
      tree next_offset;

      if (TREE_CODE (offset) == PLUS_EXPR)
	{
	  next_offset = TREE_OPERAND (offset, 0);
	  offset = TREE_OPERAND (offset, 1);
	}
      else
	next_offset = NULL;
      if (host_integerp (offset, 1))
	{
	  /* Any overflow in calculating offset_bits won't change
	     the alignment.  */
	  unsigned offset_bits
	    = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	  if (offset_bits)
	    inner = MIN (inner, (offset_bits & -offset_bits));
	}
      else if (TREE_CODE (offset) == MULT_EXPR
	       && host_integerp (TREE_OPERAND (offset, 1), 1))
	{
	  /* Any overflow in calculating offset_factor won't change
	     the alignment.  */
	  unsigned offset_factor
	    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
	       * BITS_PER_UNIT);

	  if (offset_factor)
	    inner = MIN (inner, (offset_factor & -offset_factor));
	}
      else
	{
	  /* An unanalyzable variable offset guarantees only byte
	     alignment.  */
	  inner = MIN (inner, BITS_PER_UNIT);
	  break;
	}
      offset = next_offset;
    }
  /* Alignment is innermost object alignment adjusted by the constant
     and non-constant offset parts.  */
  align = MIN (align, inner);

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
475
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  /* ADDR_P is false: EXP is an actual access, not just an address.  */
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
487
488 /* Return the alignment in bits of EXP, an object. */
489
490 unsigned int
491 get_object_alignment (tree exp)
492 {
493 unsigned HOST_WIDE_INT bitpos = 0;
494 unsigned int align;
495
496 get_object_alignment_1 (exp, &align, &bitpos);
497
498 /* align and bitpos now specify known low bits of the pointer.
499 ptr & (align - 1) == bitpos. */
500
501 if (bitpos != 0)
502 align = (bitpos & -bitpos);
503 return align;
504 }
505
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    /* &object: defer to the object's own alignment (ADDR_P true since
       only the address is taken, not the access performed).  */
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      /* A literal address: every bit is known exactly.  */
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  /* Conservative fallback: byte alignment and no known bit offset.  */
  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
554
555 /* Return the alignment in bits of EXP, a pointer valued expression.
556 The alignment returned is, by default, the alignment of the thing that
557 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
558
559 Otherwise, look at the expression to see if we can do better, i.e., if the
560 expression is actually pointing at an object whose alignment is tighter. */
561
562 unsigned int
563 get_pointer_alignment (tree exp)
564 {
565 unsigned HOST_WIDE_INT bitpos = 0;
566 unsigned int align;
567
568 get_pointer_alignment_1 (exp, &align, &bitpos);
569
570 /* align and bitpos now specify known low bits of the pointer.
571 ptr & (align - 1) == bitpos. */
572
573 if (bitpos != 0)
574 align = (bitpos & -bitpos);
575
576 return align;
577 }
578
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* For a conditional, try both arms; if they agree on a constant
     length, the condition need not be evaluated (when permitted).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For (e1, e2) the length is that of e2, provided e1 need not be
     evaluated.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
680
681 /* Return a char pointer for a C string if it is a string constant
682 or sum of string constant and integer constant. */
683
684 static const char *
685 c_getstr (tree src)
686 {
687 tree offset_node;
688
689 src = string_constant (src, &offset_node);
690 if (src == 0)
691 return 0;
692
693 if (offset_node == 0)
694 return TREE_STRING_POINTER (src);
695 else if (!host_integerp (offset_node, 1)
696 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
697 return 0;
698
699 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
700 }
701
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map host byte index I to target bit position J, honouring both
	 the word order and, when they differ, the in-word byte order of
	 the target.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);

      /* Once the terminating NUL has been read, CH stays zero so all
	 remaining target bytes are filled with zeros.  */
      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
734
735 /* Cast a target constant CST to target CHAR and if that value fits into
736 host char type, return zero and put that value into variable pointed to by
737 P. */
738
739 static int
740 target_char_cast (tree cst, char *p)
741 {
742 unsigned HOST_WIDE_INT val, hostval;
743
744 if (TREE_CODE (cst) != INTEGER_CST
745 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
746 return 1;
747
748 val = TREE_INT_CST_LOW (cst);
749 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
750 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
751
752 hostval = val;
753 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
754 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
755
756 if (val != hostval)
757 return 1;
758
759 *p = hostval;
760 return 0;
761 }
762
763 /* Similar to save_expr, but assumes that arbitrary code is not executed
764 in between the multiple evaluations. In particular, we assume that a
765 non-addressable local variable will not be modified. */
766
767 static tree
768 builtin_save_expr (tree exp)
769 {
770 if (TREE_CODE (exp) == SSA_NAME
771 || (TREE_ADDRESSABLE (exp) == 0
772 && (TREE_CODE (exp) == PARM_DECL
773 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
774 return exp;
775
776 return save_expr (exp);
777 }
778
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

  /* The starting frame address may be supplied by the target;
     otherwise it is derived from the (hard) frame pointer.  */
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
862
863 /* Alias set used for setjmp buffer. */
864 static alias_set_type setjmp_alias_set = -1;
865
866 /* Construct the leading half of a __builtin_setjmp call. Control will
867 return to RECEIVER_LABEL. This is also called directly by the SJLJ
868 exception handling code. */
869
870 void
871 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
872 {
873 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
874 rtx stack_save;
875 rtx mem;
876
877 if (setjmp_alias_set == -1)
878 setjmp_alias_set = new_alias_set ();
879
880 buf_addr = convert_memory_address (Pmode, buf_addr);
881
882 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
883
884 /* We store the frame pointer and the address of receiver_label in
885 the buffer and use the rest of it for the stack save area, which
886 is machine-dependent. */
887
888 mem = gen_rtx_MEM (Pmode, buf_addr);
889 set_mem_alias_set (mem, setjmp_alias_set);
890 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
891
892 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
893 GET_MODE_SIZE (Pmode))),
894 set_mem_alias_set (mem, setjmp_alias_set);
895
896 emit_move_insn (validize_mem (mem),
897 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
898
899 stack_save = gen_rtx_MEM (sa_mode,
900 plus_constant (Pmode, buf_addr,
901 2 * GET_MODE_SIZE (Pmode)));
902 set_mem_alias_set (stack_save, setjmp_alias_set);
903 emit_stack_save (SAVE_NONLOCAL, &stack_save);
904
905 /* If there is further processing to do, do it. */
906 #ifdef HAVE_builtin_setjmp_setup
907 if (HAVE_builtin_setjmp_setup)
908 emit_insn (gen_builtin_setjmp_setup (buf_addr));
909 #endif
910
911 /* We have a nonlocal label. */
912 cfun->has_nonlocal_label = 1;
913 }
914
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.

   Emits the code that runs when control arrives at the setjmp receiver:
   re-establishes the frame pointer (and argument pointer if needed),
   then emits a scheduling blockage so none of it can be reordered.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be decrementing fp by
       STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      /* Only restore if no arg-pointer -> frame-pointer elimination
	 was found above.  */
      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Prefer the target's setjmp receiver pattern; fall back to its
     nonlocal-goto receiver pattern; otherwise emit nothing extra.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  Similarly, we must block
     (frame-related) register values to be used across this code.  */
  emit_insn (gen_blockage ());
}
998
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   BUF_ADDR is the buffer filled in by expand_builtin_setjmp_setup;
   VALUE must be const1_rtx (enforced below), since that is what
   builtin_setjmp returns on the longjmp path.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  /* Remember where the jump-emitting code starts, so the search loop at
     the end can assert it finds a jump before walking past it.  */
  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* Buffer layout matches expand_builtin_setjmp_setup: frame value,
	 label address, then the stack save area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  /* Load the label address before clobbering the frame, since
	     LAB is a MEM relative to the old frame's buffer address.  */
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
1086
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.

   EXP is the CALL_EXPR.  Returns const0_rtx on success (the emitted code
   never falls through at run time), or NULL_RTX if the argument list does
   not match (pointer, pointer).  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* The save area holds the frame pointer in word 0 and the stack save
     data one pointer-word later.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      /* Load the label before clobbering the frame pointer below.  */
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1168
1169 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1170 (not all will be used on all machines) that was passed to __builtin_setjmp.
1171 It updates the stack pointer in that block to correspond to the current
1172 stack pointer. */
1173
1174 static void
1175 expand_builtin_update_setjmp_buf (rtx buf_addr)
1176 {
1177 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1178 rtx stack_save
1179 = gen_rtx_MEM (sa_mode,
1180 memory_address
1181 (sa_mode,
1182 plus_constant (Pmode, buf_addr,
1183 2 * GET_MODE_SIZE (Pmode))));
1184
1185 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1186 }
1187
1188 /* Expand a call to __builtin_prefetch. For a target that does not support
1189 data prefetch, evaluate the memory address argument in case it has side
1190 effects. */
1191
1192 static void
1193 expand_builtin_prefetch (tree exp)
1194 {
1195 tree arg0, arg1, arg2;
1196 int nargs;
1197 rtx op0, op1, op2;
1198
1199 if (!validate_arglist (exp, POINTER_TYPE, 0))
1200 return;
1201
1202 arg0 = CALL_EXPR_ARG (exp, 0);
1203
1204 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1205 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1206 locality). */
1207 nargs = call_expr_nargs (exp);
1208 if (nargs > 1)
1209 arg1 = CALL_EXPR_ARG (exp, 1);
1210 else
1211 arg1 = integer_zero_node;
1212 if (nargs > 2)
1213 arg2 = CALL_EXPR_ARG (exp, 2);
1214 else
1215 arg2 = integer_three_node;
1216
1217 /* Argument 0 is an address. */
1218 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1219
1220 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1221 if (TREE_CODE (arg1) != INTEGER_CST)
1222 {
1223 error ("second argument to %<__builtin_prefetch%> must be a constant");
1224 arg1 = integer_zero_node;
1225 }
1226 op1 = expand_normal (arg1);
1227 /* Argument 1 must be either zero or one. */
1228 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1229 {
1230 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1231 " using zero");
1232 op1 = const0_rtx;
1233 }
1234
1235 /* Argument 2 (locality) must be a compile-time constant int. */
1236 if (TREE_CODE (arg2) != INTEGER_CST)
1237 {
1238 error ("third argument to %<__builtin_prefetch%> must be a constant");
1239 arg2 = integer_zero_node;
1240 }
1241 op2 = expand_normal (arg2);
1242 /* Argument 2 must be 0, 1, 2, or 3. */
1243 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1244 {
1245 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1246 op2 = const0_rtx;
1247 }
1248
1249 #ifdef HAVE_prefetch
1250 if (HAVE_prefetch)
1251 {
1252 struct expand_operand ops[3];
1253
1254 create_address_operand (&ops[0], op0);
1255 create_integer_operand (&ops[1], INTVAL (op1));
1256 create_integer_operand (&ops[2], INTVAL (op2));
1257 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1258 return;
1259 }
1260 #endif
1261
1262 /* Don't do anything with direct references to volatile memory, but
1263 generate code to handle other side effects. */
1264 if (!MEM_P (op0) && side_effects_p (op0))
1265 emit_insn (op0);
1266 }
1267
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.

   The returned MEM is in BLKmode, carries attributes derived from EXP
   where possible, and is placed in alias set 0 since builtin string
   operations may alias anything.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      /* Unknown size: use a zero-based range with no upper bound.  */
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  /* Alias set 0: the access may alias anything.  */
  set_mem_alias_set (mem, 0);
  return mem;
}
1322 \f
1323 /* Built-in functions to perform an untyped call and return. */
1324
1325 #define apply_args_mode \
1326 (this_target_builtins->x_apply_args_mode)
1327 #define apply_result_mode \
1328 (this_target_builtins->x_apply_result_mode)
1329
1330 /* Return the size required for the block returned by __builtin_apply_args,
1331 and initialize apply_args_mode. */
1332
1333 static int
1334 apply_args_size (void)
1335 {
1336 static int size = -1;
1337 int align;
1338 unsigned int regno;
1339 enum machine_mode mode;
1340
1341 /* The values computed by this function never change. */
1342 if (size < 0)
1343 {
1344 /* The first value is the incoming arg-pointer. */
1345 size = GET_MODE_SIZE (Pmode);
1346
1347 /* The second value is the structure value address unless this is
1348 passed as an "invisible" first argument. */
1349 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1350 size += GET_MODE_SIZE (Pmode);
1351
1352 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1353 if (FUNCTION_ARG_REGNO_P (regno))
1354 {
1355 mode = targetm.calls.get_raw_arg_mode (regno);
1356
1357 gcc_assert (mode != VOIDmode);
1358
1359 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1360 if (size % align != 0)
1361 size = CEIL (size, align) * align;
1362 size += GET_MODE_SIZE (mode);
1363 apply_args_mode[regno] = mode;
1364 }
1365 else
1366 {
1367 apply_args_mode[regno] = VOIDmode;
1368 }
1369 }
1370 return size;
1371 }
1372
1373 /* Return the size required for the block returned by __builtin_apply,
1374 and initialize apply_result_mode. */
1375
1376 static int
1377 apply_result_size (void)
1378 {
1379 static int size = -1;
1380 int align, regno;
1381 enum machine_mode mode;
1382
1383 /* The values computed by this function never change. */
1384 if (size < 0)
1385 {
1386 size = 0;
1387
1388 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1389 if (targetm.calls.function_value_regno_p (regno))
1390 {
1391 mode = targetm.calls.get_raw_result_mode (regno);
1392
1393 gcc_assert (mode != VOIDmode);
1394
1395 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1396 if (size % align != 0)
1397 size = CEIL (size, align) * align;
1398 size += GET_MODE_SIZE (mode);
1399 apply_result_mode[regno] = mode;
1400 }
1401 else
1402 apply_result_mode[regno] = VOIDmode;
1403
1404 /* Allow targets that use untyped_call and untyped_return to override
1405 the size so that machine-specific information can be stored here. */
1406 #ifdef APPLY_RESULT_SIZE
1407 size = APPLY_RESULT_SIZE;
1408 #endif
1409 }
1410 return size;
1411 }
1412
1413 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.

   Returns a PARALLEL of SETs: for saving, register -> memory slot;
   for restoring, memory slot -> register.  Slot offsets mirror the
   layout computed by apply_result_size.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep slot alignment in sync with apply_result_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	/* When restoring, the value comes back in the incoming register.  */
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
1442 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1443
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.

   Allocates a stack block laid out per apply_args_size (arg-pointer,
   optional structure value address, then argument registers) and emits
   moves saving each into it.  Returns a register holding the block's
   address.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	/* Offset computation mirrors apply_args_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1504
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.

   Returns a register holding the block's address; the result is cached
   in apply_args_value so repeated uses share one save block.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Expand into a detached sequence so the insns can be relocated.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1549
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.

   FUNCTION is the rtx for the function to call; ARGUMENTS is the address
   of a block built by __builtin_apply_args; ARGSIZE is the number of
   bytes of stack arguments to copy.  Returns (in ptr_mode) the address
   of a block holding the saved return-value registers, suitable for
   expand_builtin_return.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	/* Offset computation mirrors apply_args_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1716
/* Perform an untyped return.

   RESULT is the address of the block of saved return-value registers
   produced by expand_builtin_apply.  Reloads each saved register and
   jumps to the function's return point.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Ensure apply_result_mode is initialized.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Offset computation mirrors apply_result_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate the USEs in a separate sequence so they can all be
	   emitted together just before the return.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1766
1767 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1768
1769 static enum type_class
1770 type_to_class (tree type)
1771 {
1772 switch (TREE_CODE (type))
1773 {
1774 case VOID_TYPE: return void_type_class;
1775 case INTEGER_TYPE: return integer_type_class;
1776 case ENUMERAL_TYPE: return enumeral_type_class;
1777 case BOOLEAN_TYPE: return boolean_type_class;
1778 case POINTER_TYPE: return pointer_type_class;
1779 case REFERENCE_TYPE: return reference_type_class;
1780 case OFFSET_TYPE: return offset_type_class;
1781 case REAL_TYPE: return real_type_class;
1782 case COMPLEX_TYPE: return complex_type_class;
1783 case FUNCTION_TYPE: return function_type_class;
1784 case METHOD_TYPE: return method_type_class;
1785 case RECORD_TYPE: return record_type_class;
1786 case UNION_TYPE:
1787 case QUAL_UNION_TYPE: return union_type_class;
1788 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1789 ? string_type_class : array_type_class);
1790 case LANG_TYPE: return lang_type_class;
1791 default: return no_type_class;
1792 }
1793 }
1794
1795 /* Expand a call EXP to __builtin_classify_type. */
1796
1797 static rtx
1798 expand_builtin_classify_type (tree exp)
1799 {
1800 if (call_expr_nargs (exp))
1801 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1802 return GEN_INT (no_type_class);
1803 }
1804
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case, so a single
   CASE_MATHFN (BUILT_IN_FOO) expands to case labels for FOO, FOOF and
   FOOL and records all three codes in fcode/fcodef/fcodel.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix, for the
   reentrant variants such as lgamma_r/lgammaf_r/lgammal_r.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1818
/* Return mathematic function equivalent to FN but operating directly on TYPE,
   if available.  If IMPLICIT is true use the implicit builtin declaration,
   otherwise use the explicit declaration.  If we can't do the conversion,
   return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  /* The double/float/long double codes for FN, filled in by the
     CASE_MATHFN expansions; fcode2 is the one selected by TYPE.  */
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
    CASE_MATHFN (BUILT_IN_ACOS)
    CASE_MATHFN (BUILT_IN_ACOSH)
    CASE_MATHFN (BUILT_IN_ASIN)
    CASE_MATHFN (BUILT_IN_ASINH)
    CASE_MATHFN (BUILT_IN_ATAN)
    CASE_MATHFN (BUILT_IN_ATAN2)
    CASE_MATHFN (BUILT_IN_ATANH)
    CASE_MATHFN (BUILT_IN_CBRT)
    CASE_MATHFN (BUILT_IN_CEIL)
    CASE_MATHFN (BUILT_IN_CEXPI)
    CASE_MATHFN (BUILT_IN_COPYSIGN)
    CASE_MATHFN (BUILT_IN_COS)
    CASE_MATHFN (BUILT_IN_COSH)
    CASE_MATHFN (BUILT_IN_DREM)
    CASE_MATHFN (BUILT_IN_ERF)
    CASE_MATHFN (BUILT_IN_ERFC)
    CASE_MATHFN (BUILT_IN_EXP)
    CASE_MATHFN (BUILT_IN_EXP10)
    CASE_MATHFN (BUILT_IN_EXP2)
    CASE_MATHFN (BUILT_IN_EXPM1)
    CASE_MATHFN (BUILT_IN_FABS)
    CASE_MATHFN (BUILT_IN_FDIM)
    CASE_MATHFN (BUILT_IN_FLOOR)
    CASE_MATHFN (BUILT_IN_FMA)
    CASE_MATHFN (BUILT_IN_FMAX)
    CASE_MATHFN (BUILT_IN_FMIN)
    CASE_MATHFN (BUILT_IN_FMOD)
    CASE_MATHFN (BUILT_IN_FREXP)
    CASE_MATHFN (BUILT_IN_GAMMA)
    CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
    CASE_MATHFN (BUILT_IN_HUGE_VAL)
    CASE_MATHFN (BUILT_IN_HYPOT)
    CASE_MATHFN (BUILT_IN_ILOGB)
    CASE_MATHFN (BUILT_IN_ICEIL)
    CASE_MATHFN (BUILT_IN_IFLOOR)
    CASE_MATHFN (BUILT_IN_INF)
    CASE_MATHFN (BUILT_IN_IRINT)
    CASE_MATHFN (BUILT_IN_IROUND)
    CASE_MATHFN (BUILT_IN_ISINF)
    CASE_MATHFN (BUILT_IN_J0)
    CASE_MATHFN (BUILT_IN_J1)
    CASE_MATHFN (BUILT_IN_JN)
    CASE_MATHFN (BUILT_IN_LCEIL)
    CASE_MATHFN (BUILT_IN_LDEXP)
    CASE_MATHFN (BUILT_IN_LFLOOR)
    CASE_MATHFN (BUILT_IN_LGAMMA)
    CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (BUILT_IN_LLCEIL)
    CASE_MATHFN (BUILT_IN_LLFLOOR)
    CASE_MATHFN (BUILT_IN_LLRINT)
    CASE_MATHFN (BUILT_IN_LLROUND)
    CASE_MATHFN (BUILT_IN_LOG)
    CASE_MATHFN (BUILT_IN_LOG10)
    CASE_MATHFN (BUILT_IN_LOG1P)
    CASE_MATHFN (BUILT_IN_LOG2)
    CASE_MATHFN (BUILT_IN_LOGB)
    CASE_MATHFN (BUILT_IN_LRINT)
    CASE_MATHFN (BUILT_IN_LROUND)
    CASE_MATHFN (BUILT_IN_MODF)
    CASE_MATHFN (BUILT_IN_NAN)
    CASE_MATHFN (BUILT_IN_NANS)
    CASE_MATHFN (BUILT_IN_NEARBYINT)
    CASE_MATHFN (BUILT_IN_NEXTAFTER)
    CASE_MATHFN (BUILT_IN_NEXTTOWARD)
    CASE_MATHFN (BUILT_IN_POW)
    CASE_MATHFN (BUILT_IN_POWI)
    CASE_MATHFN (BUILT_IN_POW10)
    CASE_MATHFN (BUILT_IN_REMAINDER)
    CASE_MATHFN (BUILT_IN_REMQUO)
    CASE_MATHFN (BUILT_IN_RINT)
    CASE_MATHFN (BUILT_IN_ROUND)
    CASE_MATHFN (BUILT_IN_SCALB)
    CASE_MATHFN (BUILT_IN_SCALBLN)
    CASE_MATHFN (BUILT_IN_SCALBN)
    CASE_MATHFN (BUILT_IN_SIGNBIT)
    CASE_MATHFN (BUILT_IN_SIGNIFICAND)
    CASE_MATHFN (BUILT_IN_SIN)
    CASE_MATHFN (BUILT_IN_SINCOS)
    CASE_MATHFN (BUILT_IN_SINH)
    CASE_MATHFN (BUILT_IN_SQRT)
    CASE_MATHFN (BUILT_IN_TAN)
    CASE_MATHFN (BUILT_IN_TANH)
    CASE_MATHFN (BUILT_IN_TGAMMA)
    CASE_MATHFN (BUILT_IN_TRUNC)
    CASE_MATHFN (BUILT_IN_Y0)
    CASE_MATHFN (BUILT_IN_Y1)
    CASE_MATHFN (BUILT_IN_YN)

    default:
      /* FN is not a math builtin with F/L variants.  */
      return NULL_TREE;
    }

  /* Pick the variant whose argument type matches TYPE's main variant.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  /* When asked for an implicitly usable builtin, refuse one that is
     only explicitly declared.  */
  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
1937
1938 /* Like mathfn_built_in_1(), but always use the implicit array. */
1939
1940 tree
1941 mathfn_built_in (tree type, enum built_in_function fn)
1942 {
1943 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1944 }
1945
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  NaN is the only value that
     compares unequal to itself, so TARGET == TARGET branches past the
     errno store for every non-NaN result.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      /* No target-specific errno location; assume a global symbol.  */
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
		      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1989
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  /* Whether the expansion must also check the result and set errno.  */
  bool errno_set = false;
  /* Whether a wider-mode insn may be used (sqrt only).  */
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the optab for this builtin and decide whether errno
     handling is needed.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt only sets errno for negative arguments.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Without -fmath-errno, or without NaNs to flag the error with,
     no errno check is possible or wanted.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, result);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2114
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns, result;
  /* Most of these builtins take two REAL_TYPE arguments; the scalbn
     family takes an integer second argument instead.  */
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* FALLTHRU */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* Select the optab implementing this builtin.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb/scalbn/scalbln scale by a power of FLT_RADIX; the optab
	 only implements radix 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through...  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* When optimizing for size, the errno check makes inline expansion
     a loss; emit a library call instead.  */
  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
			 result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
2223
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, insns, result;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  enum machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  /* fma is currently the only ternary math builtin.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list, so re-expansion does not
     duplicate side effects.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
2296
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Prefer the combined sincos insn for both sin and cos.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  /* The sincos insn produces both values; request only the one
	     this builtin needs and discard the other.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2396
/* Given an interclass math builtin decl FNDECL and it's argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  enum machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* builtin_optab stays unknown_optab (null) for the no-optab cases
     above, in which case no insn is available.  */
  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
2440
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      /* Remember where we are so the partial expansion can be undone
	 if the insn cannot be emitted after all.  */
      rtx last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Expansion failed: discard the emitted insns and restore the
	 original (unsaved) argument before falling back to a call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2491
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  /* sincos (x, double *sinp, double *cosp).  */
  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs to dereference the sin/cos output pointers,
     using a may-alias pointer type so aliasing stays conservative.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  /* sincos returns void; any value will do.  */
  return const0_rtx;
}
2545
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      /* Fallback 1: call the library sincos, storing into two
	 temporaries whose addresses are passed as the out-pointers.  */
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      /* Fallback 2: no sincos at all; emit cexp (0 + arg*i) instead.  */
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* Build the complex argument 0 + arg*i.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos (arg) + sin (arg)*i.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2654
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
2673
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the conversion optab and the floating-point rounding
     builtin to fall back on.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Pick the libm function name matching the argument type.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2810
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint, llrint, irint, lround, llround, iround).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;
  /* Non-BUILT_IN_NONE when the int-returning variants may fall back to
     the corresponding long-returning builtin.  */
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the conversion optab; the "i" variants additionally record
     their "l" fallback.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
						fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      /* The fallback returns long; convert to the mode the caller
	 expects.  */
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
2911
2912 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2913 a normal call should be emitted rather than expanding the function
2914 in-line. EXP is the expression that is a call to the builtin
2915 function; if convenient, the result should be placed in TARGET. */
2916
2917 static rtx
2918 expand_builtin_powi (tree exp, rtx target)
2919 {
2920 tree arg0, arg1;
2921 rtx op0, op1;
2922 enum machine_mode mode;
2923 enum machine_mode mode2;
2924
2925 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2926 return NULL_RTX;
2927
2928 arg0 = CALL_EXPR_ARG (exp, 0);
2929 arg1 = CALL_EXPR_ARG (exp, 1);
2930 mode = TYPE_MODE (TREE_TYPE (exp));
2931
2932 /* Emit a libcall to libgcc. */
2933
2934 /* Mode of the 2nd argument must match that of an int. */
2935 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2936
2937 if (target == NULL_RTX)
2938 target = gen_reg_rtx (mode);
2939
2940 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2941 if (GET_MODE (op0) != mode)
2942 op0 = convert_to_mode (mode, op0, 0);
2943 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2944 if (GET_MODE (op1) != mode2)
2945 op1 = convert_to_mode (mode2, op1, 0);
2946
2947 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2948 target, LCT_CONST, mode, 2,
2949 op0, mode, op1, mode2);
2950
2951 return target;
2952 }
2953
/* Expand expression EXP, which is a call to the strlen builtin.  Return
   NULL_RTX if we failed, the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg, before_strlen;
      enum machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.
	 Try successively wider modes until the target has a strlen
	 pattern for one of them.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
	return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (pat) != Pmode)
	    pat = convert_to_mode (Pmode, pat,
				   POINTERS_EXTEND_UNSIGNED);
#endif
	  emit_move_insn (src_reg, pat);
	}
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in ahead of the strlen
	 insns emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
	target = ops[0].value;
      else if (target != 0)
	convert_move (target, ops[0].value, 0);
      else
	target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
3057
3058 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3059 bytes from constant string DATA + OFFSET and return it as target
3060 constant. */
3061
3062 static rtx
3063 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3064 enum machine_mode mode)
3065 {
3066 const char *str = (const char *) data;
3067
3068 gcc_assert (offset >= 0
3069 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3070 <= strlen (str) + 1));
3071
3072 return c_readstr (str + offset, mode);
3073 }
3074
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Pick up value-profile hints about the expected alignment and
	 size of this string operation.  */
      if (currently_expanding_gimple_stmt)
        stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
				         CALL_EXPR_TAILCALL (exp)
				         ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* If the block move did not hand back an address, rebuild memcpy's
	 return value from DEST_MEM.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3154
3155 /* Expand a call EXP to the mempcpy builtin.
3156 Return NULL_RTX if we failed; the caller should emit a normal call,
3157 otherwise try to get the result in TARGET, if convenient (and in
3158 mode MODE if that's convenient). If ENDP is 0 return the
3159 destination pointer, if ENDP is 1 return the end pointer ala
3160 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3161 stpcpy. */
3162
3163 static rtx
3164 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3165 {
3166 if (!validate_arglist (exp,
3167 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3168 return NULL_RTX;
3169 else
3170 {
3171 tree dest = CALL_EXPR_ARG (exp, 0);
3172 tree src = CALL_EXPR_ARG (exp, 1);
3173 tree len = CALL_EXPR_ARG (exp, 2);
3174 return expand_builtin_mempcpy_args (dest, src, len,
3175 target, mode, /*endp=*/ 1);
3176 }
3177 }
3178
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  /* ENDP selects which pointer store_by_pieces returns: start,
	     end, or end minus one of the stored area.  */
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise, try a constant-length piecewise move.  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3255
3256 #ifndef HAVE_movstr
3257 # define HAVE_movstr 0
3258 # define CODE_FOR_movstr CODE_FOR_nothing
3259 #endif
3260
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  /* Nothing to do on targets without a movstr pattern.  */
  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* strcpy-style return value: latch the destination address into
	 TARGET before the pattern consumes it.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  expand_insn (CODE_FOR_movstr, 3, ops);

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
3306
3307 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3308 NULL_RTX if we failed the caller should emit a normal call, otherwise
3309 try to get the result in TARGET, if convenient (and in mode MODE if that's
3310 convenient). */
3311
3312 static rtx
3313 expand_builtin_strcpy (tree exp, rtx target)
3314 {
3315 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3316 {
3317 tree dest = CALL_EXPR_ARG (exp, 0);
3318 tree src = CALL_EXPR_ARG (exp, 1);
3319 return expand_builtin_strcpy_args (dest, src, target);
3320 }
3321 return NULL_RTX;
3322 }
3323
3324 /* Helper function to do the actual work for expand_builtin_strcpy. The
3325 arguments to the builtin_strcpy call DEST and SRC are broken out
3326 so that this can also be called without constructing an actual CALL_EXPR.
3327 The other arguments and return value are the same as for
3328 expand_builtin_strcpy. */
3329
3330 static rtx
3331 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3332 {
3333 return expand_movstr (dest, src, target, /*endp=*/0);
3334 }
3335
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.
     NOTE(review): the mempcpy path above tests builtin_decl_implicit_p;
     here the tree returned by builtin_decl_implicit is tested for
     non-NULL directly — same effect, presumably, but inconsistent.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* stpcpy copies strlen (SRC) + 1 bytes and returns a pointer to
	 the terminating NUL, i.e. mempcpy's result minus one
	 (ENDP == 2).  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      /* Expand as strcpy and compute the result pointer as
		 DST + strlen (SRC) ourselves.  */
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: the movstr pattern with stpcpy semantics.  */
      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3411
3412 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3413 bytes from constant string DATA + OFFSET and return it as target
3414 constant. */
3415
3416 rtx
3417 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3418 enum machine_mode mode)
3419 {
3420 const char *str = (const char *) data;
3421
3422 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3423 return const0_rtx;
3424
3425 return c_readstr (str + offset, mode);
3426 }
3427
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed, the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* SLEN now also counts the terminating NUL.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  /* builtin_strncpy_read_str yields zeros past the end of SRC,
	     which provides the required NUL padding.  */
	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3477
3478 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3479 bytes from constant string DATA + OFFSET and return it as target
3480 constant. */
3481
3482 rtx
3483 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3484 enum machine_mode mode)
3485 {
3486 const char *c = (const char *) data;
3487 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3488
3489 memset (p, *c, GET_MODE_SIZE (mode));
3490
3491 return c_readstr (p, mode);
3492 }
3493
3494 /* Callback routine for store_by_pieces. Return the RTL of a register
3495 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3496 char value given in the RTL register data. For example, if mode is
3497 4 bytes wide, return the RTL for 0x01010101*data. */
3498
3499 static rtx
3500 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3501 enum machine_mode mode)
3502 {
3503 rtx target, coeff;
3504 size_t size;
3505 char *p;
3506
3507 size = GET_MODE_SIZE (mode);
3508 if (size == 1)
3509 return (rtx) data;
3510
3511 p = XALLOCAVEC (char, size);
3512 memset (p, 1, size);
3513 coeff = c_readstr (p, mode);
3514
3515 target = convert_to_mode (mode, (rtx) data, 1);
3516 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3517 return force_reg (mode, target);
3518 }
3519
3520 /* Expand expression EXP, which is a call to the memset builtin. Return
3521 NULL_RTX if we failed the caller should emit a normal call, otherwise
3522 try to get the result in TARGET, if convenient (and in mode MODE if that's
3523 convenient). */
3524
3525 static rtx
3526 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3527 {
3528 if (!validate_arglist (exp,
3529 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3530 return NULL_RTX;
3531 else
3532 {
3533 tree dest = CALL_EXPR_ARG (exp, 0);
3534 tree val = CALL_EXPR_ARG (exp, 1);
3535 tree len = CALL_EXPR_ARG (exp, 2);
3536 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3537 }
3538 }
3539
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  enum machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Pick up value-profile hints about the expected alignment and size
     of this string operation.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      /* Non-constant fill value.  */
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Constant fill value: reduce it to a single target byte.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      /* Non-zero fill byte: store by pieces, or via the target's
	 setmem pattern.  */
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Zero fill byte: expand as a block clear.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed: emit a call to the function ORIG_EXP
     originally named (memset or bzero), with the stabilized
     arguments.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3671
3672 /* Expand expression EXP, which is a call to the bzero builtin. Return
3673 NULL_RTX if we failed the caller should emit a normal call. */
3674
3675 static rtx
3676 expand_builtin_bzero (tree exp)
3677 {
3678 tree dest, size;
3679 location_t loc = EXPR_LOCATION (exp);
3680
3681 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3682 return NULL_RTX;
3683
3684 dest = CALL_EXPR_ARG (exp, 0);
3685 size = CALL_EXPR_ARG (exp, 1);
3686
3687 /* New argument list transforming bzero(ptr x, int y) to
3688 memset(ptr x, int 0, size_t y). This is done this way
3689 so that if it isn't expanded inline, we fallback to
3690 calling bzero instead of memset. */
3691
3692 return expand_builtin_memset_args (dest, integer_zero_node,
3693 fold_convert_loc (loc,
3694 size_type_node, size),
3695 const0_rtx, VOIDmode, exp);
3696 }
3697
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
#if defined HAVE_cmpmemsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  Reuse TARGET
       only when it is already a suitable pseudo of the right mode.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
	set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
      }

    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
      gcc_unreachable ();

    if (insn)
      emit_insn (insn);
    else
      /* The pattern declined; fall back to a library call to memcmp.  */
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif /* HAVE_cmpmemsi.  */

  return NULL_RTX;
}
3789
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  The saved
	 expressions are reused by the do_libcall fallback below, so the
	 arguments are not evaluated twice.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  /* Compile-time string lengths, if determinable.  The +1 below
	     accounts for the terminating NUL, which must participate in
	     the comparison.  */
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
3930
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* +1 for the terminating NUL, which takes part in the comparison.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  The saved
	 expressions feed the library-call fallback below, so nothing is
	 evaluated twice.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4051
4052 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4053 if that's convenient. */
4054
4055 rtx
4056 expand_builtin_saveregs (void)
4057 {
4058 rtx val, seq;
4059
4060 /* Don't do __builtin_saveregs more than once in a function.
4061 Save the result of the first call and reuse it. */
4062 if (saveregs_value != 0)
4063 return saveregs_value;
4064
4065 /* When this function is called, it means that registers must be
4066 saved on entry to this function. So we migrate the call to the
4067 first insn of this function. */
4068
4069 start_sequence ();
4070
4071 /* Do whatever the machine needs done in this case. */
4072 val = targetm.calls.expand_builtin_saveregs ();
4073
4074 seq = get_insns ();
4075 end_sequence ();
4076
4077 saveregs_value = val;
4078
4079 /* Put the insns after the NOTE that starts the function. If this
4080 is inside a start_sequence, make the outer-level insn chain current, so
4081 the code is placed at the start of the function. */
4082 push_topmost_sequence ();
4083 emit_insn_after (seq, entry_of_function ());
4084 pop_topmost_sequence ();
4085
4086 return val;
4087 }
4088
4089 /* Expand a call to __builtin_next_arg. */
4090
4091 static rtx
4092 expand_builtin_next_arg (void)
4093 {
4094 /* Checking arguments is already done in fold_builtin_next_arg
4095 that must be called before this function. */
4096 return expand_binop (ptr_mode, add_optab,
4097 crtl->args.internal_arg_pointer,
4098 crtl->args.arg_offset_rtx,
4099 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4100 }
4101
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  Returns a stabilized equivalent of VALIST;
   when NEEDS_LVALUE, the result is usable as an assignment target.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Re-materialize the value as a dereference of the (now stable)
	 address, so the caller sees an object of VATYPE either way.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4151
4152 /* The "standard" definition of va_list is void*. */
4153
4154 tree
4155 std_build_builtin_va_list (void)
4156 {
4157 return ptr_type_node;
4158 }
4159
4160 /* The "standard" abi va_list is va_list_type_node. */
4161
4162 tree
4163 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4164 {
4165 return va_list_type_node;
4166 }
4167
/* The "standard" type of va_list is va_list_type_node.  Return it if
   TYPE is (possibly a pointer or reference to) the canonical va_list
   type, otherwise NULL_TREE.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection: TYPE may be a reference to, or a
     decayed pointer to, the va_list object.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  /* Compare main variants so qualifiers don't defeat the match.  */
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4202
4203 /* The "standard" implementation of va_start: just assign `nextarg' to
4204 the variable. */
4205
4206 void
4207 std_expand_builtin_va_start (tree valist, rtx nextarg)
4208 {
4209 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4210 convert_move (va_r, nextarg, 0);
4211 }
4212
4213 /* Expand EXP, a call to __builtin_va_start. */
4214
4215 static rtx
4216 expand_builtin_va_start (tree exp)
4217 {
4218 rtx nextarg;
4219 tree valist;
4220 location_t loc = EXPR_LOCATION (exp);
4221
4222 if (call_expr_nargs (exp) < 2)
4223 {
4224 error_at (loc, "too few arguments to function %<va_start%>");
4225 return const0_rtx;
4226 }
4227
4228 if (fold_builtin_next_arg (exp, true))
4229 return const0_rtx;
4230
4231 nextarg = expand_builtin_next_arg ();
4232 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4233
4234 if (targetm.expand_builtin_va_start)
4235 targetm.expand_builtin_va_start (valist, nextarg);
4236 else
4237 std_expand_builtin_va_start (valist, nextarg);
4238
4239 return const0_rtx;
4240 }
4241
4242 /* Expand EXP, a call to __builtin_va_end. */
4243
4244 static rtx
4245 expand_builtin_va_end (tree exp)
4246 {
4247 tree valist = CALL_EXPR_ARG (exp, 0);
4248
4249 /* Evaluate for side effects, if needed. I hate macros that don't
4250 do that. */
4251 if (TREE_SIDE_EFFECTS (valist))
4252 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4253
4254 return const0_rtx;
4255 }
4256
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination must be an lvalue; the source need not be.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar or record va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array va_list: copy the underlying storage with a block move.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4308
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL distinguishes the two; EXP is the
   call.  Returns const0_rtx on any diagnosed error.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, force the address into a register
	 unless it is already a register or a constant.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
4355
4356 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4357 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4358 is the same as for allocate_dynamic_stack_space. */
4359
4360 static rtx
4361 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4362 {
4363 rtx op0;
4364 rtx result;
4365 bool valid_arglist;
4366 unsigned int align;
4367 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4368 == BUILT_IN_ALLOCA_WITH_ALIGN);
4369
4370 valid_arglist
4371 = (alloca_with_align
4372 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4373 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4374
4375 if (!valid_arglist)
4376 return NULL_RTX;
4377
4378 /* Compute the argument. */
4379 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4380
4381 /* Compute the alignment. */
4382 align = (alloca_with_align
4383 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4384 : BIGGEST_ALIGNMENT);
4385
4386 /* Allocate the desired space. */
4387 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4388 result = convert_memory_address (ptr_mode, result);
4389
4390 return result;
4391 }
4392
4393 /* Expand a call to bswap builtin in EXP.
4394 Return NULL_RTX if a normal call should be emitted rather than expanding the
4395 function in-line. If convenient, the result should be placed in TARGET.
4396 SUBTARGET may be used as the target for computing one of EXP's operands. */
4397
4398 static rtx
4399 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4400 rtx subtarget)
4401 {
4402 tree arg;
4403 rtx op0;
4404
4405 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4406 return NULL_RTX;
4407
4408 arg = CALL_EXPR_ARG (exp, 0);
4409 op0 = expand_expr (arg,
4410 subtarget && GET_MODE (subtarget) == target_mode
4411 ? subtarget : NULL_RTX,
4412 target_mode, EXPAND_NORMAL);
4413 if (GET_MODE (op0) != target_mode)
4414 op0 = convert_to_mode (target_mode, op0, 1);
4415
4416 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4417
4418 gcc_assert (target);
4419
4420 return convert_to_mode (target_mode, target, 1);
4421 }
4422
4423 /* Expand a call to a unary builtin in EXP.
4424 Return NULL_RTX if a normal call should be emitted rather than expanding the
4425 function in-line. If convenient, the result should be placed in TARGET.
4426 SUBTARGET may be used as the target for computing one of EXP's operands. */
4427
4428 static rtx
4429 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4430 rtx subtarget, optab op_optab)
4431 {
4432 rtx op0;
4433
4434 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4435 return NULL_RTX;
4436
4437 /* Compute the argument. */
4438 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4439 (subtarget
4440 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4441 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4442 VOIDmode, EXPAND_NORMAL);
4443 /* Compute op, into TARGET if possible.
4444 Set TARGET to wherever the result comes back. */
4445 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4446 op_optab, op0, target, op_optab != clrsb_optab);
4447 gcc_assert (target);
4448
4449 return convert_to_mode (target_mode, target, 0);
4450 }
4451
4452 /* Expand a call to __builtin_expect. We just return our argument
4453 as the builtin_expect semantic should've been already executed by
4454 tree branch prediction pass. */
4455
4456 static rtx
4457 expand_builtin_expect (tree exp, rtx target)
4458 {
4459 tree arg;
4460
4461 if (call_expr_nargs (exp) < 2)
4462 return const0_rtx;
4463 arg = CALL_EXPR_ARG (exp, 0);
4464
4465 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4466 /* When guessing was done, the hints should be already stripped away. */
4467 gcc_assert (!flag_guess_branch_prob
4468 || optimize == 0 || seen_error ());
4469 return target;
4470 }
4471
4472 /* Expand a call to __builtin_assume_aligned. We just return our first
4473 argument as the builtin_assume_aligned semantic should've been already
4474 executed by CCP. */
4475
4476 static rtx
4477 expand_builtin_assume_aligned (tree exp, rtx target)
4478 {
4479 if (call_expr_nargs (exp) < 2)
4480 return const0_rtx;
4481 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4482 EXPAND_NORMAL);
4483 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4484 && (call_expr_nargs (exp) < 3
4485 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4486 return target;
4487 }
4488
/* Expand a call to __builtin_trap: emit the target's trap insn if it has
   one, otherwise a call to abort.  Either way, end with a barrier since
   control does not continue past this point.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    {
      rtx insn = emit_insn (gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
#endif
    /* No trap pattern: fall back to calling abort.  */
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
4507
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  /* Just cut the insn stream; nothing is ever executed past this.  */
  emit_barrier ();
}
4518
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Stabilize the argument and write the saved expression back into the
     CALL_EXPR, so a fallback expansion of EXP won't re-evaluate it.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
4541
4542 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4543 Return NULL is a normal call should be emitted rather than expanding the
4544 function inline. If convenient, the result should be placed in TARGET.
4545 SUBTARGET may be used as the target for computing the operand. */
4546
4547 static rtx
4548 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4549 {
4550 rtx op0, op1;
4551 tree arg;
4552
4553 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4554 return NULL_RTX;
4555
4556 arg = CALL_EXPR_ARG (exp, 0);
4557 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4558
4559 arg = CALL_EXPR_ARG (exp, 1);
4560 op1 = expand_normal (arg);
4561
4562 return expand_copysign (op0, op1, target);
4563 }
4564
4565 /* Create a new constant string literal and return a char* pointer to it.
4566 The STRING_CST value is the LEN characters at STR. */
4567 tree
4568 build_string_literal (int len, const char *str)
4569 {
4570 tree t, elem, index, type;
4571
4572 t = build_string (len, str);
4573 elem = build_type_variant (char_type_node, 1, 0);
4574 index = build_index_type (size_int (len - 1));
4575 type = build_array_type (elem, index);
4576 TREE_TYPE (t) = type;
4577 TREE_CONSTANT (t) = 1;
4578 TREE_READONLY (t) = 1;
4579 TREE_STATIC (t) = 1;
4580
4581 type = build_pointer_type (elem);
4582 t = build1 (ADDR_EXPR, type,
4583 build4 (ARRAY_REF, elem,
4584 t, integer_zero_node, NULL_TREE, NULL_TREE));
4585 return t;
4586 }
4587
/* Expand a call to __builtin___clear_cache.  Returns NULL_RTX when the
   caller should emit the libgcc call instead, const0_rtx when the call
   has been fully handled (or is a no-op).  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
	return const0_rtx;
    }
  /* Pattern unavailable or refused the operands: treat as a no-op
     rather than risking a recursive library call.  */
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
4636
4637 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4638
4639 static rtx
4640 round_trampoline_addr (rtx tramp)
4641 {
4642 rtx temp, addend, mask;
4643
4644 /* If we don't need too much alignment, we'll have been guaranteed
4645 proper alignment by get_trampoline_type. */
4646 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4647 return tramp;
4648
4649 /* Round address up to desired boundary. */
4650 temp = gen_reg_rtx (Pmode);
4651 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4652 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4653
4654 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4655 temp, 0, OPTAB_LIB_WIDEN);
4656 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4657 temp, 0, OPTAB_LIB_WIDEN);
4658
4659 return tramp;
4660 }
4661
/* Expand a call to __builtin_init_trampoline (ONSTACK true) or
   __builtin_init_heap_trampoline (ONSTACK false).  The call takes the
   trampoline storage, the nested function's address and the static
   chain value.  Returns const0_rtx, or NULL_RTX on malformed args.  */
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      /* Record that an executable stack is required, and warn.  */
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
4718
4719 static rtx
4720 expand_builtin_adjust_trampoline (tree exp)
4721 {
4722 rtx tramp;
4723
4724 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4725 return NULL_RTX;
4726
4727 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4728 tramp = round_trampoline_addr (tramp);
4729 if (targetm.calls.trampoline_adjust_address)
4730 tramp = targetm.calls.trampoline_adjust_address (tramp);
4731
4732 return tramp;
4733 }
4734
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* FMODE is the floating-point mode of the argument, RMODE the
     (integer) mode of the result.  */
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  /* Wrap the argument in a SAVE_EXPR so the "ARG < 0.0" fallback
     below can re-expand it without duplicating side effects.  */
  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn did not match after all; discard anything it emitted
	 and fall through to the generic bit-extraction code below.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0". */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero:
	 signbit (-0.0) must be 1 yet -0.0 < 0.0 is false.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			 build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The value fits in a single word: reinterpret it as an integer
	 of the same size so the sign bit can be masked directly.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word value: isolate the word that contains the sign bit
	 and adjust BITPOS to be relative to that word.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      double_int mask = double_int_zero.set_bit (bitpos);

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit. */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
4845
4846 /* Expand fork or exec calls. TARGET is the desired target of the
4847 call. EXP is the call. FN is the
4848 identificator of the actual function. IGNORE is nonzero if the
4849 value is to be ignored. */
4850
4851 static rtx
4852 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4853 {
4854 tree id, decl;
4855 tree call;
4856
4857 /* If we are not profiling, just call the function. */
4858 if (!profile_arc_flag)
4859 return NULL_RTX;
4860
4861 /* Otherwise call the wrapper. This should be equivalent for the rest of
4862 compiler, so the code does not diverge, and the wrapper may run the
4863 code necessary for keeping the profiling sane. */
4864
4865 switch (DECL_FUNCTION_CODE (fn))
4866 {
4867 case BUILT_IN_FORK:
4868 id = get_identifier ("__gcov_fork");
4869 break;
4870
4871 case BUILT_IN_EXECL:
4872 id = get_identifier ("__gcov_execl");
4873 break;
4874
4875 case BUILT_IN_EXECV:
4876 id = get_identifier ("__gcov_execv");
4877 break;
4878
4879 case BUILT_IN_EXECLP:
4880 id = get_identifier ("__gcov_execlp");
4881 break;
4882
4883 case BUILT_IN_EXECLE:
4884 id = get_identifier ("__gcov_execle");
4885 break;
4886
4887 case BUILT_IN_EXECVP:
4888 id = get_identifier ("__gcov_execvp");
4889 break;
4890
4891 case BUILT_IN_EXECVE:
4892 id = get_identifier ("__gcov_execve");
4893 break;
4894
4895 default:
4896 gcc_unreachable ();
4897 }
4898
4899 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4900 FUNCTION_DECL, id, TREE_TYPE (fn));
4901 DECL_EXTERNAL (decl) = 1;
4902 TREE_PUBLIC (decl) = 1;
4903 DECL_ARTIFICIAL (decl) = 1;
4904 TREE_NOTHROW (decl) = 1;
4905 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4906 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4907 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4908 return expand_call (call, target, ignore);
4909 }
4910
4911
4912 \f
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* BITS_PER_UNIT << FCODE_DIFF is the access width in bits (the _1,
     _2, _4, ... suffix times the byte size).  The size is not
     negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
4928
4929 /* Expand the memory expression LOC and return the appropriate memory operand
4930 for the builtin_sync operations. */
4931
4932 static rtx
4933 get_builtin_sync_mem (tree loc, enum machine_mode mode)
4934 {
4935 rtx addr, mem;
4936
4937 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4938 addr = convert_memory_address (Pmode, addr);
4939
4940 /* Note that we explicitly do not want any alias information for this
4941 memory, so that we kill all other live memories. Otherwise we don't
4942 satisfy the full barrier semantics of the intrinsic. */
4943 mem = validize_mem (gen_rtx_MEM (mode, addr));
4944
4945 /* The alignment needs to be at least according to that of the mode. */
4946 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4947 get_pointer_alignment (loc)));
4948 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4949 MEM_VOLATILE_P (mem) = 1;
4950
4951 return mem;
4952 }
4953
4954 /* Make sure an argument is in the right mode.
4955 EXP is the tree argument.
4956 MODE is the mode it should be in. */
4957
4958 static rtx
4959 expand_expr_force_mode (tree exp, enum machine_mode mode)
4960 {
4961 rtx val;
4962 enum machine_mode old_mode;
4963
4964 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4965 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4966 of CONST_INTs, where we know the old_mode only from the call argument. */
4967
4968 old_mode = GET_MODE (val);
4969 if (old_mode == VOIDmode)
4970 old_mode = TYPE_MODE (TREE_TYPE (exp));
4971 val = convert_modes (mode, old_mode, val, 1);
4972 return val;
4973 }
4974
4975
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  /* The NAND builtins changed semantics in GCC 4.4; when -Wsync-nand
     is in effect, remind the user.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Function-local statics: each note fires at most once per
	 compilation, not once per call site.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  /* Report the generic _N decl rather than the sized variant
	     so the note reads naturally.  */
	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* The legacy __sync builtins always imply sequentially consistent
     (full barrier) semantics.  */
  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
				 after);
}
5038
5039 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5040 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5041 true if this is the boolean form. TARGET is a place for us to store the
5042 results; this is NOT optional if IS_BOOL is true. */
5043
5044 static rtx
5045 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5046 bool is_bool, rtx target)
5047 {
5048 rtx old_val, new_val, mem;
5049 rtx *pbool, *poval;
5050
5051 /* Expand the operands. */
5052 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5053 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5054 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5055
5056 pbool = poval = NULL;
5057 if (target != const0_rtx)
5058 {
5059 if (is_bool)
5060 pbool = &target;
5061 else
5062 poval = &target;
5063 }
5064 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5065 false, MEMMODEL_SEQ_CST,
5066 MEMMODEL_SEQ_CST))
5067 return NULL_RTX;
5068
5069 return target;
5070 }
5071
5072 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5073 general form is actually an atomic exchange, and some targets only
5074 support a reduced form with the second argument being a constant 1.
5075 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5076 the results. */
5077
5078 static rtx
5079 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5080 rtx target)
5081 {
5082 rtx val, mem;
5083
5084 /* Expand the operands. */
5085 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5086 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5087
5088 return expand_sync_lock_test_and_set (target, mem, val);
5089 }
5090
5091 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5092
5093 static void
5094 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5095 {
5096 rtx mem;
5097
5098 /* Expand the operands. */
5099 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5100
5101 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5102 }
5103
5104 /* Given an integer representing an ``enum memmodel'', verify its
5105 correctness and return the memory model enum. */
5106
5107 static enum memmodel
5108 get_memmodel (tree exp)
5109 {
5110 rtx op;
5111 unsigned HOST_WIDE_INT val;
5112
5113 /* If the parameter is not a constant, it's a run time value so we'll just
5114 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5115 if (TREE_CODE (exp) != INTEGER_CST)
5116 return MEMMODEL_SEQ_CST;
5117
5118 op = expand_normal (exp);
5119
5120 val = INTVAL (op);
5121 if (targetm.memmodel_check)
5122 val = targetm.memmodel_check (val);
5123 else if (val & ~MEMMODEL_MASK)
5124 {
5125 warning (OPT_Winvalid_memory_model,
5126 "Unknown architecture specifier in memory model to builtin.");
5127 return MEMMODEL_SEQ_CST;
5128 }
5129
5130 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5131 {
5132 warning (OPT_Winvalid_memory_model,
5133 "invalid memory model argument to builtin");
5134 return MEMMODEL_SEQ_CST;
5135 }
5136
5137 return (enum memmodel) val;
5138 }
5139
5140 /* Expand the __atomic_exchange intrinsic:
5141 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5142 EXP is the CALL_EXPR.
5143 TARGET is an optional place for us to store the results. */
5144
5145 static rtx
5146 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5147 {
5148 rtx val, mem;
5149 enum memmodel model;
5150
5151 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5152 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5153 {
5154 error ("invalid memory model for %<__atomic_exchange%>");
5155 return NULL_RTX;
5156 }
5157
5158 if (!flag_inline_atomics)
5159 return NULL_RTX;
5160
5161 /* Expand the operands. */
5162 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5163 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5164
5165 return expand_atomic_exchange (target, mem, val, model);
5166 }
5167
/* Expand the __atomic_compare_exchange intrinsic:
   	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  /* The failure ordering only governs the load done when the compare
     fails, so release-type orderings are meaningless there.  */
  if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
      || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid failure memory model for %<__atomic_compare_exchange%>");
      return NULL_RTX;
    }

  if (failure > success)
    {
      error ("failure memory model cannot be stronger than success "
	     "memory model for %<__atomic_compare_exchange%>");
      return NULL_RTX;
    }

  /* Fall back to a library call when inlining of atomics is off.  */
  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* EXPECT is a pointer: we read the expected value through it, and on
     failure must store the observed value back through it.  */
  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  /* A non-constant WEAK flag is conservatively treated as strong.  */
  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
    is_weak = true;

  /* OLDVAL starts as the *EXPECT memory; the expander may replace it
     with another rtx holding the value that was actually observed.  */
  oldval = expect;
  if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
				       &oldval, mem, oldval, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* If the observed value landed somewhere other than *EXPECT, copy it
     there, as the builtin's contract requires.  */
  if (oldval != expect)
    emit_move_insn (expect, oldval);

  return target;
}
5229
5230 /* Expand the __atomic_load intrinsic:
5231 TYPE __atomic_load (TYPE *object, enum memmodel)
5232 EXP is the CALL_EXPR.
5233 TARGET is an optional place for us to store the results. */
5234
5235 static rtx
5236 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5237 {
5238 rtx mem;
5239 enum memmodel model;
5240
5241 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5242 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5243 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5244 {
5245 error ("invalid memory model for %<__atomic_load%>");
5246 return NULL_RTX;
5247 }
5248
5249 if (!flag_inline_atomics)
5250 return NULL_RTX;
5251
5252 /* Expand the operand. */
5253 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5254
5255 return expand_atomic_load (target, mem, model);
5256 }
5257
5258
5259 /* Expand the __atomic_store intrinsic:
5260 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5261 EXP is the CALL_EXPR.
5262 TARGET is an optional place for us to store the results. */
5263
5264 static rtx
5265 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5266 {
5267 rtx mem, val;
5268 enum memmodel model;
5269
5270 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5271 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5272 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5273 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5274 {
5275 error ("invalid memory model for %<__atomic_store%>");
5276 return NULL_RTX;
5277 }
5278
5279 if (!flag_inline_atomics)
5280 return NULL_RTX;
5281
5282 /* Expand the operands. */
5283 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5284 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5285
5286 return expand_atomic_store (mem, val, model, false);
5287 }
5288
/* Expand the __atomic_fetch_XXX intrinsic:
   	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function (EXT_CALL); the original
     decl is restored after expansion below.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result:
     EXT_CALL returns the pre-operation value, so reapply the operation
     when the post-operation value was requested.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  /* NOT here means NAND: recompute ~(ret & val).  */
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
5357
5358
5359 #ifndef HAVE_atomic_clear
5360 # define HAVE_atomic_clear 0
5361 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5362 #endif
5363
/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  enum machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  /* The object is a bool; its size (and hence mode) is defined by the
     target's BOOL_TYPE_SIZE.  */
  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  /* A clear is a store, so acquire-type orderings are invalid.  */
  if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
      || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      /* NOTE(review): the message names __atomic_store although this is
	 __atomic_clear -- presumably because clear lowers to a store;
	 confirm whether it should name __atomic_clear instead.  */
      error ("invalid memory model for %<__atomic_store%>");
      return const0_rtx;
    }

  /* Prefer a dedicated atomic_clear insn when the target has one.  */
  if (HAVE_atomic_clear)
    {
      emit_insn (gen_atomic_clear (mem, model));
      return const0_rtx;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
5402
5403 /* Expand an atomic test_and_set operation.
5404 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5405 EXP is the call expression. */
5406
5407 static rtx
5408 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5409 {
5410 rtx mem;
5411 enum memmodel model;
5412 enum machine_mode mode;
5413
5414 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5415 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5416 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5417
5418 return expand_atomic_test_and_set (target, mem, model);
5419 }
5420
5421
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.
   Returns boolean_true_node / boolean_false_node, or NULL_TREE when the
   answer is not known at compile time.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  enum machine_mode mode;
  unsigned int mode_align, type_align;

  /* Without a compile-time constant size there is no definitive
     answer.  */
  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* Find the integer mode of the given size and its natural
     alignment.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  /* A literal null pointer means "no particular object": assume the
     typical alignment for the mode.  */
  if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
    type_align = mode_align;
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype)))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;
  else
    return boolean_false_node;
}
5474
5475 /* Return true if the parameters to call EXP represent an object which will
5476 always generate lock free instructions. The first argument represents the
5477 size of the object, and the second parameter is a pointer to the object
5478 itself. If NULL is passed for the object, then the result is based on
5479 typical alignment for an object of the specified size. Otherwise return
5480 false. */
5481
5482 static rtx
5483 expand_builtin_atomic_always_lock_free (tree exp)
5484 {
5485 tree size;
5486 tree arg0 = CALL_EXPR_ARG (exp, 0);
5487 tree arg1 = CALL_EXPR_ARG (exp, 1);
5488
5489 if (TREE_CODE (arg0) != INTEGER_CST)
5490 {
5491 error ("non-constant argument 1 to __atomic_always_lock_free");
5492 return const0_rtx;
5493 }
5494
5495 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5496 if (size == boolean_true_node)
5497 return const1_rtx;
5498 return const0_rtx;
5499 }
5500
5501 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5502 is lock free on this architecture. */
5503
5504 static tree
5505 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5506 {
5507 if (!flag_inline_atomics)
5508 return NULL_TREE;
5509
5510 /* If it isn't always lock free, don't generate a result. */
5511 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5512 return boolean_true_node;
5513
5514 return NULL_TREE;
5515 }
5516
5517 /* Return true if the parameters to call EXP represent an object which will
5518 always generate lock free instructions. The first argument represents the
5519 size of the object, and the second parameter is a pointer to the object
5520 itself. If NULL is passed for the object, then the result is based on
5521 typical alignment for an object of the specified size. Otherwise return
5522 NULL*/
5523
5524 static rtx
5525 expand_builtin_atomic_is_lock_free (tree exp)
5526 {
5527 tree size;
5528 tree arg0 = CALL_EXPR_ARG (exp, 0);
5529 tree arg1 = CALL_EXPR_ARG (exp, 1);
5530
5531 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5532 {
5533 error ("non-integer argument 1 to __atomic_is_lock_free");
5534 return NULL_RTX;
5535 }
5536
5537 if (!flag_inline_atomics)
5538 return NULL_RTX;
5539
5540 /* If the value is known at compile time, return the RTX for it. */
5541 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5542 if (size == boolean_true_node)
5543 return const1_rtx;
5544
5545 return NULL_RTX;
5546 }
5547
5548 /* Expand the __atomic_thread_fence intrinsic:
5549 void __atomic_thread_fence (enum memmodel)
5550 EXP is the CALL_EXPR. */
5551
5552 static void
5553 expand_builtin_atomic_thread_fence (tree exp)
5554 {
5555 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5556 expand_mem_thread_fence (model);
5557 }
5558
5559 /* Expand the __atomic_signal_fence intrinsic:
5560 void __atomic_signal_fence (enum memmodel)
5561 EXP is the CALL_EXPR. */
5562
5563 static void
5564 expand_builtin_atomic_signal_fence (tree exp)
5565 {
5566 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5567 expand_mem_signal_fence (model);
5568 }
5569
/* Expand the __sync_synchronize intrinsic: a full (sequentially
   consistent) memory barrier.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SEQ_CST);
}
5577
5578 static rtx
5579 expand_builtin_thread_pointer (tree exp, rtx target)
5580 {
5581 enum insn_code icode;
5582 if (!validate_arglist (exp, VOID_TYPE))
5583 return const0_rtx;
5584 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5585 if (icode != CODE_FOR_nothing)
5586 {
5587 struct expand_operand op;
5588 if (!REG_P (target) || GET_MODE (target) != Pmode)
5589 target = gen_reg_rtx (Pmode);
5590 create_output_operand (&op, target, Pmode);
5591 expand_insn (icode, 1, &op);
5592 return target;
5593 }
5594 error ("__builtin_thread_pointer is not supported on this target");
5595 return const0_rtx;
5596 }
5597
5598 static void
5599 expand_builtin_set_thread_pointer (tree exp)
5600 {
5601 enum insn_code icode;
5602 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5603 return;
5604 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5605 if (icode != CODE_FOR_nothing)
5606 {
5607 struct expand_operand op;
5608 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5609 Pmode, EXPAND_NORMAL);
5610 create_input_operand (&op, val, Pmode);
5611 expand_insn (icode, 1, &op);
5612 return;
5613 }
5614 error ("__builtin_set_thread_pointer is not supported on this target");
5615 }
5616
5617 \f
5618 /* Expand an expression EXP that calls a built-in function,
5619 with result going to TARGET if that's convenient
5620 (and in mode MODE if that's convenient).
5621 SUBTARGET may be used as the target for computing one of EXP's operands.
5622 IGNORE is nonzero if the value is to be ignored. */
5623
5624 rtx
5625 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5626 int ignore)
5627 {
5628 tree fndecl = get_callee_fndecl (exp);
5629 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5630 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5631 int flags;
5632
5633 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5634 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5635
5636 /* When not optimizing, generate calls to library functions for a certain
5637 set of builtins. */
5638 if (!optimize
5639 && !called_as_built_in (fndecl)
5640 && fcode != BUILT_IN_FORK
5641 && fcode != BUILT_IN_EXECL
5642 && fcode != BUILT_IN_EXECV
5643 && fcode != BUILT_IN_EXECLP
5644 && fcode != BUILT_IN_EXECLE
5645 && fcode != BUILT_IN_EXECVP
5646 && fcode != BUILT_IN_EXECVE
5647 && fcode != BUILT_IN_ALLOCA
5648 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5649 && fcode != BUILT_IN_FREE
5650 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5651 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5652 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5653 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5654 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5655 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5656 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5657 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5658 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5659 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5660 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND)
5661 return expand_call (exp, target, ignore);
5662
5663 /* The built-in function expanders test for target == const0_rtx
5664 to determine whether the function's result will be ignored. */
5665 if (ignore)
5666 target = const0_rtx;
5667
5668 /* If the result of a pure or const built-in function is ignored, and
5669 none of its arguments are volatile, we can avoid expanding the
5670 built-in call and just evaluate the arguments for side-effects. */
5671 if (target == const0_rtx
5672 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5673 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5674 {
5675 bool volatilep = false;
5676 tree arg;
5677 call_expr_arg_iterator iter;
5678
5679 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5680 if (TREE_THIS_VOLATILE (arg))
5681 {
5682 volatilep = true;
5683 break;
5684 }
5685
5686 if (! volatilep)
5687 {
5688 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5689 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5690 return const0_rtx;
5691 }
5692 }
5693
5694 switch (fcode)
5695 {
5696 CASE_FLT_FN (BUILT_IN_FABS):
5697 case BUILT_IN_FABSD32:
5698 case BUILT_IN_FABSD64:
5699 case BUILT_IN_FABSD128:
5700 target = expand_builtin_fabs (exp, target, subtarget);
5701 if (target)
5702 return target;
5703 break;
5704
5705 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5706 target = expand_builtin_copysign (exp, target, subtarget);
5707 if (target)
5708 return target;
5709 break;
5710
5711 /* Just do a normal library call if we were unable to fold
5712 the values. */
5713 CASE_FLT_FN (BUILT_IN_CABS):
5714 break;
5715
5716 CASE_FLT_FN (BUILT_IN_EXP):
5717 CASE_FLT_FN (BUILT_IN_EXP10):
5718 CASE_FLT_FN (BUILT_IN_POW10):
5719 CASE_FLT_FN (BUILT_IN_EXP2):
5720 CASE_FLT_FN (BUILT_IN_EXPM1):
5721 CASE_FLT_FN (BUILT_IN_LOGB):
5722 CASE_FLT_FN (BUILT_IN_LOG):
5723 CASE_FLT_FN (BUILT_IN_LOG10):
5724 CASE_FLT_FN (BUILT_IN_LOG2):
5725 CASE_FLT_FN (BUILT_IN_LOG1P):
5726 CASE_FLT_FN (BUILT_IN_TAN):
5727 CASE_FLT_FN (BUILT_IN_ASIN):
5728 CASE_FLT_FN (BUILT_IN_ACOS):
5729 CASE_FLT_FN (BUILT_IN_ATAN):
5730 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5731 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5732 because of possible accuracy problems. */
5733 if (! flag_unsafe_math_optimizations)
5734 break;
5735 CASE_FLT_FN (BUILT_IN_SQRT):
5736 CASE_FLT_FN (BUILT_IN_FLOOR):
5737 CASE_FLT_FN (BUILT_IN_CEIL):
5738 CASE_FLT_FN (BUILT_IN_TRUNC):
5739 CASE_FLT_FN (BUILT_IN_ROUND):
5740 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5741 CASE_FLT_FN (BUILT_IN_RINT):
5742 target = expand_builtin_mathfn (exp, target, subtarget);
5743 if (target)
5744 return target;
5745 break;
5746
5747 CASE_FLT_FN (BUILT_IN_FMA):
5748 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5749 if (target)
5750 return target;
5751 break;
5752
5753 CASE_FLT_FN (BUILT_IN_ILOGB):
5754 if (! flag_unsafe_math_optimizations)
5755 break;
5756 CASE_FLT_FN (BUILT_IN_ISINF):
5757 CASE_FLT_FN (BUILT_IN_FINITE):
5758 case BUILT_IN_ISFINITE:
5759 case BUILT_IN_ISNORMAL:
5760 target = expand_builtin_interclass_mathfn (exp, target);
5761 if (target)
5762 return target;
5763 break;
5764
5765 CASE_FLT_FN (BUILT_IN_ICEIL):
5766 CASE_FLT_FN (BUILT_IN_LCEIL):
5767 CASE_FLT_FN (BUILT_IN_LLCEIL):
5768 CASE_FLT_FN (BUILT_IN_LFLOOR):
5769 CASE_FLT_FN (BUILT_IN_IFLOOR):
5770 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5771 target = expand_builtin_int_roundingfn (exp, target);
5772 if (target)
5773 return target;
5774 break;
5775
5776 CASE_FLT_FN (BUILT_IN_IRINT):
5777 CASE_FLT_FN (BUILT_IN_LRINT):
5778 CASE_FLT_FN (BUILT_IN_LLRINT):
5779 CASE_FLT_FN (BUILT_IN_IROUND):
5780 CASE_FLT_FN (BUILT_IN_LROUND):
5781 CASE_FLT_FN (BUILT_IN_LLROUND):
5782 target = expand_builtin_int_roundingfn_2 (exp, target);
5783 if (target)
5784 return target;
5785 break;
5786
5787 CASE_FLT_FN (BUILT_IN_POWI):
5788 target = expand_builtin_powi (exp, target);
5789 if (target)
5790 return target;
5791 break;
5792
5793 CASE_FLT_FN (BUILT_IN_ATAN2):
5794 CASE_FLT_FN (BUILT_IN_LDEXP):
5795 CASE_FLT_FN (BUILT_IN_SCALB):
5796 CASE_FLT_FN (BUILT_IN_SCALBN):
5797 CASE_FLT_FN (BUILT_IN_SCALBLN):
5798 if (! flag_unsafe_math_optimizations)
5799 break;
5800
5801 CASE_FLT_FN (BUILT_IN_FMOD):
5802 CASE_FLT_FN (BUILT_IN_REMAINDER):
5803 CASE_FLT_FN (BUILT_IN_DREM):
5804 CASE_FLT_FN (BUILT_IN_POW):
5805 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5806 if (target)
5807 return target;
5808 break;
5809
5810 CASE_FLT_FN (BUILT_IN_CEXPI):
5811 target = expand_builtin_cexpi (exp, target);
5812 gcc_assert (target);
5813 return target;
5814
5815 CASE_FLT_FN (BUILT_IN_SIN):
5816 CASE_FLT_FN (BUILT_IN_COS):
5817 if (! flag_unsafe_math_optimizations)
5818 break;
5819 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5820 if (target)
5821 return target;
5822 break;
5823
5824 CASE_FLT_FN (BUILT_IN_SINCOS):
5825 if (! flag_unsafe_math_optimizations)
5826 break;
5827 target = expand_builtin_sincos (exp);
5828 if (target)
5829 return target;
5830 break;
5831
5832 case BUILT_IN_APPLY_ARGS:
5833 return expand_builtin_apply_args ();
5834
5835 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5836 FUNCTION with a copy of the parameters described by
5837 ARGUMENTS, and ARGSIZE. It returns a block of memory
5838 allocated on the stack into which is stored all the registers
5839 that might possibly be used for returning the result of a
5840 function. ARGUMENTS is the value returned by
5841 __builtin_apply_args. ARGSIZE is the number of bytes of
5842 arguments that must be copied. ??? How should this value be
5843 computed? We'll also need a safe worst case value for varargs
5844 functions. */
5845 case BUILT_IN_APPLY:
5846 if (!validate_arglist (exp, POINTER_TYPE,
5847 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5848 && !validate_arglist (exp, REFERENCE_TYPE,
5849 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5850 return const0_rtx;
5851 else
5852 {
5853 rtx ops[3];
5854
5855 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5856 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5857 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5858
5859 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5860 }
5861
5862 /* __builtin_return (RESULT) causes the function to return the
5863 value described by RESULT. RESULT is address of the block of
5864 memory returned by __builtin_apply. */
5865 case BUILT_IN_RETURN:
5866 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5867 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5868 return const0_rtx;
5869
5870 case BUILT_IN_SAVEREGS:
5871 return expand_builtin_saveregs ();
5872
5873 case BUILT_IN_VA_ARG_PACK:
5874 /* All valid uses of __builtin_va_arg_pack () are removed during
5875 inlining. */
5876 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5877 return const0_rtx;
5878
5879 case BUILT_IN_VA_ARG_PACK_LEN:
5880 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5881 inlining. */
5882 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5883 return const0_rtx;
5884
5885 /* Return the address of the first anonymous stack arg. */
5886 case BUILT_IN_NEXT_ARG:
5887 if (fold_builtin_next_arg (exp, false))
5888 return const0_rtx;
5889 return expand_builtin_next_arg ();
5890
5891 case BUILT_IN_CLEAR_CACHE:
5892 target = expand_builtin___clear_cache (exp);
5893 if (target)
5894 return target;
5895 break;
5896
5897 case BUILT_IN_CLASSIFY_TYPE:
5898 return expand_builtin_classify_type (exp);
5899
5900 case BUILT_IN_CONSTANT_P:
5901 return const0_rtx;
5902
5903 case BUILT_IN_FRAME_ADDRESS:
5904 case BUILT_IN_RETURN_ADDRESS:
5905 return expand_builtin_frame_address (fndecl, exp);
5906
5907 /* Returns the address of the area where the structure is returned.
5908 0 otherwise. */
5909 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5910 if (call_expr_nargs (exp) != 0
5911 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5912 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5913 return const0_rtx;
5914 else
5915 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5916
5917 case BUILT_IN_ALLOCA:
5918 case BUILT_IN_ALLOCA_WITH_ALIGN:
5919 /* If the allocation stems from the declaration of a variable-sized
5920 object, it cannot accumulate. */
5921 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
5922 if (target)
5923 return target;
5924 break;
5925
5926 case BUILT_IN_STACK_SAVE:
5927 return expand_stack_save ();
5928
5929 case BUILT_IN_STACK_RESTORE:
5930 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5931 return const0_rtx;
5932
5933 case BUILT_IN_BSWAP16:
5934 case BUILT_IN_BSWAP32:
5935 case BUILT_IN_BSWAP64:
5936 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
5937 if (target)
5938 return target;
5939 break;
5940
5941 CASE_INT_FN (BUILT_IN_FFS):
5942 target = expand_builtin_unop (target_mode, exp, target,
5943 subtarget, ffs_optab);
5944 if (target)
5945 return target;
5946 break;
5947
5948 CASE_INT_FN (BUILT_IN_CLZ):
5949 target = expand_builtin_unop (target_mode, exp, target,
5950 subtarget, clz_optab);
5951 if (target)
5952 return target;
5953 break;
5954
5955 CASE_INT_FN (BUILT_IN_CTZ):
5956 target = expand_builtin_unop (target_mode, exp, target,
5957 subtarget, ctz_optab);
5958 if (target)
5959 return target;
5960 break;
5961
5962 CASE_INT_FN (BUILT_IN_CLRSB):
5963 target = expand_builtin_unop (target_mode, exp, target,
5964 subtarget, clrsb_optab);
5965 if (target)
5966 return target;
5967 break;
5968
5969 CASE_INT_FN (BUILT_IN_POPCOUNT):
5970 target = expand_builtin_unop (target_mode, exp, target,
5971 subtarget, popcount_optab);
5972 if (target)
5973 return target;
5974 break;
5975
5976 CASE_INT_FN (BUILT_IN_PARITY):
5977 target = expand_builtin_unop (target_mode, exp, target,
5978 subtarget, parity_optab);
5979 if (target)
5980 return target;
5981 break;
5982
5983 case BUILT_IN_STRLEN:
5984 target = expand_builtin_strlen (exp, target, target_mode);
5985 if (target)
5986 return target;
5987 break;
5988
5989 case BUILT_IN_STRCPY:
5990 target = expand_builtin_strcpy (exp, target);
5991 if (target)
5992 return target;
5993 break;
5994
5995 case BUILT_IN_STRNCPY:
5996 target = expand_builtin_strncpy (exp, target);
5997 if (target)
5998 return target;
5999 break;
6000
6001 case BUILT_IN_STPCPY:
6002 target = expand_builtin_stpcpy (exp, target, mode);
6003 if (target)
6004 return target;
6005 break;
6006
6007 case BUILT_IN_MEMCPY:
6008 target = expand_builtin_memcpy (exp, target);
6009 if (target)
6010 return target;
6011 break;
6012
6013 case BUILT_IN_MEMPCPY:
6014 target = expand_builtin_mempcpy (exp, target, mode);
6015 if (target)
6016 return target;
6017 break;
6018
6019 case BUILT_IN_MEMSET:
6020 target = expand_builtin_memset (exp, target, mode);
6021 if (target)
6022 return target;
6023 break;
6024
6025 case BUILT_IN_BZERO:
6026 target = expand_builtin_bzero (exp);
6027 if (target)
6028 return target;
6029 break;
6030
6031 case BUILT_IN_STRCMP:
6032 target = expand_builtin_strcmp (exp, target);
6033 if (target)
6034 return target;
6035 break;
6036
6037 case BUILT_IN_STRNCMP:
6038 target = expand_builtin_strncmp (exp, target, mode);
6039 if (target)
6040 return target;
6041 break;
6042
6043 case BUILT_IN_BCMP:
6044 case BUILT_IN_MEMCMP:
6045 target = expand_builtin_memcmp (exp, target, mode);
6046 if (target)
6047 return target;
6048 break;
6049
6050 case BUILT_IN_SETJMP:
6051 /* This should have been lowered to the builtins below. */
6052 gcc_unreachable ();
6053
6054 case BUILT_IN_SETJMP_SETUP:
6055 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6056 and the receiver label. */
6057 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6058 {
6059 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6060 VOIDmode, EXPAND_NORMAL);
6061 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6062 rtx label_r = label_rtx (label);
6063
6064 /* This is copied from the handling of non-local gotos. */
6065 expand_builtin_setjmp_setup (buf_addr, label_r);
6066 nonlocal_goto_handler_labels
6067 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6068 nonlocal_goto_handler_labels);
6069 /* ??? Do not let expand_label treat us as such since we would
6070 not want to be both on the list of non-local labels and on
6071 the list of forced labels. */
6072 FORCED_LABEL (label) = 0;
6073 return const0_rtx;
6074 }
6075 break;
6076
6077 case BUILT_IN_SETJMP_DISPATCHER:
6078 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6079 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6080 {
6081 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6082 rtx label_r = label_rtx (label);
6083
6084 /* Remove the dispatcher label from the list of non-local labels
6085 since the receiver labels have been added to it above. */
6086 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6087 return const0_rtx;
6088 }
6089 break;
6090
6091 case BUILT_IN_SETJMP_RECEIVER:
6092 /* __builtin_setjmp_receiver is passed the receiver label. */
6093 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6094 {
6095 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6096 rtx label_r = label_rtx (label);
6097
6098 expand_builtin_setjmp_receiver (label_r);
6099 return const0_rtx;
6100 }
6101 break;
6102
6103 /* __builtin_longjmp is passed a pointer to an array of five words.
6104 It's similar to the C library longjmp function but works with
6105 __builtin_setjmp above. */
6106 case BUILT_IN_LONGJMP:
6107 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6108 {
6109 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6110 VOIDmode, EXPAND_NORMAL);
6111 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6112
6113 if (value != const1_rtx)
6114 {
6115 error ("%<__builtin_longjmp%> second argument must be 1");
6116 return const0_rtx;
6117 }
6118
6119 expand_builtin_longjmp (buf_addr, value);
6120 return const0_rtx;
6121 }
6122 break;
6123
6124 case BUILT_IN_NONLOCAL_GOTO:
6125 target = expand_builtin_nonlocal_goto (exp);
6126 if (target)
6127 return target;
6128 break;
6129
6130 /* This updates the setjmp buffer that is its argument with the value
6131 of the current stack pointer. */
6132 case BUILT_IN_UPDATE_SETJMP_BUF:
6133 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6134 {
6135 rtx buf_addr
6136 = expand_normal (CALL_EXPR_ARG (exp, 0));
6137
6138 expand_builtin_update_setjmp_buf (buf_addr);
6139 return const0_rtx;
6140 }
6141 break;
6142
6143 case BUILT_IN_TRAP:
6144 expand_builtin_trap ();
6145 return const0_rtx;
6146
6147 case BUILT_IN_UNREACHABLE:
6148 expand_builtin_unreachable ();
6149 return const0_rtx;
6150
6151 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6152 case BUILT_IN_SIGNBITD32:
6153 case BUILT_IN_SIGNBITD64:
6154 case BUILT_IN_SIGNBITD128:
6155 target = expand_builtin_signbit (exp, target);
6156 if (target)
6157 return target;
6158 break;
6159
6160 /* Various hooks for the DWARF 2 __throw routine. */
6161 case BUILT_IN_UNWIND_INIT:
6162 expand_builtin_unwind_init ();
6163 return const0_rtx;
6164 case BUILT_IN_DWARF_CFA:
6165 return virtual_cfa_rtx;
6166 #ifdef DWARF2_UNWIND_INFO
6167 case BUILT_IN_DWARF_SP_COLUMN:
6168 return expand_builtin_dwarf_sp_column ();
6169 case BUILT_IN_INIT_DWARF_REG_SIZES:
6170 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6171 return const0_rtx;
6172 #endif
6173 case BUILT_IN_FROB_RETURN_ADDR:
6174 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6175 case BUILT_IN_EXTRACT_RETURN_ADDR:
6176 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6177 case BUILT_IN_EH_RETURN:
6178 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6179 CALL_EXPR_ARG (exp, 1));
6180 return const0_rtx;
6181 #ifdef EH_RETURN_DATA_REGNO
6182 case BUILT_IN_EH_RETURN_DATA_REGNO:
6183 return expand_builtin_eh_return_data_regno (exp);
6184 #endif
6185 case BUILT_IN_EXTEND_POINTER:
6186 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6187 case BUILT_IN_EH_POINTER:
6188 return expand_builtin_eh_pointer (exp);
6189 case BUILT_IN_EH_FILTER:
6190 return expand_builtin_eh_filter (exp);
6191 case BUILT_IN_EH_COPY_VALUES:
6192 return expand_builtin_eh_copy_values (exp);
6193
6194 case BUILT_IN_VA_START:
6195 return expand_builtin_va_start (exp);
6196 case BUILT_IN_VA_END:
6197 return expand_builtin_va_end (exp);
6198 case BUILT_IN_VA_COPY:
6199 return expand_builtin_va_copy (exp);
6200 case BUILT_IN_EXPECT:
6201 return expand_builtin_expect (exp, target);
6202 case BUILT_IN_ASSUME_ALIGNED:
6203 return expand_builtin_assume_aligned (exp, target);
6204 case BUILT_IN_PREFETCH:
6205 expand_builtin_prefetch (exp);
6206 return const0_rtx;
6207
6208 case BUILT_IN_INIT_TRAMPOLINE:
6209 return expand_builtin_init_trampoline (exp, true);
6210 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6211 return expand_builtin_init_trampoline (exp, false);
6212 case BUILT_IN_ADJUST_TRAMPOLINE:
6213 return expand_builtin_adjust_trampoline (exp);
6214
6215 case BUILT_IN_FORK:
6216 case BUILT_IN_EXECL:
6217 case BUILT_IN_EXECV:
6218 case BUILT_IN_EXECLP:
6219 case BUILT_IN_EXECLE:
6220 case BUILT_IN_EXECVP:
6221 case BUILT_IN_EXECVE:
6222 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6223 if (target)
6224 return target;
6225 break;
6226
6227 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6228 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6229 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6230 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6231 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6232 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6233 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6234 if (target)
6235 return target;
6236 break;
6237
6238 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6239 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6240 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6241 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6242 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6243 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6244 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6245 if (target)
6246 return target;
6247 break;
6248
6249 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6250 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6251 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6252 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6253 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6254 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6255 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6256 if (target)
6257 return target;
6258 break;
6259
6260 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6261 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6262 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6263 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6264 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6265 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6266 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6267 if (target)
6268 return target;
6269 break;
6270
6271 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6272 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6273 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6274 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6275 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6276 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6277 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6278 if (target)
6279 return target;
6280 break;
6281
6282 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6283 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6284 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6285 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6286 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6287 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6288 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6289 if (target)
6290 return target;
6291 break;
6292
6293 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6294 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6295 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6296 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6297 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6298 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6299 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6300 if (target)
6301 return target;
6302 break;
6303
6304 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6305 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6306 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6307 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6308 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6309 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6310 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6311 if (target)
6312 return target;
6313 break;
6314
6315 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6316 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6317 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6318 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6319 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6320 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6321 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6322 if (target)
6323 return target;
6324 break;
6325
6326 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6327 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6328 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6329 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6330 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6331 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6332 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6333 if (target)
6334 return target;
6335 break;
6336
6337 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6338 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6339 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6340 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6341 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6342 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6343 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6344 if (target)
6345 return target;
6346 break;
6347
6348 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6349 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6350 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6351 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6352 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6353 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6354 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6355 if (target)
6356 return target;
6357 break;
6358
6359 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6360 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6361 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6362 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6363 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6364 if (mode == VOIDmode)
6365 mode = TYPE_MODE (boolean_type_node);
6366 if (!target || !register_operand (target, mode))
6367 target = gen_reg_rtx (mode);
6368
6369 mode = get_builtin_sync_mode
6370 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6371 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6372 if (target)
6373 return target;
6374 break;
6375
6376 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6377 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6378 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6379 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6380 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6381 mode = get_builtin_sync_mode
6382 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6383 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6384 if (target)
6385 return target;
6386 break;
6387
6388 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6389 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6390 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6391 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6392 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6393 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6394 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6395 if (target)
6396 return target;
6397 break;
6398
6399 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6400 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6401 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6402 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6403 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6404 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6405 expand_builtin_sync_lock_release (mode, exp);
6406 return const0_rtx;
6407
6408 case BUILT_IN_SYNC_SYNCHRONIZE:
6409 expand_builtin_sync_synchronize ();
6410 return const0_rtx;
6411
6412 case BUILT_IN_ATOMIC_EXCHANGE_1:
6413 case BUILT_IN_ATOMIC_EXCHANGE_2:
6414 case BUILT_IN_ATOMIC_EXCHANGE_4:
6415 case BUILT_IN_ATOMIC_EXCHANGE_8:
6416 case BUILT_IN_ATOMIC_EXCHANGE_16:
6417 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6418 target = expand_builtin_atomic_exchange (mode, exp, target);
6419 if (target)
6420 return target;
6421 break;
6422
6423 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6424 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6425 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6426 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6427 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6428 {
6429 unsigned int nargs, z;
6430 vec<tree, va_gc> *vec;
6431
6432 mode =
6433 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6434 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6435 if (target)
6436 return target;
6437
6438 /* If this is turned into an external library call, the weak parameter
6439 must be dropped to match the expected parameter list. */
6440 nargs = call_expr_nargs (exp);
6441 vec_alloc (vec, nargs - 1);
6442 for (z = 0; z < 3; z++)
6443 vec->quick_push (CALL_EXPR_ARG (exp, z));
6444 /* Skip the boolean weak parameter. */
6445 for (z = 4; z < 6; z++)
6446 vec->quick_push (CALL_EXPR_ARG (exp, z));
6447 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6448 break;
6449 }
6450
6451 case BUILT_IN_ATOMIC_LOAD_1:
6452 case BUILT_IN_ATOMIC_LOAD_2:
6453 case BUILT_IN_ATOMIC_LOAD_4:
6454 case BUILT_IN_ATOMIC_LOAD_8:
6455 case BUILT_IN_ATOMIC_LOAD_16:
6456 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6457 target = expand_builtin_atomic_load (mode, exp, target);
6458 if (target)
6459 return target;
6460 break;
6461
6462 case BUILT_IN_ATOMIC_STORE_1:
6463 case BUILT_IN_ATOMIC_STORE_2:
6464 case BUILT_IN_ATOMIC_STORE_4:
6465 case BUILT_IN_ATOMIC_STORE_8:
6466 case BUILT_IN_ATOMIC_STORE_16:
6467 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6468 target = expand_builtin_atomic_store (mode, exp);
6469 if (target)
6470 return const0_rtx;
6471 break;
6472
6473 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6474 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6475 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6476 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6477 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6478 {
6479 enum built_in_function lib;
6480 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6481 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6482 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6483 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6484 ignore, lib);
6485 if (target)
6486 return target;
6487 break;
6488 }
6489 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6490 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6491 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6492 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6493 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6494 {
6495 enum built_in_function lib;
6496 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6497 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6498 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6499 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6500 ignore, lib);
6501 if (target)
6502 return target;
6503 break;
6504 }
6505 case BUILT_IN_ATOMIC_AND_FETCH_1:
6506 case BUILT_IN_ATOMIC_AND_FETCH_2:
6507 case BUILT_IN_ATOMIC_AND_FETCH_4:
6508 case BUILT_IN_ATOMIC_AND_FETCH_8:
6509 case BUILT_IN_ATOMIC_AND_FETCH_16:
6510 {
6511 enum built_in_function lib;
6512 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6513 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6514 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6515 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6516 ignore, lib);
6517 if (target)
6518 return target;
6519 break;
6520 }
6521 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6522 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6523 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6524 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6525 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6526 {
6527 enum built_in_function lib;
6528 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6529 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6530 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6531 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6532 ignore, lib);
6533 if (target)
6534 return target;
6535 break;
6536 }
6537 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6538 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6539 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6540 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6541 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6542 {
6543 enum built_in_function lib;
6544 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6545 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6546 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6547 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6548 ignore, lib);
6549 if (target)
6550 return target;
6551 break;
6552 }
6553 case BUILT_IN_ATOMIC_OR_FETCH_1:
6554 case BUILT_IN_ATOMIC_OR_FETCH_2:
6555 case BUILT_IN_ATOMIC_OR_FETCH_4:
6556 case BUILT_IN_ATOMIC_OR_FETCH_8:
6557 case BUILT_IN_ATOMIC_OR_FETCH_16:
6558 {
6559 enum built_in_function lib;
6560 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6561 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6562 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6563 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6564 ignore, lib);
6565 if (target)
6566 return target;
6567 break;
6568 }
6569 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6570 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6571 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6572 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6573 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6574 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6575 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6576 ignore, BUILT_IN_NONE);
6577 if (target)
6578 return target;
6579 break;
6580
6581 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6582 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6583 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6584 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6585 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6586 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6587 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6588 ignore, BUILT_IN_NONE);
6589 if (target)
6590 return target;
6591 break;
6592
6593 case BUILT_IN_ATOMIC_FETCH_AND_1:
6594 case BUILT_IN_ATOMIC_FETCH_AND_2:
6595 case BUILT_IN_ATOMIC_FETCH_AND_4:
6596 case BUILT_IN_ATOMIC_FETCH_AND_8:
6597 case BUILT_IN_ATOMIC_FETCH_AND_16:
6598 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6599 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6600 ignore, BUILT_IN_NONE);
6601 if (target)
6602 return target;
6603 break;
6604
6605 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6606 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6607 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6608 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6609 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6610 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6611 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6612 ignore, BUILT_IN_NONE);
6613 if (target)
6614 return target;
6615 break;
6616
6617 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6618 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6619 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6620 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6621 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6622 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6623 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6624 ignore, BUILT_IN_NONE);
6625 if (target)
6626 return target;
6627 break;
6628
6629 case BUILT_IN_ATOMIC_FETCH_OR_1:
6630 case BUILT_IN_ATOMIC_FETCH_OR_2:
6631 case BUILT_IN_ATOMIC_FETCH_OR_4:
6632 case BUILT_IN_ATOMIC_FETCH_OR_8:
6633 case BUILT_IN_ATOMIC_FETCH_OR_16:
6634 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6635 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6636 ignore, BUILT_IN_NONE);
6637 if (target)
6638 return target;
6639 break;
6640
6641 case BUILT_IN_ATOMIC_TEST_AND_SET:
6642 return expand_builtin_atomic_test_and_set (exp, target);
6643
6644 case BUILT_IN_ATOMIC_CLEAR:
6645 return expand_builtin_atomic_clear (exp);
6646
6647 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6648 return expand_builtin_atomic_always_lock_free (exp);
6649
6650 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6651 target = expand_builtin_atomic_is_lock_free (exp);
6652 if (target)
6653 return target;
6654 break;
6655
6656 case BUILT_IN_ATOMIC_THREAD_FENCE:
6657 expand_builtin_atomic_thread_fence (exp);
6658 return const0_rtx;
6659
6660 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6661 expand_builtin_atomic_signal_fence (exp);
6662 return const0_rtx;
6663
6664 case BUILT_IN_OBJECT_SIZE:
6665 return expand_builtin_object_size (exp);
6666
6667 case BUILT_IN_MEMCPY_CHK:
6668 case BUILT_IN_MEMPCPY_CHK:
6669 case BUILT_IN_MEMMOVE_CHK:
6670 case BUILT_IN_MEMSET_CHK:
6671 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6672 if (target)
6673 return target;
6674 break;
6675
6676 case BUILT_IN_STRCPY_CHK:
6677 case BUILT_IN_STPCPY_CHK:
6678 case BUILT_IN_STRNCPY_CHK:
6679 case BUILT_IN_STPNCPY_CHK:
6680 case BUILT_IN_STRCAT_CHK:
6681 case BUILT_IN_STRNCAT_CHK:
6682 case BUILT_IN_SNPRINTF_CHK:
6683 case BUILT_IN_VSNPRINTF_CHK:
6684 maybe_emit_chk_warning (exp, fcode);
6685 break;
6686
6687 case BUILT_IN_SPRINTF_CHK:
6688 case BUILT_IN_VSPRINTF_CHK:
6689 maybe_emit_sprintf_chk_warning (exp, fcode);
6690 break;
6691
6692 case BUILT_IN_FREE:
6693 if (warn_free_nonheap_object)
6694 maybe_emit_free_warning (exp);
6695 break;
6696
6697 case BUILT_IN_THREAD_POINTER:
6698 return expand_builtin_thread_pointer (exp, target);
6699
6700 case BUILT_IN_SET_THREAD_POINTER:
6701 expand_builtin_set_thread_pointer (exp);
6702 return const0_rtx;
6703
6704 case BUILT_IN_CILK_DETACH:
6705 expand_builtin_cilk_detach (exp);
6706 return const0_rtx;
6707
6708 case BUILT_IN_CILK_POP_FRAME:
6709 expand_builtin_cilk_pop_frame (exp);
6710 return const0_rtx;
6711
6712 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6713 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6714 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6715 return expand_normal (CALL_EXPR_ARG (exp, 0));
6716
6717 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6718 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6719 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6720 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6721 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6722 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6723 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6724 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6725 /* We allow user CHKP builtins if Pointer Bounds
6726 Checker is off. */
6727 if (!flag_check_pointer_bounds)
6728 {
6729 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6730 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
6731 return expand_normal (CALL_EXPR_ARG (exp, 0));
6732 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6733 return expand_normal (size_zero_node);
6734 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6735 return expand_normal (size_int (-1));
6736 else
6737 return const0_rtx;
6738 }
6739 /* FALLTHROUGH */
6740
6741 case BUILT_IN_CHKP_BNDMK:
6742 case BUILT_IN_CHKP_BNDSTX:
6743 case BUILT_IN_CHKP_BNDCL:
6744 case BUILT_IN_CHKP_BNDCU:
6745 case BUILT_IN_CHKP_BNDLDX:
6746 case BUILT_IN_CHKP_BNDRET:
6747 case BUILT_IN_CHKP_INTERSECT:
6748 case BUILT_IN_CHKP_ARG_BND:
6749 case BUILT_IN_CHKP_NARROW:
6750 case BUILT_IN_CHKP_EXTRACT_LOWER:
6751 case BUILT_IN_CHKP_EXTRACT_UPPER:
6752 /* Software implementation of pointers checker is NYI.
6753 Target support is required. */
6754 error ("Your target platform does not support -fcheck-pointers");
6755 break;
6756
6757 default: /* just do library call, if unknown builtin */
6758 break;
6759 }
6760
6761 /* The switch statement above can drop through to cause the function
6762 to be called normally. */
6763 return expand_call (exp, target, ignore);
6764 }
6765
6766 /* Determine whether a tree node represents a call to a built-in
6767 function. If the tree T is a call to a built-in function with
6768 the right number of arguments of the appropriate types, return
6769 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6770 Otherwise the return value is END_BUILTINS. */
6771
6772 enum built_in_function
6773 builtin_mathfn_code (const_tree t)
6774 {
6775 const_tree fndecl, arg, parmlist;
6776 const_tree argtype, parmtype;
6777 const_call_expr_arg_iterator iter;
6778
6779 if (TREE_CODE (t) != CALL_EXPR
6780 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6781 return END_BUILTINS;
6782
6783 fndecl = get_callee_fndecl (t);
6784 if (fndecl == NULL_TREE
6785 || TREE_CODE (fndecl) != FUNCTION_DECL
6786 || ! DECL_BUILT_IN (fndecl)
6787 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6788 return END_BUILTINS;
6789
6790 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6791 init_const_call_expr_arg_iterator (t, &iter);
6792 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6793 {
6794 /* If a function doesn't take a variable number of arguments,
6795 the last element in the list will have type `void'. */
6796 parmtype = TREE_VALUE (parmlist);
6797 if (VOID_TYPE_P (parmtype))
6798 {
6799 if (more_const_call_expr_args_p (&iter))
6800 return END_BUILTINS;
6801 return DECL_FUNCTION_CODE (fndecl);
6802 }
6803
6804 if (! more_const_call_expr_args_p (&iter))
6805 return END_BUILTINS;
6806
6807 arg = next_const_call_expr_arg (&iter);
6808 argtype = TREE_TYPE (arg);
6809
6810 if (SCALAR_FLOAT_TYPE_P (parmtype))
6811 {
6812 if (! SCALAR_FLOAT_TYPE_P (argtype))
6813 return END_BUILTINS;
6814 }
6815 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6816 {
6817 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6818 return END_BUILTINS;
6819 }
6820 else if (POINTER_TYPE_P (parmtype))
6821 {
6822 if (! POINTER_TYPE_P (argtype))
6823 return END_BUILTINS;
6824 }
6825 else if (INTEGRAL_TYPE_P (parmtype))
6826 {
6827 if (! INTEGRAL_TYPE_P (argtype))
6828 return END_BUILTINS;
6829 }
6830 else
6831 return END_BUILTINS;
6832 }
6833
6834 /* Variable-length argument list. */
6835 return DECL_FUNCTION_CODE (fndecl);
6836 }
6837
6838 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6839 evaluate to a constant. */
6840
6841 static tree
6842 fold_builtin_constant_p (tree arg)
6843 {
6844 /* We return 1 for a numeric type that's known to be a constant
6845 value at compile-time or for an aggregate type that's a
6846 literal constant. */
6847 STRIP_NOPS (arg);
6848
6849 /* If we know this is a constant, emit the constant of one. */
6850 if (CONSTANT_CLASS_P (arg)
6851 || (TREE_CODE (arg) == CONSTRUCTOR
6852 && TREE_CONSTANT (arg)))
6853 return integer_one_node;
6854 if (TREE_CODE (arg) == ADDR_EXPR)
6855 {
6856 tree op = TREE_OPERAND (arg, 0);
6857 if (TREE_CODE (op) == STRING_CST
6858 || (TREE_CODE (op) == ARRAY_REF
6859 && integer_zerop (TREE_OPERAND (op, 1))
6860 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6861 return integer_one_node;
6862 }
6863
6864 /* If this expression has side effects, show we don't know it to be a
6865 constant. Likewise if it's a pointer or aggregate type since in
6866 those case we only want literals, since those are only optimized
6867 when generating RTL, not later.
6868 And finally, if we are compiling an initializer, not code, we
6869 need to return a definite result now; there's not going to be any
6870 more optimization done. */
6871 if (TREE_SIDE_EFFECTS (arg)
6872 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6873 || POINTER_TYPE_P (TREE_TYPE (arg))
6874 || cfun == 0
6875 || folding_initializer
6876 || force_folding_builtin_constant_p)
6877 return integer_zero_node;
6878
6879 return NULL_TREE;
6880 }
6881
6882 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6883 return it as a truthvalue. */
6884
6885 static tree
6886 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6887 {
6888 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6889
6890 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6891 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6892 ret_type = TREE_TYPE (TREE_TYPE (fn));
6893 pred_type = TREE_VALUE (arg_types);
6894 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6895
6896 pred = fold_convert_loc (loc, pred_type, pred);
6897 expected = fold_convert_loc (loc, expected_type, expected);
6898 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6899
6900 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6901 build_int_cst (ret_type, 0));
6902 }
6903
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (TREE_CODE (inner_arg0) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      /* Rewrite __builtin_expect (a && b, v) as
	 __builtin_expect (a, v) && __builtin_expect (b, v) so the
	 expectation reaches both operands.  */
      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* The address of a weak symbol is not a compile-time constant:
	 the symbol may resolve to NULL at link time.  */
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
6976
6977 /* Fold a call to __builtin_classify_type with argument ARG. */
6978
6979 static tree
6980 fold_builtin_classify_type (tree arg)
6981 {
6982 if (arg == 0)
6983 return build_int_cst (integer_type_node, no_type_class);
6984
6985 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
6986 }
6987
6988 /* Fold a call to __builtin_strlen with argument ARG. */
6989
6990 static tree
6991 fold_builtin_strlen (location_t loc, tree type, tree arg)
6992 {
6993 if (!validate_arg (arg, POINTER_TYPE))
6994 return NULL_TREE;
6995 else
6996 {
6997 tree len = c_strlen (arg, 0);
6998
6999 if (len)
7000 return fold_convert_loc (loc, type, len);
7001
7002 return NULL_TREE;
7003 }
7004 }
7005
7006 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7007
7008 static tree
7009 fold_builtin_inf (location_t loc, tree type, int warn)
7010 {
7011 REAL_VALUE_TYPE real;
7012
7013 /* __builtin_inff is intended to be usable to define INFINITY on all
7014 targets. If an infinity is not available, INFINITY expands "to a
7015 positive constant of type float that overflows at translation
7016 time", footnote "In this case, using INFINITY will violate the
7017 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7018 Thus we pedwarn to ensure this constraint violation is
7019 diagnosed. */
7020 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7021 pedwarn (loc, 0, "target format does not support infinity");
7022
7023 real_inf (&real);
7024 return build_real (type, real);
7025 }
7026
7027 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7028
7029 static tree
7030 fold_builtin_nan (tree arg, tree type, int quiet)
7031 {
7032 REAL_VALUE_TYPE real;
7033 const char *str;
7034
7035 if (!validate_arg (arg, POINTER_TYPE))
7036 return NULL_TREE;
7037 str = c_getstr (arg);
7038 if (!str)
7039 return NULL_TREE;
7040
7041 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7042 return NULL_TREE;
7043
7044 return build_real (type, real);
7045 }
7046
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      /* An integer-to-float conversion is integral by construction.  */
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* Only the value operand determines the result.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These preserve integrality when both operands are integral.  */
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      /* Both selectable arms must be integral.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	/* A conversion from an integer type is integral; a conversion
	   from another real type is integral iff its operand is.  */
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  /* Rounding builtins always produce integral values.  */
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  /* fmin/fmax of two integral values is integral.  */
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
7118
7119 /* FNDECL is assumed to be a builtin where truncation can be propagated
7120 across (for instance floor((double)f) == (double)floorf (f).
7121 Do the transformation for a call with argument ARG. */
7122
7123 static tree
7124 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7125 {
7126 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7127
7128 if (!validate_arg (arg, REAL_TYPE))
7129 return NULL_TREE;
7130
7131 /* Integer rounding functions are idempotent. */
7132 if (fcode == builtin_mathfn_code (arg))
7133 return arg;
7134
7135 /* If argument is already integer valued, and we don't need to worry
7136 about setting errno, there's no need to perform rounding. */
7137 if (! flag_errno_math && integer_valued_real_p (arg))
7138 return arg;
7139
7140 if (optimize)
7141 {
7142 tree arg0 = strip_float_extensions (arg);
7143 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7144 tree newtype = TREE_TYPE (arg0);
7145 tree decl;
7146
7147 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7148 && (decl = mathfn_built_in (newtype, fcode)))
7149 return fold_convert_loc (loc, ftype,
7150 build_call_expr_loc (loc, decl, 1,
7151 fold_convert_loc (loc,
7152 newtype,
7153 arg0)));
7154 }
7155 return NULL_TREE;
7156 }
7157
7158 /* FNDECL is assumed to be builtin which can narrow the FP type of
7159 the argument, for instance lround((double)f) -> lroundf (f).
7160 Do the transformation for a call with argument ARG. */
7161
7162 static tree
7163 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7164 {
7165 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7166
7167 if (!validate_arg (arg, REAL_TYPE))
7168 return NULL_TREE;
7169
7170 /* If argument is already integer valued, and we don't need to worry
7171 about setting errno, there's no need to perform rounding. */
7172 if (! flag_errno_math && integer_valued_real_p (arg))
7173 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7174 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7175
7176 if (optimize)
7177 {
7178 tree ftype = TREE_TYPE (arg);
7179 tree arg0 = strip_float_extensions (arg);
7180 tree newtype = TREE_TYPE (arg0);
7181 tree decl;
7182
7183 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7184 && (decl = mathfn_built_in (newtype, fcode)))
7185 return build_call_expr_loc (loc, decl, 1,
7186 fold_convert_loc (loc, newtype, arg0));
7187 }
7188
7189 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7190 sizeof (int) == sizeof (long). */
7191 if (TYPE_PRECISION (integer_type_node)
7192 == TYPE_PRECISION (long_integer_type_node))
7193 {
7194 tree newfn = NULL_TREE;
7195 switch (fcode)
7196 {
7197 CASE_FLT_FN (BUILT_IN_ICEIL):
7198 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7199 break;
7200
7201 CASE_FLT_FN (BUILT_IN_IFLOOR):
7202 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7203 break;
7204
7205 CASE_FLT_FN (BUILT_IN_IROUND):
7206 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7207 break;
7208
7209 CASE_FLT_FN (BUILT_IN_IRINT):
7210 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7211 break;
7212
7213 default:
7214 break;
7215 }
7216
7217 if (newfn)
7218 {
7219 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7220 return fold_convert_loc (loc,
7221 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7222 }
7223 }
7224
7225 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7226 sizeof (long long) == sizeof (long). */
7227 if (TYPE_PRECISION (long_long_integer_type_node)
7228 == TYPE_PRECISION (long_integer_type_node))
7229 {
7230 tree newfn = NULL_TREE;
7231 switch (fcode)
7232 {
7233 CASE_FLT_FN (BUILT_IN_LLCEIL):
7234 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7235 break;
7236
7237 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7238 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7239 break;
7240
7241 CASE_FLT_FN (BUILT_IN_LLROUND):
7242 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7243 break;
7244
7245 CASE_FLT_FN (BUILT_IN_LLRINT):
7246 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7247 break;
7248
7249 default:
7250 break;
7251 }
7252
7253 if (newfn)
7254 {
7255 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7256 return fold_convert_loc (loc,
7257 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7258 }
7259 }
7260
7261 return NULL_TREE;
7262 }
7263
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant:
     cabs(a+bi) == hypot(a,b), evaluated via MPFR.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  STRIP_NOPS (real);
	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Expand cabs(z) as sqrt(re*re + im*im) when a sqrt builtin exists
     for TYPE.  Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* ARG is referenced twice below (real and imaginary part),
	     so guard it against double evaluation.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7341
7342 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7343 complex tree type of the result. If NEG is true, the imaginary
7344 zero is negative. */
7345
7346 static tree
7347 build_complex_cproj (tree type, bool neg)
7348 {
7349 REAL_VALUE_TYPE rinf, rzero = dconst0;
7350
7351 real_inf (&rinf);
7352 rzero.sign = neg;
7353 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7354 build_real (TREE_TYPE (type), rzero));
7355 }
7356
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg: cproj is the identity on
     every value the mode can represent.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* An infinite input projects to (inf + 0i), the imaginary zero
	 taking the sign of the imaginary part.  */
      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
7412
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{

  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: halving the exponent turns
	     1/N into 1/(2*N).  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* Use |x| so the rewritten pow sees a nonnegative base.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7486
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      /* Halve 1/3 to get the combined exponent 1/6.  */
	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* Form the combined exponent (1/3) * (1/3) = 1/9.  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7577
7578 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7579 TYPE is the type of the return value. Return NULL_TREE if no
7580 simplification can be made. */
7581
7582 static tree
7583 fold_builtin_cos (location_t loc,
7584 tree arg, tree type, tree fndecl)
7585 {
7586 tree res, narg;
7587
7588 if (!validate_arg (arg, REAL_TYPE))
7589 return NULL_TREE;
7590
7591 /* Calculate the result when the argument is a constant. */
7592 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7593 return res;
7594
7595 /* Optimize cos(-x) into cos (x). */
7596 if ((narg = fold_strip_sign_ops (arg)))
7597 return build_call_expr_loc (loc, fndecl, 1, narg);
7598
7599 return NULL_TREE;
7600 }
7601
7602 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7603 Return NULL_TREE if no simplification can be made. */
7604
7605 static tree
7606 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7607 {
7608 if (validate_arg (arg, REAL_TYPE))
7609 {
7610 tree res, narg;
7611
7612 /* Calculate the result when the argument is a constant. */
7613 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7614 return res;
7615
7616 /* Optimize cosh(-x) into cosh (x). */
7617 if ((narg = fold_strip_sign_ops (arg)))
7618 return build_call_expr_loc (loc, fndecl, 1, narg);
7619 }
7620
7621 return NULL_TREE;
7622 }
7623
7624 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7625 argument ARG. TYPE is the type of the return value. Return
7626 NULL_TREE if no simplification can be made. */
7627
7628 static tree
7629 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7630 bool hyper)
7631 {
7632 if (validate_arg (arg, COMPLEX_TYPE)
7633 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7634 {
7635 tree tmp;
7636
7637 /* Calculate the result when the argument is a constant. */
7638 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7639 return tmp;
7640
7641 /* Optimize fn(-x) into fn(x). */
7642 if ((tmp = fold_strip_sign_ops (arg)))
7643 return build_call_expr_loc (loc, fndecl, 1, tmp);
7644 }
7645
7646 return NULL_TREE;
7647 }
7648
7649 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7650 Return NULL_TREE if no simplification can be made. */
7651
7652 static tree
7653 fold_builtin_tan (tree arg, tree type)
7654 {
7655 enum built_in_function fcode;
7656 tree res;
7657
7658 if (!validate_arg (arg, REAL_TYPE))
7659 return NULL_TREE;
7660
7661 /* Calculate the result when the argument is a constant. */
7662 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7663 return res;
7664
7665 /* Optimize tan(atan(x)) = x. */
7666 fcode = builtin_mathfn_code (arg);
7667 if (flag_unsafe_math_optimizations
7668 && (fcode == BUILT_IN_ATAN
7669 || fcode == BUILT_IN_ATANF
7670 || fcode == BUILT_IN_ATANL))
7671 return CALL_EXPR_ARG (arg, 0);
7672
7673 return NULL_TREE;
7674 }
7675
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  /* ARG0 is the angle; ARG1 and ARG2 are output pointers.  */
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* The cexpi result is used twice (imaginary and real part), so
     protect it against double evaluation.  */
  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  /* Emit *arg1 = imag (cexpi (arg0)); *arg2 = real (cexpi (arg0));
     i.e. the sine goes through ARG1 and the cosine through ARG2.  */
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
7715
7716 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7717 NULL_TREE if no simplification can be made. */
7718
static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  /* ARG0 must be a complex value whose element type is a real type.  */
  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  /* RTYPE is the scalar (real) element type of the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  Requires the C library to provide the C99 complex
     routines that cexpi expands to.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp (0 + I*y) == cexpi (y).  */
  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Both call results are used twice below, so wrap each in a
	 SAVE_EXPR to evaluate it only once.  */
      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      /* Result: exp(r)*__real cexpi(i)  +  I * exp(r)*__imag cexpi(i).  */
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
7783
7784 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7785 Return NULL_TREE if no simplification can be made. */
7786
7787 static tree
7788 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7789 {
7790 if (!validate_arg (arg, REAL_TYPE))
7791 return NULL_TREE;
7792
7793 /* Optimize trunc of constant value. */
7794 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7795 {
7796 REAL_VALUE_TYPE r, x;
7797 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7798
7799 x = TREE_REAL_CST (arg);
7800 real_trunc (&r, TYPE_MODE (type), &x);
7801 return build_real (type, r);
7802 }
7803
7804 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7805 }
7806
7807 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7808 Return NULL_TREE if no simplification can be made. */
7809
7810 static tree
7811 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7812 {
7813 if (!validate_arg (arg, REAL_TYPE))
7814 return NULL_TREE;
7815
7816 /* Optimize floor of constant value. */
7817 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7818 {
7819 REAL_VALUE_TYPE x;
7820
7821 x = TREE_REAL_CST (arg);
7822 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7823 {
7824 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7825 REAL_VALUE_TYPE r;
7826
7827 real_floor (&r, TYPE_MODE (type), &x);
7828 return build_real (type, r);
7829 }
7830 }
7831
7832 /* Fold floor (x) where x is nonnegative to trunc (x). */
7833 if (tree_expr_nonnegative_p (arg))
7834 {
7835 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7836 if (truncfn)
7837 return build_call_expr_loc (loc, truncfn, 1, arg);
7838 }
7839
7840 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7841 }
7842
7843 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7844 Return NULL_TREE if no simplification can be made. */
7845
7846 static tree
7847 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7848 {
7849 if (!validate_arg (arg, REAL_TYPE))
7850 return NULL_TREE;
7851
7852 /* Optimize ceil of constant value. */
7853 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7854 {
7855 REAL_VALUE_TYPE x;
7856
7857 x = TREE_REAL_CST (arg);
7858 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7859 {
7860 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7861 REAL_VALUE_TYPE r;
7862
7863 real_ceil (&r, TYPE_MODE (type), &x);
7864 return build_real (type, r);
7865 }
7866 }
7867
7868 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7869 }
7870
7871 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7872 Return NULL_TREE if no simplification can be made. */
7873
7874 static tree
7875 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7876 {
7877 if (!validate_arg (arg, REAL_TYPE))
7878 return NULL_TREE;
7879
7880 /* Optimize round of constant value. */
7881 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7882 {
7883 REAL_VALUE_TYPE x;
7884
7885 x = TREE_REAL_CST (arg);
7886 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7887 {
7888 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7889 REAL_VALUE_TYPE r;
7890
7891 real_round (&r, TYPE_MODE (type), &x);
7892 return build_real (type, r);
7893 }
7894 }
7895
7896 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7897 }
7898
7899 /* Fold function call to builtin lround, lroundf or lroundl (or the
7900 corresponding long long versions) and other rounding functions. ARG
7901 is the argument to the call. Return NULL_TREE if no simplification
7902 can be made. */
7903
static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Infinities and NaNs have no representation in the integer
	 result type; leave those to the library call.  */
      if (real_isfinite (&x))
	{
	  /* ITYPE is the builtin's integer result type, FTYPE the
	     floating-point argument type.  */
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  double_int val;
	  REAL_VALUE_TYPE r;

	  /* Apply the rounding implied by the builtin's function code.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Convert the rounded value into VAL's two host words and fold
	     only when it actually fits the integer result type.  */
	  real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
	  if (double_int_fits_to_tree_p (itype, val))
	    return double_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
7966
7967 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7968 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7969 the argument to the call. Return NULL_TREE if no simplification can
7970 be made. */
7971
static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant is represented by two host words, LO and HI.  */
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < HOST_BITS_PER_DOUBLE_INT)
	    hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~(HOST_WIDE_INT_M1U << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* ffs: 1-based index of the least significant set bit, or 0
	     when the argument is zero.  ffs_hwi is already 1-based.  */
	  if (lo != 0)
	    result = ffs_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* clz: count of leading zero bits.  For a zero argument, use
	     the target-defined value when one exists, else the width.  */
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* ctz: count of trailing zero bits, with the same zero-input
	     treatment as clz above.  */
	  if (lo != 0)
	    result = ctz_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  /* clrsb: leading redundant sign bits.  Complement a negative
	     value first, then the count is like clz minus one.  */
	  if (width > 2 * HOST_BITS_PER_WIDE_INT)
	    return NULL_TREE;
	  if (width > HOST_BITS_PER_WIDE_INT
	      && (hi & ((unsigned HOST_WIDE_INT) 1
			<< (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
	    {
	      hi = ~hi & ~(HOST_WIDE_INT_M1U
			   << (width - HOST_BITS_PER_WIDE_INT - 1));
	      lo = ~lo;
	    }
	  else if (width <= HOST_BITS_PER_WIDE_INT
		   && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
	    lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 2;
	  else
	    result = width - 1;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  /* popcount: count set bits by repeatedly clearing the lowest
	     set bit of each word.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  /* parity: popcount modulo 2.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
8080
8081 /* Fold function call to builtin_bswap and the short, long and long long
8082 variants. Return NULL_TREE if no simplification can be made. */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The input constant lives in host words LO/HI; the byte-reversed
	 result is accumulated into R_LO/R_HI.  */
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP16:
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    int s;

	    /* Copy the byte at bit offset S to the mirrored offset D,
	       picking the source and destination host word depending on
	       which side of the word boundary each offset falls.  */
	    for (s = 0; s < width; s += 8)
	      {
		int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;

		if (s < HOST_BITS_PER_WIDE_INT)
		  byte = (lo >> s) & 0xff;
		else
		  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		if (d < HOST_BITS_PER_WIDE_INT)
		  r_lo |= byte << d;
		else
		  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
	      }
	  }

	  break;

	default:
	  gcc_unreachable ();
	}

      /* Build the result constant; use the wide form when the value may
	 need both host words.  */
      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (type, r_lo);
      else
	return build_int_cst_wide (type, r_lo, r_hi);
    }

  return NULL_TREE;
}
8139
8140 /* A subroutine of fold_builtin to fold the various logarithmic
8141 functions. Return NULL_TREE if no simplification can me made.
8142 FUNC is the corresponding MPFR logarithm function. */
8143
static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  The
	 &dconst0 argument constrains the domain to positive inputs.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  Which log this call
	 is folding is identified by comparing FUNC against the MPFR
	 routine it was passed.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
8232
8233 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8234 NULL_TREE if no simplification can be made. */
8235
8236 static tree
8237 fold_builtin_hypot (location_t loc, tree fndecl,
8238 tree arg0, tree arg1, tree type)
8239 {
8240 tree res, narg0, narg1;
8241
8242 if (!validate_arg (arg0, REAL_TYPE)
8243 || !validate_arg (arg1, REAL_TYPE))
8244 return NULL_TREE;
8245
8246 /* Calculate the result when the argument is a constant. */
8247 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8248 return res;
8249
8250 /* If either argument to hypot has a negate or abs, strip that off.
8251 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8252 narg0 = fold_strip_sign_ops (arg0);
8253 narg1 = fold_strip_sign_ops (arg1);
8254 if (narg0 || narg1)
8255 {
8256 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8257 narg1 ? narg1 : arg1);
8258 }
8259
8260 /* If either argument is zero, hypot is fabs of the other. */
8261 if (real_zerop (arg0))
8262 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8263 else if (real_zerop (arg1))
8264 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8265
8266 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8267 if (flag_unsafe_math_optimizations
8268 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8269 {
8270 const REAL_VALUE_TYPE sqrt2_trunc
8271 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8272 return fold_build2_loc (loc, MULT_EXPR, type,
8273 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8274 build_real (type, sqrt2_trunc));
8275 }
8276
8277 return NULL_TREE;
8278 }
8279
8280
8281 /* Fold a builtin function call to pow, powf, or powl. Return
8282 NULL_TREE if no simplification can be made. */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0; Y is still evaluated for side effects.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent: round-trip C through the
	 integer N and compare the values bit-for-bit.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* An inexact result is only acceptable when unsafe math
		 optimizations are enabled.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
8432
8433 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8434 Return NULL_TREE if no simplification can be made. */
8435 static tree
8436 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8437 tree arg0, tree arg1, tree type)
8438 {
8439 if (!validate_arg (arg0, REAL_TYPE)
8440 || !validate_arg (arg1, INTEGER_TYPE))
8441 return NULL_TREE;
8442
8443 /* Optimize pow(1.0,y) = 1.0. */
8444 if (real_onep (arg0))
8445 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8446
8447 if (host_integerp (arg1, 0))
8448 {
8449 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8450
8451 /* Evaluate powi at compile-time. */
8452 if (TREE_CODE (arg0) == REAL_CST
8453 && !TREE_OVERFLOW (arg0))
8454 {
8455 REAL_VALUE_TYPE x;
8456 x = TREE_REAL_CST (arg0);
8457 real_powi (&x, TYPE_MODE (type), &x, c);
8458 return build_real (type, x);
8459 }
8460
8461 /* Optimize pow(x,0) = 1.0. */
8462 if (c == 0)
8463 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8464 arg0);
8465
8466 /* Optimize pow(x,1) = x. */
8467 if (c == 1)
8468 return arg0;
8469
8470 /* Optimize pow(x,-1) = 1.0/x. */
8471 if (c == -1)
8472 return fold_build2_loc (loc, RDIV_EXPR, type,
8473 build_real (type, dconst1), arg0);
8474 }
8475
8476 return NULL_TREE;
8477 }
8478
8479 /* A subroutine of fold_builtin to fold the various exponent
8480 functions. Return NULL_TREE if no simplification can be made.
8481 FUNC is the corresponding MPFR exponent function. */
8482
8483 static tree
8484 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8485 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8486 {
8487 if (validate_arg (arg, REAL_TYPE))
8488 {
8489 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8490 tree res;
8491
8492 /* Calculate the result when the argument is a constant. */
8493 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8494 return res;
8495
8496 /* Optimize expN(logN(x)) = x. */
8497 if (flag_unsafe_math_optimizations)
8498 {
8499 const enum built_in_function fcode = builtin_mathfn_code (arg);
8500
8501 if ((func == mpfr_exp
8502 && (fcode == BUILT_IN_LOG
8503 || fcode == BUILT_IN_LOGF
8504 || fcode == BUILT_IN_LOGL))
8505 || (func == mpfr_exp2
8506 && (fcode == BUILT_IN_LOG2
8507 || fcode == BUILT_IN_LOG2F
8508 || fcode == BUILT_IN_LOG2L))
8509 || (func == mpfr_exp10
8510 && (fcode == BUILT_IN_LOG10
8511 || fcode == BUILT_IN_LOG10F
8512 || fcode == BUILT_IN_LOG10L)))
8513 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8514 }
8515 }
8516
8517 return NULL_TREE;
8518 }
8519
8520 /* Return true if VAR is a VAR_DECL or a component thereof. */
8521
8522 static bool
8523 var_decl_component_p (tree var)
8524 {
8525 tree inner = var;
8526 while (handled_component_p (inner))
8527 inner = TREE_OPERAND (inner, 0);
8528 return SSA_VAR_P (inner);
8529 }
8530
8531 /* Fold function call to builtin memset. Return
8532 NULL_TREE if no simplification can be made. */
8533
static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
		     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* Only constant, nonnegative lengths can be handled.  */
  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  /* The fill value must be a constant, and DEST must not have side
     effects since it is used more than once below.  */
  if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* DEST must be the address of a known, non-volatile object.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  /* For an array destination, operate on the element type.  */
  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The store must cover exactly one object of type ETYPE, and DEST
     must be sufficiently aligned for that type.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return NULL_TREE;

  /* The replicated fill value must fit in one host word.  */
  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the low byte of C across CVAL.  The final step shifts
	 by 31 then by 1 so the shift count never reaches the word width
	 (which would be undefined) on a 32-bit HOST_WIDE_INT.  */
      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Build the replacement store *(etype *)DEST = CVAL.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
				     fold_convert_loc (loc,
						       build_pointer_type (etype),
						       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  /* memset returns DEST; keep that value while still performing the
     store for its side effect.  */
  return omit_one_operand_loc (loc, type, dest, ret);
}
8609
/* Fold function call to builtin bzero. Return
8611 NULL_TREE if no simplification can be made. */
8612
8613 static tree
8614 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8615 {
8616 if (! validate_arg (dest, POINTER_TYPE)
8617 || ! validate_arg (size, INTEGER_TYPE))
8618 return NULL_TREE;
8619
8620 if (!ignore)
8621 return NULL_TREE;
8622
8623 /* New argument list transforming bzero(ptr x, int y) to
8624 memset(ptr x, int 0, size_t y). This is done this way
8625 so that if it isn't expanded inline, we fallback to
8626 calling bzero instead of memset. */
8627
8628 return fold_builtin_memset (loc, dest, integer_zero_node,
8629 fold_convert_loc (loc, size_type_node, size),
8630 void_type_node, ignore);
8631 }
8632
8633 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8634 NULL_TREE if no simplification can be made.
8635 If ENDP is 0, return DEST (like memcpy).
8636 If ENDP is 1, return DEST+LEN (like mempcpy).
8637 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8638 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8639 (memmove). */
8640
8641 static tree
8642 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8643 tree len, tree type, bool ignore, int endp)
8644 {
8645 tree destvar, srcvar, expr;
8646
8647 if (! validate_arg (dest, POINTER_TYPE)
8648 || ! validate_arg (src, POINTER_TYPE)
8649 || ! validate_arg (len, INTEGER_TYPE))
8650 return NULL_TREE;
8651
8652 /* If the LEN parameter is zero, return DEST. */
8653 if (integer_zerop (len))
8654 return omit_one_operand_loc (loc, type, dest, src);
8655
8656 /* If SRC and DEST are the same (and not volatile), return
8657 DEST{,+LEN,+LEN-1}. */
8658 if (operand_equal_p (src, dest, 0))
8659 expr = len;
8660 else
8661 {
8662 tree srctype, desttype;
8663 unsigned int src_align, dest_align;
8664 tree off0;
8665
8666 if (endp == 3)
8667 {
8668 src_align = get_pointer_alignment (src);
8669 dest_align = get_pointer_alignment (dest);
8670
8671 /* Both DEST and SRC must be pointer types.
8672 ??? This is what old code did. Is the testing for pointer types
8673 really mandatory?
8674
8675 If either SRC is readonly or length is 1, we can use memcpy. */
8676 if (!dest_align || !src_align)
8677 return NULL_TREE;
8678 if (readonly_data_expr (src)
8679 || (host_integerp (len, 1)
8680 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8681 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8682 {
8683 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8684 if (!fn)
8685 return NULL_TREE;
8686 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8687 }
8688
8689 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8690 if (TREE_CODE (src) == ADDR_EXPR
8691 && TREE_CODE (dest) == ADDR_EXPR)
8692 {
8693 tree src_base, dest_base, fn;
8694 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8695 HOST_WIDE_INT size = -1;
8696 HOST_WIDE_INT maxsize = -1;
8697
8698 srcvar = TREE_OPERAND (src, 0);
8699 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8700 &size, &maxsize);
8701 destvar = TREE_OPERAND (dest, 0);
8702 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8703 &size, &maxsize);
8704 if (host_integerp (len, 1))
8705 maxsize = tree_low_cst (len, 1);
8706 else
8707 maxsize = -1;
8708 src_offset /= BITS_PER_UNIT;
8709 dest_offset /= BITS_PER_UNIT;
8710 if (SSA_VAR_P (src_base)
8711 && SSA_VAR_P (dest_base))
8712 {
8713 if (operand_equal_p (src_base, dest_base, 0)
8714 && ranges_overlap_p (src_offset, maxsize,
8715 dest_offset, maxsize))
8716 return NULL_TREE;
8717 }
8718 else if (TREE_CODE (src_base) == MEM_REF
8719 && TREE_CODE (dest_base) == MEM_REF)
8720 {
8721 double_int off;
8722 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8723 TREE_OPERAND (dest_base, 0), 0))
8724 return NULL_TREE;
8725 off = mem_ref_offset (src_base) +
8726 double_int::from_shwi (src_offset);
8727 if (!off.fits_shwi ())
8728 return NULL_TREE;
8729 src_offset = off.low;
8730 off = mem_ref_offset (dest_base) +
8731 double_int::from_shwi (dest_offset);
8732 if (!off.fits_shwi ())
8733 return NULL_TREE;
8734 dest_offset = off.low;
8735 if (ranges_overlap_p (src_offset, maxsize,
8736 dest_offset, maxsize))
8737 return NULL_TREE;
8738 }
8739 else
8740 return NULL_TREE;
8741
8742 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8743 if (!fn)
8744 return NULL_TREE;
8745 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8746 }
8747
8748 /* If the destination and source do not alias optimize into
8749 memcpy as well. */
8750 if ((is_gimple_min_invariant (dest)
8751 || TREE_CODE (dest) == SSA_NAME)
8752 && (is_gimple_min_invariant (src)
8753 || TREE_CODE (src) == SSA_NAME))
8754 {
8755 ao_ref destr, srcr;
8756 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8757 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8758 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8759 {
8760 tree fn;
8761 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8762 if (!fn)
8763 return NULL_TREE;
8764 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8765 }
8766 }
8767
8768 return NULL_TREE;
8769 }
8770
8771 if (!host_integerp (len, 0))
8772 return NULL_TREE;
8773 /* FIXME:
8774 This logic lose for arguments like (type *)malloc (sizeof (type)),
8775 since we strip the casts of up to VOID return value from malloc.
8776 Perhaps we ought to inherit type from non-VOID argument here? */
8777 STRIP_NOPS (src);
8778 STRIP_NOPS (dest);
8779 if (!POINTER_TYPE_P (TREE_TYPE (src))
8780 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8781 return NULL_TREE;
8782 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8783 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8784 {
8785 tree tem = TREE_OPERAND (src, 0);
8786 STRIP_NOPS (tem);
8787 if (tem != TREE_OPERAND (src, 0))
8788 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8789 }
8790 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8791 {
8792 tree tem = TREE_OPERAND (dest, 0);
8793 STRIP_NOPS (tem);
8794 if (tem != TREE_OPERAND (dest, 0))
8795 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8796 }
8797 srctype = TREE_TYPE (TREE_TYPE (src));
8798 if (TREE_CODE (srctype) == ARRAY_TYPE
8799 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8800 {
8801 srctype = TREE_TYPE (srctype);
8802 STRIP_NOPS (src);
8803 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8804 }
8805 desttype = TREE_TYPE (TREE_TYPE (dest));
8806 if (TREE_CODE (desttype) == ARRAY_TYPE
8807 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8808 {
8809 desttype = TREE_TYPE (desttype);
8810 STRIP_NOPS (dest);
8811 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8812 }
8813 if (TREE_ADDRESSABLE (srctype)
8814 || TREE_ADDRESSABLE (desttype))
8815 return NULL_TREE;
8816
8817 src_align = get_pointer_alignment (src);
8818 dest_align = get_pointer_alignment (dest);
8819 if (dest_align < TYPE_ALIGN (desttype)
8820 || src_align < TYPE_ALIGN (srctype))
8821 return NULL_TREE;
8822
8823 if (!ignore)
8824 dest = builtin_save_expr (dest);
8825
8826 /* Build accesses at offset zero with a ref-all character type. */
8827 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8828 ptr_mode, true), 0);
8829
8830 destvar = dest;
8831 STRIP_NOPS (destvar);
8832 if (TREE_CODE (destvar) == ADDR_EXPR
8833 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8834 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8835 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8836 else
8837 destvar = NULL_TREE;
8838
8839 srcvar = src;
8840 STRIP_NOPS (srcvar);
8841 if (TREE_CODE (srcvar) == ADDR_EXPR
8842 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8843 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8844 {
8845 if (!destvar
8846 || src_align >= TYPE_ALIGN (desttype))
8847 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8848 srcvar, off0);
8849 else if (!STRICT_ALIGNMENT)
8850 {
8851 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8852 src_align);
8853 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8854 }
8855 else
8856 srcvar = NULL_TREE;
8857 }
8858 else
8859 srcvar = NULL_TREE;
8860
8861 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8862 return NULL_TREE;
8863
8864 if (srcvar == NULL_TREE)
8865 {
8866 STRIP_NOPS (src);
8867 if (src_align >= TYPE_ALIGN (desttype))
8868 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8869 else
8870 {
8871 if (STRICT_ALIGNMENT)
8872 return NULL_TREE;
8873 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8874 src_align);
8875 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8876 }
8877 }
8878 else if (destvar == NULL_TREE)
8879 {
8880 STRIP_NOPS (dest);
8881 if (dest_align >= TYPE_ALIGN (srctype))
8882 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8883 else
8884 {
8885 if (STRICT_ALIGNMENT)
8886 return NULL_TREE;
8887 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8888 dest_align);
8889 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8890 }
8891 }
8892
8893 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8894 }
8895
8896 if (ignore)
8897 return expr;
8898
8899 if (endp == 0 || endp == 3)
8900 return omit_one_operand_loc (loc, type, dest, expr);
8901
8902 if (expr == len)
8903 expr = NULL_TREE;
8904
8905 if (endp == 2)
8906 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8907 ssize_int (1));
8908
8909 dest = fold_build_pointer_plus_loc (loc, dest, len);
8910 dest = fold_convert_loc (loc, type, dest);
8911 if (expr)
8912 dest = omit_one_operand_loc (loc, type, dest, expr);
8913 return dest;
8914 }
8915
8916 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8917 If LEN is not NULL, it represents the length of the string to be
8918 copied. Return NULL_TREE if no simplification can be made. */
8919
8920 tree
8921 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8922 {
8923 tree fn;
8924
8925 if (!validate_arg (dest, POINTER_TYPE)
8926 || !validate_arg (src, POINTER_TYPE))
8927 return NULL_TREE;
8928
8929 /* If SRC and DEST are the same (and not volatile), return DEST. */
8930 if (operand_equal_p (src, dest, 0))
8931 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8932
8933 if (optimize_function_for_size_p (cfun))
8934 return NULL_TREE;
8935
8936 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8937 if (!fn)
8938 return NULL_TREE;
8939
8940 if (!len)
8941 {
8942 len = c_strlen (src, 1);
8943 if (! len || TREE_SIDE_EFFECTS (len))
8944 return NULL_TREE;
8945 }
8946
8947 len = fold_convert_loc (loc, size_type_node, len);
8948 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
8949 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8950 build_call_expr_loc (loc, fn, 3, dest, src, len));
8951 }
8952
8953 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8954 Return NULL_TREE if no simplification can be made. */
8955
8956 static tree
8957 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8958 {
8959 tree fn, len, lenp1, call, type;
8960
8961 if (!validate_arg (dest, POINTER_TYPE)
8962 || !validate_arg (src, POINTER_TYPE))
8963 return NULL_TREE;
8964
8965 len = c_strlen (src, 1);
8966 if (!len
8967 || TREE_CODE (len) != INTEGER_CST)
8968 return NULL_TREE;
8969
8970 if (optimize_function_for_size_p (cfun)
8971 /* If length is zero it's small enough. */
8972 && !integer_zerop (len))
8973 return NULL_TREE;
8974
8975 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8976 if (!fn)
8977 return NULL_TREE;
8978
8979 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8980 fold_convert_loc (loc, size_type_node, len),
8981 build_int_cst (size_type_node, 1));
8982 /* We use dest twice in building our expression. Save it from
8983 multiple expansions. */
8984 dest = builtin_save_expr (dest);
8985 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8986
8987 type = TREE_TYPE (TREE_TYPE (fndecl));
8988 dest = fold_build_pointer_plus_loc (loc, dest, len);
8989 dest = fold_convert_loc (loc, type, dest);
8990 dest = omit_one_operand_loc (loc, type, dest, call);
8991 return dest;
8992 }
8993
8994 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8995 If SLEN is not NULL, it represents the length of the source string.
8996 Return NULL_TREE if no simplification can be made. */
8997
8998 tree
8999 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9000 tree src, tree len, tree slen)
9001 {
9002 tree fn;
9003
9004 if (!validate_arg (dest, POINTER_TYPE)
9005 || !validate_arg (src, POINTER_TYPE)
9006 || !validate_arg (len, INTEGER_TYPE))
9007 return NULL_TREE;
9008
9009 /* If the LEN parameter is zero, return DEST. */
9010 if (integer_zerop (len))
9011 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9012
9013 /* We can't compare slen with len as constants below if len is not a
9014 constant. */
9015 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9016 return NULL_TREE;
9017
9018 if (!slen)
9019 slen = c_strlen (src, 1);
9020
9021 /* Now, we must be passed a constant src ptr parameter. */
9022 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9023 return NULL_TREE;
9024
9025 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9026
9027 /* We do not support simplification of this case, though we do
9028 support it when expanding trees into RTL. */
9029 /* FIXME: generate a call to __builtin_memset. */
9030 if (tree_int_cst_lt (slen, len))
9031 return NULL_TREE;
9032
9033 /* OK transform into builtin memcpy. */
9034 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9035 if (!fn)
9036 return NULL_TREE;
9037
9038 len = fold_convert_loc (loc, size_type_node, len);
9039 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9040 build_call_expr_loc (loc, fn, 3, dest, src, len));
9041 }
9042
9043 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9044 arguments to the call, and TYPE is its return type.
9045 Return NULL_TREE if no simplification can be made. */
9046
9047 static tree
9048 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9049 {
9050 if (!validate_arg (arg1, POINTER_TYPE)
9051 || !validate_arg (arg2, INTEGER_TYPE)
9052 || !validate_arg (len, INTEGER_TYPE))
9053 return NULL_TREE;
9054 else
9055 {
9056 const char *p1;
9057
9058 if (TREE_CODE (arg2) != INTEGER_CST
9059 || !host_integerp (len, 1))
9060 return NULL_TREE;
9061
9062 p1 = c_getstr (arg1);
9063 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9064 {
9065 char c;
9066 const char *r;
9067 tree tem;
9068
9069 if (target_char_cast (arg2, &c))
9070 return NULL_TREE;
9071
9072 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9073
9074 if (r == NULL)
9075 return build_int_cst (TREE_TYPE (arg1), 0);
9076
9077 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9078 return fold_convert_loc (loc, type, tem);
9079 }
9080 return NULL_TREE;
9081 }
9082 }
9083
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2
   and length LEN.  Return NULL_TREE if no simplification can be made.
   The result, when non-NULL, is an integer_type_node expression.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  ARG1 and ARG2 are
     still kept for their side effects.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* c_getstr yields NULL unless the argument is a constant string.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      /* Normalize to exactly -1/0/1: the host memcmp result magnitude
	 need not match what the target library would produce.  */
      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      /* memcmp compares as unsigned chars; build a const-qualified,
	 ref-all (alias-set-zero) unsigned char pointer type for the
	 single-byte loads.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9150
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  The result,
   when non-NULL, is an integer_type_node expression.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  /* c_getstr yields NULL unless the argument is a constant string.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both operands constant: evaluate now, normalizing to exactly
     -1/0/1 since the host strcmp result magnitude need not match the
     target library's.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      /* strcmp compares as unsigned chars; build a const-qualified,
	 ref-all unsigned char pointer type for the load.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
9213
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  The result,
   when non-NULL, is an integer_type_node expression.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  ARG1 and ARG2 are
     still kept for their side effects.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* c_getstr yields NULL unless the argument is a constant string.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings and the length constant: evaluate now, normalizing
     to exactly -1/0/1 since the host strncmp result magnitude need
     not match the target library's.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      /* strncmp compares as unsigned chars; build a const-qualified,
	 ref-all unsigned char pointer type for the load.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9308
9309 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9310 ARG. Return NULL_TREE if no simplification can be made. */
9311
9312 static tree
9313 fold_builtin_signbit (location_t loc, tree arg, tree type)
9314 {
9315 if (!validate_arg (arg, REAL_TYPE))
9316 return NULL_TREE;
9317
9318 /* If ARG is a compile-time constant, determine the result. */
9319 if (TREE_CODE (arg) == REAL_CST
9320 && !TREE_OVERFLOW (arg))
9321 {
9322 REAL_VALUE_TYPE c;
9323
9324 c = TREE_REAL_CST (arg);
9325 return (REAL_VALUE_NEGATIVE (c)
9326 ? build_one_cst (type)
9327 : build_zero_cst (type));
9328 }
9329
9330 /* If ARG is non-negative, the result is always zero. */
9331 if (tree_expr_nonnegative_p (arg))
9332 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9333
9334 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9335 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9336 return fold_convert (type,
9337 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9338 build_real (TREE_TYPE (arg), dconst0)));
9339
9340 return NULL_TREE;
9341 }
9342
9343 /* Fold function call to builtin copysign, copysignf or copysignl with
9344 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9345 be made. */
9346
9347 static tree
9348 fold_builtin_copysign (location_t loc, tree fndecl,
9349 tree arg1, tree arg2, tree type)
9350 {
9351 tree tem;
9352
9353 if (!validate_arg (arg1, REAL_TYPE)
9354 || !validate_arg (arg2, REAL_TYPE))
9355 return NULL_TREE;
9356
9357 /* copysign(X,X) is X. */
9358 if (operand_equal_p (arg1, arg2, 0))
9359 return fold_convert_loc (loc, type, arg1);
9360
9361 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9362 if (TREE_CODE (arg1) == REAL_CST
9363 && TREE_CODE (arg2) == REAL_CST
9364 && !TREE_OVERFLOW (arg1)
9365 && !TREE_OVERFLOW (arg2))
9366 {
9367 REAL_VALUE_TYPE c1, c2;
9368
9369 c1 = TREE_REAL_CST (arg1);
9370 c2 = TREE_REAL_CST (arg2);
9371 /* c1.sign := c2.sign. */
9372 real_copysign (&c1, &c2);
9373 return build_real (type, c1);
9374 }
9375
9376 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9377 Remember to evaluate Y for side-effects. */
9378 if (tree_expr_nonnegative_p (arg2))
9379 return omit_one_operand_loc (loc, type,
9380 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9381 arg2);
9382
9383 /* Strip sign changing operations for the first argument. */
9384 tem = fold_strip_sign_ops (arg1);
9385 if (tem)
9386 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9387
9388 return NULL_TREE;
9389 }
9390
9391 /* Fold a call to builtin isascii with argument ARG. */
9392
9393 static tree
9394 fold_builtin_isascii (location_t loc, tree arg)
9395 {
9396 if (!validate_arg (arg, INTEGER_TYPE))
9397 return NULL_TREE;
9398 else
9399 {
9400 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9401 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9402 build_int_cst (integer_type_node,
9403 ~ (unsigned HOST_WIDE_INT) 0x7f));
9404 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9405 arg, integer_zero_node);
9406 }
9407 }
9408
9409 /* Fold a call to builtin toascii with argument ARG. */
9410
9411 static tree
9412 fold_builtin_toascii (location_t loc, tree arg)
9413 {
9414 if (!validate_arg (arg, INTEGER_TYPE))
9415 return NULL_TREE;
9416
9417 /* Transform toascii(c) -> (c & 0x7f). */
9418 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9419 build_int_cst (integer_type_node, 0x7f));
9420 }
9421
9422 /* Fold a call to builtin isdigit with argument ARG. */
9423
9424 static tree
9425 fold_builtin_isdigit (location_t loc, tree arg)
9426 {
9427 if (!validate_arg (arg, INTEGER_TYPE))
9428 return NULL_TREE;
9429 else
9430 {
9431 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9432 /* According to the C standard, isdigit is unaffected by locale.
9433 However, it definitely is affected by the target character set. */
9434 unsigned HOST_WIDE_INT target_digit0
9435 = lang_hooks.to_target_charset ('0');
9436
9437 if (target_digit0 == 0)
9438 return NULL_TREE;
9439
9440 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9441 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9442 build_int_cst (unsigned_type_node, target_digit0));
9443 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9444 build_int_cst (unsigned_type_node, 9));
9445 }
9446 }
9447
9448 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9449
9450 static tree
9451 fold_builtin_fabs (location_t loc, tree arg, tree type)
9452 {
9453 if (!validate_arg (arg, REAL_TYPE))
9454 return NULL_TREE;
9455
9456 arg = fold_convert_loc (loc, type, arg);
9457 if (TREE_CODE (arg) == REAL_CST)
9458 return fold_abs_const (arg, type);
9459 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9460 }
9461
9462 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9463
9464 static tree
9465 fold_builtin_abs (location_t loc, tree arg, tree type)
9466 {
9467 if (!validate_arg (arg, INTEGER_TYPE))
9468 return NULL_TREE;
9469
9470 arg = fold_convert_loc (loc, type, arg);
9471 if (TREE_CODE (arg) == INTEGER_CST)
9472 return fold_abs_const (arg, type);
9473 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9474 }
9475
9476 /* Fold a fma operation with arguments ARG[012]. */
9477
9478 tree
9479 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9480 tree type, tree arg0, tree arg1, tree arg2)
9481 {
9482 if (TREE_CODE (arg0) == REAL_CST
9483 && TREE_CODE (arg1) == REAL_CST
9484 && TREE_CODE (arg2) == REAL_CST)
9485 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9486
9487 return NULL_TREE;
9488 }
9489
9490 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9491
9492 static tree
9493 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9494 {
9495 if (validate_arg (arg0, REAL_TYPE)
9496 && validate_arg (arg1, REAL_TYPE)
9497 && validate_arg (arg2, REAL_TYPE))
9498 {
9499 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9500 if (tem)
9501 return tem;
9502
9503 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9504 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9505 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9506 }
9507 return NULL_TREE;
9508 }
9509
/* Fold a call to builtin fmin or fmax with arguments ARG0 and ARG1
   and result type TYPE; MAX selects fmax over fmin.  Returns the
   folded tree or NULL_TREE.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  OEP_PURE_SAME also accepts
	 matching calls to pure functions.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
9554
9555 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9556
9557 static tree
9558 fold_builtin_carg (location_t loc, tree arg, tree type)
9559 {
9560 if (validate_arg (arg, COMPLEX_TYPE)
9561 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9562 {
9563 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9564
9565 if (atan2_fn)
9566 {
9567 tree new_arg = builtin_save_expr (arg);
9568 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9569 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9570 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9571 }
9572 }
9573
9574 return NULL_TREE;
9575 }
9576
/* Fold a call to builtin logb/ilogb with argument ARG and result type
   RETTYPE.  RETTYPE distinguishes the two: REAL_TYPE means logb,
   INTEGER_TYPE means ilogb.  Returns the folded tree or NULL_TREE.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only a valid real constant argument can be folded here.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    {
	      /* For logb(-Inf) we have to return +Inf.  */
	      if (real_isinf (value) && real_isneg (value))
		{
		  REAL_VALUE_TYPE tem;
		  real_inf (&tem);
		  return build_real (rettype, tem);
		}
	      return fold_convert_loc (loc, rettype, arg);
	    }
	  /* For ilogb, Inf/NaN results (FP_ILOGBNAN, INT_MAX) are
	     target-library-defined, so punt like the zero case.  */
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9627
9628 /* Fold a call to builtin significand, if radix == 2. */
9629
9630 static tree
9631 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9632 {
9633 if (! validate_arg (arg, REAL_TYPE))
9634 return NULL_TREE;
9635
9636 STRIP_NOPS (arg);
9637
9638 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9639 {
9640 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9641
9642 switch (value->cl)
9643 {
9644 case rvc_zero:
9645 case rvc_nan:
9646 case rvc_inf:
9647 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9648 return fold_convert_loc (loc, rettype, arg);
9649 case rvc_normal:
9650 /* For normal numbers, proceed iff radix == 2. */
9651 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9652 {
9653 REAL_VALUE_TYPE result = *value;
9654 /* In GCC, normalized significands are in the range [0.5,
9655 1.0). We want them to be [1.0, 2.0) so set the
9656 exponent to 1. */
9657 SET_REAL_EXP (&result, 1);
9658 return build_real (rettype, result);
9659 }
9660 break;
9661 }
9662 }
9663
9664 return NULL_TREE;
9665 }
9666
/* Fold a call to builtin frexp with arguments ARG0 (the value) and
   ARG1 (an int * receiving the exponent); RETTYPE is the function's
   return type.  The base is always 2 here.  Folds only when ARG0 is
   a compile-time real constant and ARG1 points to int.  Returns the
   folded COMPOUND_EXPR (*ARG1 = exp, frac) or NULL_TREE.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  /* Turn the pointer argument into the lvalue it points to.  */
  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9722
/* Fold a call to builtin ldexp or scalbn/scalbln with arguments ARG0
   (the value) and ARG1 (the exponent adjustment) and result type
   TYPE.  If LDEXP is true then we can assume the base is two.  If
   it's false, then we have to check the mode of the TYPE parameter
   in certain cases.  Returns the folded tree or NULL_TREE.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
	 scalbn/scalbln additionally require the radix to be 2.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9783
/* Fold a call to builtin modf (ARG0, ARG1).  RETTYPE is the return
   type of the builtin.  Only folds when ARG0 is a REAL_CST without
   overflow and *ARG1 has the same main variant type as RETTYPE; the
   result is a COMPOUND_EXPR storing the integral part through ARG1
   and yielding the fractional part.  Returns NULL_TREE otherwise.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold a compile-time real constant.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      /* Mark the store so later folding does not drop it.  */
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
9839
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return a folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happens if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Prefer the target's direct instruction pattern when one exists;
     expansion will use it instead of this generic rewrite.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	/* Format the mode's largest finite value into BUF and parse
	   it back into R.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* RMAX is the mode's largest finite value, RMIN its smallest
	   normalized value (0x1p(emin-1)).  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	/* Save fabs(x) so it is evaluated only once across both
	   comparisons.  */
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
9929
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  BUILTIN_INDEX selects which
   classification is performed (BUILT_IN_ISINF, BUILT_IN_ISINF_SIGN,
   BUILT_IN_ISFINITE or BUILT_IN_ISNAN).  FNDECL supplies the result
   type.  Returns NULL_TREE when no folding is possible.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* When the mode has no infinities the answer is trivially 0.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	/* ARG is referenced twice below; evaluate it only once.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 booleans.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* Without NaNs and infinities every value is finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* isnan(x) == !(x == x): an unordered self-comparison is true
	 exactly for NaN.  Save ARG so it is evaluated only once.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
10022
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* Work with fabs(arg), saved so it is evaluated only once across
     all the comparisons built below.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  /* Build the chain inside-out, starting with the zero/subnormal
     decision and wrapping each outer test around RES.  */
  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  /* 0x1p(emin-1) is the smallest normalized value of the mode.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      /* ORDERED_EXPR is false only for NaN, so RES handles the
	 ordered cases and FP_NAN the rest.  */
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
10090
10091 /* Fold a call to an unordered comparison function such as
10092 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10093 being called and ARG0 and ARG1 are the arguments for the call.
10094 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10095 the opposite of the desired result. UNORDERED_CODE is used
10096 for modes that can hold NaNs and ORDERED_CODE is used for
10097 the rest. */
10098
10099 static tree
10100 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10101 enum tree_code unordered_code,
10102 enum tree_code ordered_code)
10103 {
10104 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10105 enum tree_code code;
10106 tree type0, type1;
10107 enum tree_code code0, code1;
10108 tree cmp_type = NULL_TREE;
10109
10110 type0 = TREE_TYPE (arg0);
10111 type1 = TREE_TYPE (arg1);
10112
10113 code0 = TREE_CODE (type0);
10114 code1 = TREE_CODE (type1);
10115
10116 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10117 /* Choose the wider of two real types. */
10118 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10119 ? type0 : type1;
10120 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10121 cmp_type = type0;
10122 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10123 cmp_type = type1;
10124
10125 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10126 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10127
10128 if (unordered_code == UNORDERED_EXPR)
10129 {
10130 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10131 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10132 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10133 }
10134
10135 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10136 : ordered_code;
10137 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10138 fold_build2_loc (loc, code, type, arg0, arg1));
10139 }
10140
/* Fold a call to built-in function FNDECL with 0 arguments.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    /* inf/huge_val and their decimal variants fold to a constant;
       the bool argument selects warning behavior in fold_builtin_inf.  */
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    case BUILT_IN_UNREACHABLE:
      /* Under -fsanitize=unreachable, replace the builtin with a
	 runtime diagnostic unless the function opts out via the
	 no_sanitize_undefined attribute.  */
      if (flag_sanitize & SANITIZE_UNREACHABLE
	  && (current_function_decl == NULL
	      || !lookup_attribute ("no_sanitize_undefined",
				    DECL_ATTRIBUTES (current_function_decl))))
	return ubsan_instrument_unreachable (loc);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
10177
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    /* Complex-valued builtins: the COMPLEX_TYPE argument must have a
       REAL_TYPE component before folding.  */
    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
    break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
    break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
    break;

    CASE_FLT_FN (BUILT_IN_CCOS):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);

    CASE_FLT_FN (BUILT_IN_CCOSH):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);

    CASE_FLT_FN (BUILT_IN_CPROJ):
      return fold_builtin_cproj (loc, arg0, type);

    /* The complex transcendental functions below fold constant
       arguments via the MPC library (do_mpc_arg1).  */
    CASE_FLT_FN (BUILT_IN_CSIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sin);
    break;

    CASE_FLT_FN (BUILT_IN_CSINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sinh);
    break;

    CASE_FLT_FN (BUILT_IN_CTAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tan);
    break;

    CASE_FLT_FN (BUILT_IN_CTANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tanh);
    break;

    CASE_FLT_FN (BUILT_IN_CLOG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_log);
    break;

    CASE_FLT_FN (BUILT_IN_CSQRT):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sqrt);
    break;

    CASE_FLT_FN (BUILT_IN_CASIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asin);
    break;

    CASE_FLT_FN (BUILT_IN_CACOS):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acos);
    break;

    CASE_FLT_FN (BUILT_IN_CATAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atan);
    break;

    CASE_FLT_FN (BUILT_IN_CASINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asinh);
    break;

    CASE_FLT_FN (BUILT_IN_CACOSH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acosh);
    break;

    CASE_FLT_FN (BUILT_IN_CATANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atanh);
    break;

    CASE_FLT_FN (BUILT_IN_CABS):
      return fold_builtin_cabs (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SQRT):
      return fold_builtin_sqrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CBRT):
      return fold_builtin_cbrt (loc, arg0, type);

    /* Real-valued transcendental functions fold constant arguments via
       MPFR (do_mpfr_arg1); the trailing arguments bound the valid
       input domain (lower bound, upper bound, bounds-inclusive).  */
    CASE_FLT_FN (BUILT_IN_ASIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asin,
			     &dconstm1, &dconst1, true);
    break;

    CASE_FLT_FN (BUILT_IN_ACOS):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acos,
			     &dconstm1, &dconst1, true);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ASINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ACOSH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acosh,
			     &dconst1, NULL, true);
    break;

    CASE_FLT_FN (BUILT_IN_ATANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atanh,
			     &dconstm1, &dconst1, false);
    break;

    CASE_FLT_FN (BUILT_IN_SIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_COS):
      return fold_builtin_cos (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TAN):
      return fold_builtin_tan (arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXP):
      return fold_builtin_cexp (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXPI):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
    break;

    CASE_FLT_FN (BUILT_IN_SINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_COSH):
      return fold_builtin_cosh (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ERF):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ERFC):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_TGAMMA):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_EXP):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);

    CASE_FLT_FN (BUILT_IN_EXP2):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);

    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);

    CASE_FLT_FN (BUILT_IN_EXPM1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_LOG):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);

    CASE_FLT_FN (BUILT_IN_LOG2):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);

    CASE_FLT_FN (BUILT_IN_LOG10):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);

    CASE_FLT_FN (BUILT_IN_LOG1P):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log1p,
			     &dconstm1, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_J0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j0,
			     NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_J1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j1,
			     NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_Y0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y0,
			     &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_Y1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y1,
			     &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_NAN):
    case BUILT_IN_NAND32:
    case BUILT_IN_NAND64:
    case BUILT_IN_NAND128:
      return fold_builtin_nan (arg0, type, true);

    CASE_FLT_FN (BUILT_IN_NANS):
      return fold_builtin_nan (arg0, type, false);

    CASE_FLT_FN (BUILT_IN_FLOOR):
      return fold_builtin_floor (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_CEIL):
      return fold_builtin_ceil (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_TRUNC):
      return fold_builtin_trunc (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ROUND):
      return fold_builtin_round (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return fold_trunc_transparent_mathfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      return fold_builtin_int_roundingfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      return fold_fixed_mathfn (loc, fndecl, arg0);

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      return fold_builtin_bswap (fndecl, arg0);

    CASE_INT_FN (BUILT_IN_FFS):
    CASE_INT_FN (BUILT_IN_CLZ):
    CASE_INT_FN (BUILT_IN_CTZ):
    CASE_INT_FN (BUILT_IN_CLRSB):
    CASE_INT_FN (BUILT_IN_POPCOUNT):
    CASE_INT_FN (BUILT_IN_PARITY):
      return fold_builtin_bitop (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
      return fold_builtin_signbit (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      return fold_builtin_significand (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ILOGB):
    CASE_FLT_FN (BUILT_IN_LOGB):
      return fold_builtin_logb (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    /* The classification builtins first try constant folding, then
       fall back to the generic interclass expansion.  */
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);

    case BUILT_IN_FREE:
      /* free (NULL) is a no-op; drop the call entirely.  */
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;

}
10591
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* Two-argument math builtins fold constant arguments via MPFR.  */
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
    break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
    break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
    break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
    break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
	  && validate_arg (arg1, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
    break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
					 type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_BZERO:
      return fold_builtin_bzero (loc, arg0, arg1, ignore);

    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (loc, arg0, arg1);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);

    case BUILT_IN_STPCPY:
      /* When the result is unused, stpcpy is equivalent to strcpy;
	 emit the simpler call.  */
      if (ignore)
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
	  if (!fn)
	    break;

	  return build_call_expr_loc (loc, fn, 2, arg0, arg1);
	}
      else
	return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    /* Unordered comparisons: the codes passed are the *inverse* of
       the builtin's meaning (see fold_builtin_unordered_cmp).  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* The first argument is the checking flag; only fold when it is
	 a side-effect-free integer constant we can drop.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (loc, fndecl,
				    arg1, NULL_TREE, ignore, fcode);
    break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
				   ignore, fcode);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
10798
10799 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10800 and ARG2. IGNORE is true if the result of the function call is ignored.
10801 This function returns NULL_TREE if no simplification was possible. */
10802
10803 static tree
10804 fold_builtin_3 (location_t loc, tree fndecl,
10805 tree arg0, tree arg1, tree arg2, bool ignore)
10806 {
10807 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10808 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10809 switch (fcode)
10810 {
10811
10812 CASE_FLT_FN (BUILT_IN_SINCOS):
10813 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10814
10815 CASE_FLT_FN (BUILT_IN_FMA):
10816 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10817 break;
10818
10819 CASE_FLT_FN (BUILT_IN_REMQUO):
10820 if (validate_arg (arg0, REAL_TYPE)
10821 && validate_arg (arg1, REAL_TYPE)
10822 && validate_arg (arg2, POINTER_TYPE))
10823 return do_mpfr_remquo (arg0, arg1, arg2);
10824 break;
10825
10826 case BUILT_IN_MEMSET:
10827 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10828
10829 case BUILT_IN_BCOPY:
10830 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10831 void_type_node, true, /*endp=*/3);
10832
10833 case BUILT_IN_MEMCPY:
10834 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10835 type, ignore, /*endp=*/0);
10836
10837 case BUILT_IN_MEMPCPY:
10838 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10839 type, ignore, /*endp=*/1);
10840
10841 case BUILT_IN_MEMMOVE:
10842 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10843 type, ignore, /*endp=*/3);
10844
10845 case BUILT_IN_STRNCAT:
10846 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10847
10848 case BUILT_IN_STRNCPY:
10849 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10850
10851 case BUILT_IN_STRNCMP:
10852 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10853
10854 case BUILT_IN_MEMCHR:
10855 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10856
10857 case BUILT_IN_BCMP:
10858 case BUILT_IN_MEMCMP:
10859 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10860
10861 case BUILT_IN_SPRINTF:
10862 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10863
10864 case BUILT_IN_SNPRINTF:
10865 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10866
10867 case BUILT_IN_STRCPY_CHK:
10868 case BUILT_IN_STPCPY_CHK:
10869 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10870 ignore, fcode);
10871
10872 case BUILT_IN_STRCAT_CHK:
10873 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10874
10875 case BUILT_IN_PRINTF_CHK:
10876 case BUILT_IN_VPRINTF_CHK:
10877 if (!validate_arg (arg0, INTEGER_TYPE)
10878 || TREE_SIDE_EFFECTS (arg0))
10879 return NULL_TREE;
10880 else
10881 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10882 break;
10883
10884 case BUILT_IN_FPRINTF:
10885 case BUILT_IN_FPRINTF_UNLOCKED:
10886 case BUILT_IN_VFPRINTF:
10887 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10888 ignore, fcode);
10889
10890 case BUILT_IN_FPRINTF_CHK:
10891 case BUILT_IN_VFPRINTF_CHK:
10892 if (!validate_arg (arg1, INTEGER_TYPE)
10893 || TREE_SIDE_EFFECTS (arg1))
10894 return NULL_TREE;
10895 else
10896 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10897 ignore, fcode);
10898
10899 default:
10900 break;
10901 }
10902 return NULL_TREE;
10903 }
10904
10905 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10906 ARG2, and ARG3. IGNORE is true if the result of the function call is
10907 ignored. This function returns NULL_TREE if no simplification was
10908 possible. */
10909
10910 static tree
10911 fold_builtin_4 (location_t loc, tree fndecl,
10912 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10913 {
10914 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10915
10916 switch (fcode)
10917 {
10918 case BUILT_IN_MEMCPY_CHK:
10919 case BUILT_IN_MEMPCPY_CHK:
10920 case BUILT_IN_MEMMOVE_CHK:
10921 case BUILT_IN_MEMSET_CHK:
10922 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10923 NULL_TREE, ignore,
10924 DECL_FUNCTION_CODE (fndecl));
10925
10926 case BUILT_IN_STRNCPY_CHK:
10927 case BUILT_IN_STPNCPY_CHK:
10928 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
10929 ignore, fcode);
10930
10931 case BUILT_IN_STRNCAT_CHK:
10932 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10933
10934 case BUILT_IN_SNPRINTF:
10935 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10936
10937 case BUILT_IN_FPRINTF_CHK:
10938 case BUILT_IN_VFPRINTF_CHK:
10939 if (!validate_arg (arg1, INTEGER_TYPE)
10940 || TREE_SIDE_EFFECTS (arg1))
10941 return NULL_TREE;
10942 else
10943 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10944 ignore, fcode);
10945 break;
10946
10947 default:
10948 break;
10949 }
10950 return NULL_TREE;
10951 }
10952
10953 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10954 arguments, where NARGS <= 4. IGNORE is true if the result of the
10955 function call is ignored. This function returns NULL_TREE if no
10956 simplification was possible. Note that this only folds builtins with
10957 fixed argument patterns. Foldings that do varargs-to-varargs
10958 transformations, or that match calls with more than 4 arguments,
10959 need to be handled with fold_builtin_varargs instead. */
10960
10961 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10962
10963 static tree
10964 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10965 {
10966 tree ret = NULL_TREE;
10967
10968 switch (nargs)
10969 {
10970 case 0:
10971 ret = fold_builtin_0 (loc, fndecl, ignore);
10972 break;
10973 case 1:
10974 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10975 break;
10976 case 2:
10977 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10978 break;
10979 case 3:
10980 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10981 break;
10982 case 4:
10983 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10984 ignore);
10985 break;
10986 default:
10987 break;
10988 }
10989 if (ret)
10990 {
10991 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10992 SET_EXPR_LOCATION (ret, loc);
10993 TREE_NO_WARNING (ret) = 1;
10994 return ret;
10995 }
10996 return NULL_TREE;
10997 }
10998
10999 /* Builtins with folding operations that operate on "..." arguments
11000 need special handling; we need to store the arguments in a convenient
11001 data structure before attempting any folding. Fortunately there are
11002 only a few builtins that fall into this category. FNDECL is the
11003 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11004 result of the function call is ignored. */
11005
11006 static tree
11007 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11008 bool ignore ATTRIBUTE_UNUSED)
11009 {
11010 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11011 tree ret = NULL_TREE;
11012
11013 switch (fcode)
11014 {
11015 case BUILT_IN_SPRINTF_CHK:
11016 case BUILT_IN_VSPRINTF_CHK:
11017 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11018 break;
11019
11020 case BUILT_IN_SNPRINTF_CHK:
11021 case BUILT_IN_VSNPRINTF_CHK:
11022 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11023 break;
11024
11025 case BUILT_IN_FPCLASSIFY:
11026 ret = fold_builtin_fpclassify (loc, exp);
11027 break;
11028
11029 default:
11030 break;
11031 }
11032 if (ret)
11033 {
11034 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11035 SET_EXPR_LOCATION (ret, loc);
11036 TREE_NO_WARNING (ret) = 1;
11037 return ret;
11038 }
11039 return NULL_TREE;
11040 }
11041
11042 /* Return true if FNDECL shouldn't be folded right now.
11043 If a built-in function has an inline attribute always_inline
11044 wrapper, defer folding it after always_inline functions have
11045 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11046 might not be performed. */
11047
11048 bool
11049 avoid_folding_inline_builtin (tree fndecl)
11050 {
11051 return (DECL_DECLARED_INLINE_P (fndecl)
11052 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11053 && cfun
11054 && !cfun->always_inline_functions_inlined
11055 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11056 }
11057
11058 /* A wrapper function for builtin folding that prevents warnings for
11059 "statement without effect" and the like, caused by removing the
11060 call node earlier than the warning is generated. */
11061
11062 tree
11063 fold_call_expr (location_t loc, tree exp, bool ignore)
11064 {
11065 tree ret = NULL_TREE;
11066 tree fndecl = get_callee_fndecl (exp);
11067 if (fndecl
11068 && TREE_CODE (fndecl) == FUNCTION_DECL
11069 && DECL_BUILT_IN (fndecl)
11070 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11071 yet. Defer folding until we see all the arguments
11072 (after inlining). */
11073 && !CALL_EXPR_VA_ARG_PACK (exp))
11074 {
11075 int nargs = call_expr_nargs (exp);
11076
11077 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11078 instead last argument is __builtin_va_arg_pack (). Defer folding
11079 even in that case, until arguments are finalized. */
11080 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11081 {
11082 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11083 if (fndecl2
11084 && TREE_CODE (fndecl2) == FUNCTION_DECL
11085 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11086 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11087 return NULL_TREE;
11088 }
11089
11090 if (avoid_folding_inline_builtin (fndecl))
11091 return NULL_TREE;
11092
11093 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11094 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11095 CALL_EXPR_ARGP (exp), ignore);
11096 else
11097 {
11098 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11099 {
11100 tree *args = CALL_EXPR_ARGP (exp);
11101 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11102 }
11103 if (!ret)
11104 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11105 if (ret)
11106 return ret;
11107 }
11108 }
11109 return NULL_TREE;
11110 }
11111
11112 /* Conveniently construct a function call expression. FNDECL names the
11113 function to be called and N arguments are passed in the array
11114 ARGARRAY. */
11115
11116 tree
11117 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11118 {
11119 tree fntype = TREE_TYPE (fndecl);
11120 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11121
11122 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11123 }
11124
11125 /* Conveniently construct a function call expression. FNDECL names the
11126 function to be called and the arguments are passed in the vector
11127 VEC. */
11128
11129 tree
11130 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11131 {
11132 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11133 vec_safe_address (vec));
11134 }
11135
11136
11137 /* Conveniently construct a function call expression. FNDECL names the
11138 function to be called, N is the number of arguments, and the "..."
11139 parameters are the argument expressions. */
11140
11141 tree
11142 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11143 {
11144 va_list ap;
11145 tree *argarray = XALLOCAVEC (tree, n);
11146 int i;
11147
11148 va_start (ap, n);
11149 for (i = 0; i < n; i++)
11150 argarray[i] = va_arg (ap, tree);
11151 va_end (ap);
11152 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11153 }
11154
11155 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11156 varargs macros aren't supported by all bootstrap compilers. */
11157
11158 tree
11159 build_call_expr (tree fndecl, int n, ...)
11160 {
11161 va_list ap;
11162 tree *argarray = XALLOCAVEC (tree, n);
11163 int i;
11164
11165 va_start (ap, n);
11166 for (i = 0; i < n; i++)
11167 argarray[i] = va_arg (ap, tree);
11168 va_end (ap);
11169 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11170 }
11171
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.

   If FN addresses a builtin, try to fold the call first; otherwise (or
   when folding is deferred or fails) build and return a plain CALL_EXPR.
   The deferral checks below mirror fold_call_expr and must stay in this
   order: va_arg_pack detection, always_inline deferral, then MD vs
   normal builtin folding.  */

tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  /* Don't fold builtins wrapped by always_inline functions yet;
	     see avoid_folding_inline_builtin.  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* Machine-dependent builtins are folded by the target hook.  */
	      ret = targetm.fold_builtin (fndecl, n, argarray, false);
	      if (ret)
		return ret;

	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}
11229
11230 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11231 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11232 of arguments in ARGS to be omitted. OLDNARGS is the number of
11233 elements in ARGS. */
11234
11235 static tree
11236 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11237 int skip, tree fndecl, int n, va_list newargs)
11238 {
11239 int nargs = oldnargs - skip + n;
11240 tree *buffer;
11241
11242 if (n > 0)
11243 {
11244 int i, j;
11245
11246 buffer = XALLOCAVEC (tree, nargs);
11247 for (i = 0; i < n; i++)
11248 buffer[i] = va_arg (newargs, tree);
11249 for (j = skip; j < oldnargs; j++, i++)
11250 buffer[i] = args[j];
11251 }
11252 else
11253 buffer = args + skip;
11254
11255 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11256 }
11257
11258 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11259 list ARGS along with N new arguments specified as the "..."
11260 parameters. SKIP is the number of arguments in ARGS to be omitted.
11261 OLDNARGS is the number of elements in ARGS. */
11262
11263 static tree
11264 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11265 int skip, tree fndecl, int n, ...)
11266 {
11267 va_list ap;
11268 tree t;
11269
11270 va_start (ap, n);
11271 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11272 va_end (ap);
11273
11274 return t;
11275 }
11276
11277 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11278 along with N new arguments specified as the "..." parameters. SKIP
11279 is the number of arguments in EXP to be omitted. This function is used
11280 to do varargs-to-varargs transformations. */
11281
11282 static tree
11283 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11284 {
11285 va_list ap;
11286 tree t;
11287
11288 va_start (ap, n);
11289 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11290 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11291 va_end (ap);
11292
11293 return t;
11294 }
11295
11296 /* Validate a single argument ARG against a tree code CODE representing
11297 a type. */
11298
11299 static bool
11300 validate_arg (const_tree arg, enum tree_code code)
11301 {
11302 if (!arg)
11303 return false;
11304 else if (code == POINTER_TYPE)
11305 return POINTER_TYPE_P (TREE_TYPE (arg));
11306 else if (code == INTEGER_TYPE)
11307 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11308 return code == TREE_CODE (TREE_TYPE (arg));
11309 }
11310
11311 /* This function validates the types of a function call argument list
11312 against a specified list of tree_codes. If the last specifier is a 0,
11313 that represents an ellipses, otherwise the last specifier must be a
11314 VOID_TYPE.
11315
11316 This is the GIMPLE version of validate_arglist. Eventually we want to
11317 completely convert builtins.c to work from GIMPLEs and the tree based
11318 validate_arglist will then be removed. */
11319
11320 bool
11321 validate_gimple_arglist (const_gimple call, ...)
11322 {
11323 enum tree_code code;
11324 bool res = 0;
11325 va_list ap;
11326 const_tree arg;
11327 size_t i;
11328
11329 va_start (ap, call);
11330 i = 0;
11331
11332 do
11333 {
11334 code = (enum tree_code) va_arg (ap, int);
11335 switch (code)
11336 {
11337 case 0:
11338 /* This signifies an ellipses, any further arguments are all ok. */
11339 res = true;
11340 goto end;
11341 case VOID_TYPE:
11342 /* This signifies an endlink, if no arguments remain, return
11343 true, otherwise return false. */
11344 res = (i == gimple_call_num_args (call));
11345 goto end;
11346 default:
11347 /* If no parameters remain or the parameter's code does not
11348 match the specified code, return false. Otherwise continue
11349 checking any remaining arguments. */
11350 arg = gimple_call_arg (call, i++);
11351 if (!validate_arg (arg, code))
11352 goto end;
11353 break;
11354 }
11355 }
11356 while (1);
11357
11358 /* We need gotos here since we can only have one VA_CLOSE in a
11359 function. */
11360 end: ;
11361 va_end (ap);
11362
11363 return res;
11364 }
11365
11366 /* This function validates the types of a function call argument list
11367 against a specified list of tree_codes. If the last specifier is a 0,
11368 that represents an ellipses, otherwise the last specifier must be a
11369 VOID_TYPE. */
11370
11371 bool
11372 validate_arglist (const_tree callexpr, ...)
11373 {
11374 enum tree_code code;
11375 bool res = 0;
11376 va_list ap;
11377 const_call_expr_arg_iterator iter;
11378 const_tree arg;
11379
11380 va_start (ap, callexpr);
11381 init_const_call_expr_arg_iterator (callexpr, &iter);
11382
11383 do
11384 {
11385 code = (enum tree_code) va_arg (ap, int);
11386 switch (code)
11387 {
11388 case 0:
11389 /* This signifies an ellipses, any further arguments are all ok. */
11390 res = true;
11391 goto end;
11392 case VOID_TYPE:
11393 /* This signifies an endlink, if no arguments remain, return
11394 true, otherwise return false. */
11395 res = !more_const_call_expr_args_p (&iter);
11396 goto end;
11397 default:
11398 /* If no parameters remain or the parameter's code does not
11399 match the specified code, return false. Otherwise continue
11400 checking any remaining arguments. */
11401 arg = next_const_call_expr_arg (&iter);
11402 if (!validate_arg (arg, code))
11403 goto end;
11404 break;
11405 }
11406 }
11407 while (1);
11408
11409 /* We need gotos here since we can only have one VA_CLOSE in a
11410 function. */
11411 end: ;
11412 va_end (ap);
11413
11414 return res;
11415 }
11416
/* Default target-specific builtin expander that does nothing.  Targets
   that define no machine-dependent builtins install this as the
   expand_builtin hook; returning NULL_RTX tells the caller to fall
   back to a normal call.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
11428
11429 /* Returns true is EXP represents data that would potentially reside
11430 in a readonly section. */
11431
11432 static bool
11433 readonly_data_expr (tree exp)
11434 {
11435 STRIP_NOPS (exp);
11436
11437 if (TREE_CODE (exp) != ADDR_EXPR)
11438 return false;
11439
11440 exp = get_base_address (TREE_OPERAND (exp, 0));
11441 if (!exp)
11442 return false;
11443
11444 /* Make sure we call decl_readonly_section only for trees it
11445 can handle (since it returns true for everything it doesn't
11446 understand). */
11447 if (TREE_CODE (exp) == STRING_CST
11448 || TREE_CODE (exp) == CONSTRUCTOR
11449 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11450 return decl_readonly_section (exp, 0);
11451 else
11452 return false;
11453 }
11454
11455 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11456 to the call, and TYPE is its return type.
11457
11458 Return NULL_TREE if no simplification was possible, otherwise return the
11459 simplified form of the call as a tree.
11460
11461 The simplified form may be a constant or other expression which
11462 computes the same value, but in a more efficient manner (including
11463 calls to other builtin functions).
11464
11465 The call may contain arguments which need to be evaluated, but
11466 which are not useful to determine the result of the call. In
11467 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11468 COMPOUND_EXPR will be an argument which must be evaluated.
11469 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11470 COMPOUND_EXPR in the chain will contain the tree for the simplified
11471 form of the builtin function call. */
11472
11473 static tree
11474 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11475 {
11476 if (!validate_arg (s1, POINTER_TYPE)
11477 || !validate_arg (s2, POINTER_TYPE))
11478 return NULL_TREE;
11479 else
11480 {
11481 tree fn;
11482 const char *p1, *p2;
11483
11484 p2 = c_getstr (s2);
11485 if (p2 == NULL)
11486 return NULL_TREE;
11487
11488 p1 = c_getstr (s1);
11489 if (p1 != NULL)
11490 {
11491 const char *r = strstr (p1, p2);
11492 tree tem;
11493
11494 if (r == NULL)
11495 return build_int_cst (TREE_TYPE (s1), 0);
11496
11497 /* Return an offset into the constant string argument. */
11498 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11499 return fold_convert_loc (loc, type, tem);
11500 }
11501
11502 /* The argument is const char *, and the result is char *, so we need
11503 a type conversion here to avoid a warning. */
11504 if (p2[0] == '\0')
11505 return fold_convert_loc (loc, type, s1);
11506
11507 if (p2[1] != '\0')
11508 return NULL_TREE;
11509
11510 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11511 if (!fn)
11512 return NULL_TREE;
11513
11514 /* New argument list transforming strstr(s1, s2) to
11515 strchr(s1, s2[0]). */
11516 return build_call_expr_loc (loc, fn, 2, s1,
11517 build_int_cst (integer_type_node, p2[0]));
11518 }
11519 }
11520
11521 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11522 the call, and TYPE is its return type.
11523
11524 Return NULL_TREE if no simplification was possible, otherwise return the
11525 simplified form of the call as a tree.
11526
11527 The simplified form may be a constant or other expression which
11528 computes the same value, but in a more efficient manner (including
11529 calls to other builtin functions).
11530
11531 The call may contain arguments which need to be evaluated, but
11532 which are not useful to determine the result of the call. In
11533 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11534 COMPOUND_EXPR will be an argument which must be evaluated.
11535 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11536 COMPOUND_EXPR in the chain will contain the tree for the simplified
11537 form of the builtin function call. */
11538
11539 static tree
11540 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11541 {
11542 if (!validate_arg (s1, POINTER_TYPE)
11543 || !validate_arg (s2, INTEGER_TYPE))
11544 return NULL_TREE;
11545 else
11546 {
11547 const char *p1;
11548
11549 if (TREE_CODE (s2) != INTEGER_CST)
11550 return NULL_TREE;
11551
11552 p1 = c_getstr (s1);
11553 if (p1 != NULL)
11554 {
11555 char c;
11556 const char *r;
11557 tree tem;
11558
11559 if (target_char_cast (s2, &c))
11560 return NULL_TREE;
11561
11562 r = strchr (p1, c);
11563
11564 if (r == NULL)
11565 return build_int_cst (TREE_TYPE (s1), 0);
11566
11567 /* Return an offset into the constant string argument. */
11568 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11569 return fold_convert_loc (loc, type, tem);
11570 }
11571 return NULL_TREE;
11572 }
11573 }
11574
11575 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11576 the call, and TYPE is its return type.
11577
11578 Return NULL_TREE if no simplification was possible, otherwise return the
11579 simplified form of the call as a tree.
11580
11581 The simplified form may be a constant or other expression which
11582 computes the same value, but in a more efficient manner (including
11583 calls to other builtin functions).
11584
11585 The call may contain arguments which need to be evaluated, but
11586 which are not useful to determine the result of the call. In
11587 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11588 COMPOUND_EXPR will be an argument which must be evaluated.
11589 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11590 COMPOUND_EXPR in the chain will contain the tree for the simplified
11591 form of the builtin function call. */
11592
11593 static tree
11594 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11595 {
11596 if (!validate_arg (s1, POINTER_TYPE)
11597 || !validate_arg (s2, INTEGER_TYPE))
11598 return NULL_TREE;
11599 else
11600 {
11601 tree fn;
11602 const char *p1;
11603
11604 if (TREE_CODE (s2) != INTEGER_CST)
11605 return NULL_TREE;
11606
11607 p1 = c_getstr (s1);
11608 if (p1 != NULL)
11609 {
11610 char c;
11611 const char *r;
11612 tree tem;
11613
11614 if (target_char_cast (s2, &c))
11615 return NULL_TREE;
11616
11617 r = strrchr (p1, c);
11618
11619 if (r == NULL)
11620 return build_int_cst (TREE_TYPE (s1), 0);
11621
11622 /* Return an offset into the constant string argument. */
11623 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11624 return fold_convert_loc (loc, type, tem);
11625 }
11626
11627 if (! integer_zerop (s2))
11628 return NULL_TREE;
11629
11630 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11631 if (!fn)
11632 return NULL_TREE;
11633
11634 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11635 return build_call_expr_loc (loc, fn, 2, s1, s2);
11636 }
11637 }
11638
11639 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11640 to the call, and TYPE is its return type.
11641
11642 Return NULL_TREE if no simplification was possible, otherwise return the
11643 simplified form of the call as a tree.
11644
11645 The simplified form may be a constant or other expression which
11646 computes the same value, but in a more efficient manner (including
11647 calls to other builtin functions).
11648
11649 The call may contain arguments which need to be evaluated, but
11650 which are not useful to determine the result of the call. In
11651 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11652 COMPOUND_EXPR will be an argument which must be evaluated.
11653 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11654 COMPOUND_EXPR in the chain will contain the tree for the simplified
11655 form of the builtin function call. */
11656
11657 static tree
11658 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11659 {
11660 if (!validate_arg (s1, POINTER_TYPE)
11661 || !validate_arg (s2, POINTER_TYPE))
11662 return NULL_TREE;
11663 else
11664 {
11665 tree fn;
11666 const char *p1, *p2;
11667
11668 p2 = c_getstr (s2);
11669 if (p2 == NULL)
11670 return NULL_TREE;
11671
11672 p1 = c_getstr (s1);
11673 if (p1 != NULL)
11674 {
11675 const char *r = strpbrk (p1, p2);
11676 tree tem;
11677
11678 if (r == NULL)
11679 return build_int_cst (TREE_TYPE (s1), 0);
11680
11681 /* Return an offset into the constant string argument. */
11682 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11683 return fold_convert_loc (loc, type, tem);
11684 }
11685
11686 if (p2[0] == '\0')
11687 /* strpbrk(x, "") == NULL.
11688 Evaluate and ignore s1 in case it had side-effects. */
11689 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11690
11691 if (p2[1] != '\0')
11692 return NULL_TREE; /* Really call strpbrk. */
11693
11694 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11695 if (!fn)
11696 return NULL_TREE;
11697
11698 /* New argument list transforming strpbrk(s1, s2) to
11699 strchr(s1, s2[0]). */
11700 return build_call_expr_loc (loc, fn, 2, s1,
11701 build_int_cst (integer_type_node, p2[0]));
11702 }
11703 }
11704
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return dst;

      if (optimize_insn_for_speed_p ())
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).
	     The transformation is strcat (dst, src)
	     -> tmp = dst; strcpy (tmp + strlen (tmp), src), tmp.  */
	  tree newdst, call;
	  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
	  tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);

	  if (!strlen_fn || !strcpy_fn)
	    return NULL_TREE;

	  /* If we don't have a movstr we don't want to emit an strcpy
	     call.  We have to do that if the length of the source string
	     isn't computable (in that case we can use memcpy probably
	     later expanding to a sequence of mov instructions).  If we
	     have movstr instructions we can emit strcpy calls.  */
	  if (!HAVE_movstr)
	    {
	      tree len = c_strlen (src, 1);
	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;
	    }

	  /* Stabilize the argument list.  DST is evaluated twice below
	     (once for strlen, once as the overall result), so it must be
	     wrapped in a SAVE_EXPR first.  */
	  dst = builtin_save_expr (dst);

	  /* Create strlen (dst).  */
	  newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
	  newdst = builtin_save_expr (newdst);

	  /* The COMPOUND_EXPR evaluates the strcpy call for effect and
	     yields DST, matching strcat's return value.  */
	  call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
	  return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
	}
      return NULL_TREE;
    }
}
11775
11776 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11777 arguments to the call.
11778
11779 Return NULL_TREE if no simplification was possible, otherwise return the
11780 simplified form of the call as a tree.
11781
11782 The simplified form may be a constant or other expression which
11783 computes the same value, but in a more efficient manner (including
11784 calls to other builtin functions).
11785
11786 The call may contain arguments which need to be evaluated, but
11787 which are not useful to determine the result of the call. In
11788 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11789 COMPOUND_EXPR will be an argument which must be evaluated.
11790 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11791 COMPOUND_EXPR in the chain will contain the tree for the simplified
11792 form of the builtin function call. */
11793
11794 static tree
11795 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11796 {
11797 if (!validate_arg (dst, POINTER_TYPE)
11798 || !validate_arg (src, POINTER_TYPE)
11799 || !validate_arg (len, INTEGER_TYPE))
11800 return NULL_TREE;
11801 else
11802 {
11803 const char *p = c_getstr (src);
11804
11805 /* If the requested length is zero, or the src parameter string
11806 length is zero, return the dst parameter. */
11807 if (integer_zerop (len) || (p && *p == '\0'))
11808 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11809
11810 /* If the requested len is greater than or equal to the string
11811 length, call strcat. */
11812 if (TREE_CODE (len) == INTEGER_CST && p
11813 && compare_tree_int (len, strlen (p)) >= 0)
11814 {
11815 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11816
11817 /* If the replacement _DECL isn't initialized, don't do the
11818 transformation. */
11819 if (!fn)
11820 return NULL_TREE;
11821
11822 return build_call_expr_loc (loc, fn, 2, dst, src);
11823 }
11824 return NULL_TREE;
11825 }
11826 }
11827
11828 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11829 to the call.
11830
11831 Return NULL_TREE if no simplification was possible, otherwise return the
11832 simplified form of the call as a tree.
11833
11834 The simplified form may be a constant or other expression which
11835 computes the same value, but in a more efficient manner (including
11836 calls to other builtin functions).
11837
11838 The call may contain arguments which need to be evaluated, but
11839 which are not useful to determine the result of the call. In
11840 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11841 COMPOUND_EXPR will be an argument which must be evaluated.
11842 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11843 COMPOUND_EXPR in the chain will contain the tree for the simplified
11844 form of the builtin function call. */
11845
11846 static tree
11847 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11848 {
11849 if (!validate_arg (s1, POINTER_TYPE)
11850 || !validate_arg (s2, POINTER_TYPE))
11851 return NULL_TREE;
11852 else
11853 {
11854 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11855
11856 /* If both arguments are constants, evaluate at compile-time. */
11857 if (p1 && p2)
11858 {
11859 const size_t r = strspn (p1, p2);
11860 return build_int_cst (size_type_node, r);
11861 }
11862
11863 /* If either argument is "", return NULL_TREE. */
11864 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11865 /* Evaluate and ignore both arguments in case either one has
11866 side-effects. */
11867 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11868 s1, s2);
11869 return NULL_TREE;
11870 }
11871 }
11872
11873 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11874 to the call.
11875
11876 Return NULL_TREE if no simplification was possible, otherwise return the
11877 simplified form of the call as a tree.
11878
11879 The simplified form may be a constant or other expression which
11880 computes the same value, but in a more efficient manner (including
11881 calls to other builtin functions).
11882
11883 The call may contain arguments which need to be evaluated, but
11884 which are not useful to determine the result of the call. In
11885 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11886 COMPOUND_EXPR will be an argument which must be evaluated.
11887 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11888 COMPOUND_EXPR in the chain will contain the tree for the simplified
11889 form of the builtin function call. */
11890
11891 static tree
11892 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11893 {
11894 if (!validate_arg (s1, POINTER_TYPE)
11895 || !validate_arg (s2, POINTER_TYPE))
11896 return NULL_TREE;
11897 else
11898 {
11899 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11900
11901 /* If both arguments are constants, evaluate at compile-time. */
11902 if (p1 && p2)
11903 {
11904 const size_t r = strcspn (p1, p2);
11905 return build_int_cst (size_type_node, r);
11906 }
11907
11908 /* If the first argument is "", return NULL_TREE. */
11909 if (p1 && *p1 == '\0')
11910 {
11911 /* Evaluate and ignore argument s2 in case it has
11912 side-effects. */
11913 return omit_one_operand_loc (loc, size_type_node,
11914 size_zero_node, s2);
11915 }
11916
11917 /* If the second argument is "", return __builtin_strlen(s1). */
11918 if (p2 && *p2 == '\0')
11919 {
11920 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11921
11922 /* If the replacement _DECL isn't initialized, don't do the
11923 transformation. */
11924 if (!fn)
11925 return NULL_TREE;
11926
11927 return build_call_expr_loc (loc, fn, 1, s1);
11928 }
11929 return NULL_TREE;
11930 }
11931 }
11932
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return NULL_TREE if no simplification
   was possible.  */
11939
11940 tree
11941 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11942 bool ignore, bool unlocked, tree len)
11943 {
11944 /* If we're using an unlocked function, assume the other unlocked
11945 functions exist explicitly. */
11946 tree const fn_fputc = (unlocked
11947 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11948 : builtin_decl_implicit (BUILT_IN_FPUTC));
11949 tree const fn_fwrite = (unlocked
11950 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11951 : builtin_decl_implicit (BUILT_IN_FWRITE));
11952
11953 /* If the return value is used, don't do the transformation. */
11954 if (!ignore)
11955 return NULL_TREE;
11956
11957 /* Verify the arguments in the original call. */
11958 if (!validate_arg (arg0, POINTER_TYPE)
11959 || !validate_arg (arg1, POINTER_TYPE))
11960 return NULL_TREE;
11961
11962 if (! len)
11963 len = c_strlen (arg0, 0);
11964
11965 /* Get the length of the string passed to fputs. If the length
11966 can't be determined, punt. */
11967 if (!len
11968 || TREE_CODE (len) != INTEGER_CST)
11969 return NULL_TREE;
11970
11971 switch (compare_tree_int (len, 1))
11972 {
11973 case -1: /* length is 0, delete the call entirely . */
11974 return omit_one_operand_loc (loc, integer_type_node,
11975 integer_zero_node, arg1);;
11976
11977 case 0: /* length is 1, call fputc. */
11978 {
11979 const char *p = c_getstr (arg0);
11980
11981 if (p != NULL)
11982 {
11983 if (fn_fputc)
11984 return build_call_expr_loc (loc, fn_fputc, 2,
11985 build_int_cst
11986 (integer_type_node, p[0]), arg1);
11987 else
11988 return NULL_TREE;
11989 }
11990 }
11991 /* FALLTHROUGH */
11992 case 1: /* length is greater than 1, call fwrite. */
11993 {
11994 /* If optimizing for size keep fputs. */
11995 if (optimize_function_for_size_p (cfun))
11996 return NULL_TREE;
11997 /* New argument list transforming fputs(string, stream) to
11998 fwrite(string, 1, len, stream). */
11999 if (fn_fwrite)
12000 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12001 size_one_node, len, arg1);
12002 else
12003 return NULL_TREE;
12004 }
12005 default:
12006 gcc_unreachable ();
12007 }
12008 return NULL_TREE;
12009 }
12010
12011 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12012 produced. False otherwise. This is done so that we don't output the error
12013 or warning twice or three times. */
12014
12015 bool
12016 fold_builtin_next_arg (tree exp, bool va_start_p)
12017 {
12018 tree fntype = TREE_TYPE (current_function_decl);
12019 int nargs = call_expr_nargs (exp);
12020 tree arg;
12021 /* There is good chance the current input_location points inside the
12022 definition of the va_start macro (perhaps on the token for
12023 builtin) in a system header, so warnings will not be emitted.
12024 Use the location in real source code. */
12025 source_location current_location =
12026 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12027 NULL);
12028
12029 if (!stdarg_p (fntype))
12030 {
12031 error ("%<va_start%> used in function with fixed args");
12032 return true;
12033 }
12034
12035 if (va_start_p)
12036 {
12037 if (va_start_p && (nargs != 2))
12038 {
12039 error ("wrong number of arguments to function %<va_start%>");
12040 return true;
12041 }
12042 arg = CALL_EXPR_ARG (exp, 1);
12043 }
12044 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12045 when we checked the arguments and if needed issued a warning. */
12046 else
12047 {
12048 if (nargs == 0)
12049 {
12050 /* Evidently an out of date version of <stdarg.h>; can't validate
12051 va_start's second argument, but can still work as intended. */
12052 warning_at (current_location,
12053 OPT_Wvarargs,
12054 "%<__builtin_next_arg%> called without an argument");
12055 return true;
12056 }
12057 else if (nargs > 1)
12058 {
12059 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12060 return true;
12061 }
12062 arg = CALL_EXPR_ARG (exp, 0);
12063 }
12064
12065 if (TREE_CODE (arg) == SSA_NAME)
12066 arg = SSA_NAME_VAR (arg);
12067
12068 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12069 or __builtin_next_arg (0) the first time we see it, after checking
12070 the arguments and if needed issuing a warning. */
12071 if (!integer_zerop (arg))
12072 {
12073 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12074
12075 /* Strip off all nops for the sake of the comparison. This
12076 is not quite the same as STRIP_NOPS. It does more.
12077 We must also strip off INDIRECT_EXPR for C++ reference
12078 parameters. */
12079 while (CONVERT_EXPR_P (arg)
12080 || TREE_CODE (arg) == INDIRECT_REF)
12081 arg = TREE_OPERAND (arg, 0);
12082 if (arg != last_parm)
12083 {
12084 /* FIXME: Sometimes with the tree optimizers we can get the
12085 not the last argument even though the user used the last
12086 argument. We just warn and set the arg to be the last
12087 argument so that we will get wrong-code because of
12088 it. */
12089 warning_at (current_location,
12090 OPT_Wvarargs,
12091 "second parameter of %<va_start%> not last named argument");
12092 }
12093
12094 /* Undefined by C99 7.15.1.4p4 (va_start):
12095 "If the parameter parmN is declared with the register storage
12096 class, with a function or array type, or with a type that is
12097 not compatible with the type that results after application of
12098 the default argument promotions, the behavior is undefined."
12099 */
12100 else if (DECL_REGISTER (arg))
12101 {
12102 warning_at (current_location,
12103 OPT_Wvarargs,
12104 "undefined behaviour when second parameter of "
12105 "%<va_start%> is declared with %<register%> storage");
12106 }
12107
12108 /* We want to verify the second parameter just once before the tree
12109 optimizers are run and then avoid keeping it in the tree,
12110 as otherwise we could warn even for correct code like:
12111 void foo (int i, ...)
12112 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12113 if (va_start_p)
12114 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12115 else
12116 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12117 }
12118 return false;
12119 }
12120
12121
12122 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12123 ORIG may be null if this is a 2-argument call. We don't attempt to
12124 simplify calls with more than 3 arguments.
12125
12126 Return NULL_TREE if no simplification was possible, otherwise return the
12127 simplified form of the call as a tree. If IGNORED is true, it means that
12128 the caller does not use the returned value of the function. */
12129
static tree
fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
		      tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return NULL_TREE;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  sprintf returns
	 the number of characters written, i.e. strlen (fmt_str).  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);
      if (!ignored)
	retval = build_int_cst (integer_type_node, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return NULL_TREE;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return NULL_TREE;

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).
	 The return value (strlen of the source) must be a known
	 constant if it is going to be used.  */
      if (!ignored)
	{
	  retval = c_strlen (orig, 1);
	  if (!retval || TREE_CODE (retval) != INTEGER_CST)
	    return NULL_TREE;
	}
      call = build_call_expr_loc (loc, fn, 2, dest, orig);
    }

  if (call && retval)
    {
      /* Combine the strcpy call and the computed return value as
	 (call, retval), converted to sprintf's declared return type.  */
      retval = fold_convert_loc
	(loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
	 retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
12209
12210 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12211 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12212 attempt to simplify calls with more than 4 arguments.
12213
12214 Return NULL_TREE if no simplification was possible, otherwise return the
12215 simplified form of the call as a tree. If IGNORED is true, it means that
12216 the caller does not use the returned value of the function. */
12217
static tree
fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
		       tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;
  unsigned HOST_WIDE_INT destlen;

  /* Verify the required arguments in the original call.  We deal with two
     types of snprintf() calls: 'snprintf (str, cst, fmt)' and
     'snprintf (dest, cst, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (destsize, INTEGER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* The destination size must be a known host-representable constant.  */
  if (!host_integerp (destsize, 1))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  destlen = tree_low_cst (destsize, 1);

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      size_t len = strlen (fmt_str);

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (len >= destlen)
	return NULL_TREE;

      if (!fn)
	return NULL_TREE;

      /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats and
	 strlen (fmt) < cst.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);

      /* snprintf returns the number of characters that would have been
	 written (not counting the terminating NUL).  */
      if (!ignored)
	retval = build_int_cst (integer_type_node, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      unsigned HOST_WIDE_INT origlen;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return NULL_TREE;

      /* RETVAL doubles as snprintf's return value: strlen (orig).  */
      retval = c_strlen (orig, 1);
      if (!retval || !host_integerp (retval, 1))
	return NULL_TREE;

      origlen = tree_low_cst (retval, 1);
      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (origlen >= destlen)
	return NULL_TREE;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      if (!fn)
	return NULL_TREE;

      call = build_call_expr_loc (loc, fn, 2, dest, orig);

      /* Discard the computed length if the caller ignores the result.  */
      if (ignored)
	retval = NULL_TREE;
    }

  if (call && retval)
    {
      /* Yield (strcpy-call, retval) converted to snprintf's declared
	 return type.  */
      tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
      retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
12329
12330 /* Expand a call EXP to __builtin_object_size. */
12331
12332 rtx
12333 expand_builtin_object_size (tree exp)
12334 {
12335 tree ost;
12336 int object_size_type;
12337 tree fndecl = get_callee_fndecl (exp);
12338
12339 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12340 {
12341 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12342 exp, fndecl);
12343 expand_builtin_trap ();
12344 return const0_rtx;
12345 }
12346
12347 ost = CALL_EXPR_ARG (exp, 1);
12348 STRIP_NOPS (ost);
12349
12350 if (TREE_CODE (ost) != INTEGER_CST
12351 || tree_int_cst_sgn (ost) < 0
12352 || compare_tree_int (ost, 3) > 0)
12353 {
12354 error ("%Klast argument of %D is not integer constant between 0 and 3",
12355 exp, fndecl);
12356 expand_builtin_trap ();
12357 return const0_rtx;
12358 }
12359
12360 object_size_type = tree_low_cst (ost, 0);
12361
12362 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12363 }
12364
12365 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12366 FCODE is the BUILT_IN_* to use.
12367 Return NULL_RTX if we failed; the caller should emit a normal call,
12368 otherwise try to get the result in TARGET, if convenient (and in
12369 mode MODE if that's convenient). */
12370
static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* The second argument is the fill value (integer) for memset_chk and
     a source pointer for the other three builtins.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* SIZE (the known object size) must be a host-representable
     constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* A constant LEN larger than a known SIZE is a guaranteed
	 overflow: warn and emit the normal checked call.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Rewrite the call as the unchecked variant, preserving the
	 tail-call flag of the original call.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN rather than DEST.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
12484
12485 /* Emit warning if a buffer overflow is detected at compile time. */
12486
static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  /* Pick out the length-like and object-size arguments; their call
     positions differ per builtin.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* A SIZE of all-ones means "object size unknown": nothing to check.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* LEN is the source string; warn only when its constant length
	 is known and not smaller than SIZE.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* Source length unknown: the overflow is possible but not
	     certain, so use the weaker diagnostic.  */
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
12553
12554 /* Emit warning if a buffer overflow is detected at compile time
12555 in __sprintf_chk/__vsprintf_chk calls. */
12556
static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* A SIZE of all-ones means "object size unknown": nothing to check.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
	return;
    }
  else
    return;

  /* The output occupies LEN + 1 bytes including the terminating NUL,
     so LEN must be strictly less than SIZE to fit.  */
  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
12610
12611 /* Emit warning if a free is called with address of a variable. */
12612
12613 static void
12614 maybe_emit_free_warning (tree exp)
12615 {
12616 tree arg = CALL_EXPR_ARG (exp, 0);
12617
12618 STRIP_NOPS (arg);
12619 if (TREE_CODE (arg) != ADDR_EXPR)
12620 return;
12621
12622 arg = get_base_address (TREE_OPERAND (arg, 0));
12623 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12624 return;
12625
12626 if (SSA_VAR_P (arg))
12627 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12628 "%Kattempt to free a non-heap object %qD", exp, arg);
12629 else
12630 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12631 "%Kattempt to free a non-heap object", exp);
12632 }
12633
12634 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12635 if possible. */
12636
tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The object-size type selector must be a constant 0, 1, 2 or 3.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      /* Address of a known object: fold now, provided the computed
	 size is representable in size_t.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (double_int_fits_to_tree_p (size_type_node,
				     double_int::from_uhwi (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
	  && double_int_fits_to_tree_p (size_type_node,
					double_int::from_uhwi (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
12683
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

tree
fold_builtin_memory_chk (location_t loc, tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* For __memset_chk the second argument is the fill value, an
     integer; for the copy/move variants it is a source pointer.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				     dest, len);
      else
	{
	  /* __mempcpy_chk returns DEST + LEN, converted to the
	     builtin's declared return type.  */
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  /* The remaining folds need SIZE as a host-representable unsigned
     constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* An all-ones SIZE means "object size unknown", in which case the
     runtime check can never fail and the call may always be folded.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      /* Keep the checking call if the copy might overflow the object.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
12778
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

tree
fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
			 tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* SIZE must be a host-representable unsigned constant for us to
     reason about the object-size check at all.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* An all-ones SIZE means the object size is unknown and the runtime
     check never fails; otherwise prove the copy fits before folding.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 3, dest, src, size);
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_TREE;

	      /* Copy strlen (SRC) + 1 bytes so the terminating NUL
		 is included.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				       build_call_expr_loc (loc, fn, 4,
							    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      /* Require SIZE > MAXLEN: the copy writes MAXLEN + 1 bytes
	 counting the terminating NUL.  */
      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}
12861
12862 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12863 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12864 length passed as third argument. IGNORE is true if return value can be
12865 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12866
12867 tree
12868 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12869 tree len, tree size, tree maxlen, bool ignore,
12870 enum built_in_function fcode)
12871 {
12872 tree fn;
12873
12874 if (!validate_arg (dest, POINTER_TYPE)
12875 || !validate_arg (src, POINTER_TYPE)
12876 || !validate_arg (len, INTEGER_TYPE)
12877 || !validate_arg (size, INTEGER_TYPE))
12878 return NULL_TREE;
12879
12880 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12881 {
12882 /* If return value of __stpncpy_chk is ignored,
12883 optimize into __strncpy_chk. */
12884 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12885 if (fn)
12886 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12887 }
12888
12889 if (! host_integerp (size, 1))
12890 return NULL_TREE;
12891
12892 if (! integer_all_onesp (size))
12893 {
12894 if (! host_integerp (len, 1))
12895 {
12896 /* If LEN is not constant, try MAXLEN too.
12897 For MAXLEN only allow optimizing into non-_ocs function
12898 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12899 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12900 return NULL_TREE;
12901 }
12902 else
12903 maxlen = len;
12904
12905 if (tree_int_cst_lt (size, maxlen))
12906 return NULL_TREE;
12907 }
12908
12909 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12910 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12911 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12912 if (!fn)
12913 return NULL_TREE;
12914
12915 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12916 }
12917
12918 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12919 are the arguments to the call. */
12920
12921 static tree
12922 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12923 tree src, tree size)
12924 {
12925 tree fn;
12926 const char *p;
12927
12928 if (!validate_arg (dest, POINTER_TYPE)
12929 || !validate_arg (src, POINTER_TYPE)
12930 || !validate_arg (size, INTEGER_TYPE))
12931 return NULL_TREE;
12932
12933 p = c_getstr (src);
12934 /* If the SRC parameter is "", return DEST. */
12935 if (p && *p == '\0')
12936 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12937
12938 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12939 return NULL_TREE;
12940
12941 /* If __builtin_strcat_chk is used, assume strcat is available. */
12942 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12943 if (!fn)
12944 return NULL_TREE;
12945
12946 return build_call_expr_loc (loc, fn, 2, dest, src);
12947 }
12948
12949 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12950 LEN, and SIZE. */
12951
12952 static tree
12953 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12954 tree dest, tree src, tree len, tree size)
12955 {
12956 tree fn;
12957 const char *p;
12958
12959 if (!validate_arg (dest, POINTER_TYPE)
12960 || !validate_arg (src, POINTER_TYPE)
12961 || !validate_arg (size, INTEGER_TYPE)
12962 || !validate_arg (size, INTEGER_TYPE))
12963 return NULL_TREE;
12964
12965 p = c_getstr (src);
12966 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12967 if (p && *p == '\0')
12968 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12969 else if (integer_zerop (len))
12970 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12971
12972 if (! host_integerp (size, 1))
12973 return NULL_TREE;
12974
12975 if (! integer_all_onesp (size))
12976 {
12977 tree src_len = c_strlen (src, 1);
12978 if (src_len
12979 && host_integerp (src_len, 1)
12980 && host_integerp (len, 1)
12981 && ! tree_int_cst_lt (len, src_len))
12982 {
12983 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12984 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
12985 if (!fn)
12986 return NULL_TREE;
12987
12988 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12989 }
12990 return NULL_TREE;
12991 }
12992
12993 /* If __builtin_strncat_chk is used, assume strncat is available. */
12994 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
12995 if (!fn)
12996 return NULL_TREE;
12997
12998 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12999 }
13000
/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
   Return NULL_TREE if a normal call should be emitted rather than
   expanding the function inline.  FCODE is either BUILT_IN_SPRINTF_CHK
   or BUILT_IN_VSPRINTF_CHK.  */

static tree
fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
			    enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return NULL_TREE;
  dest = args[0];
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = args[1];
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = args[2];
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = args[3];
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* SIZE must be a known constant to compare against the output length.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* LEN, when computable, is the length of the formatted output.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* For __sprintf_chk, only when no variadic arguments follow
	     (they would otherwise be silently dropped).  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = args[4];
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* With a known object size, fold only when the output provably fits:
     LEN < SIZE, leaving room for the terminating NUL.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return NULL_TREE;

  /* Drop the FLAG and SIZE arguments and call {,v}sprintf (dest, fmt, ...),
     keeping any trailing variadic arguments.  */
  return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
}
13092
13093 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13094 a normal call should be emitted rather than expanding the function
13095 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13096
13097 static tree
13098 fold_builtin_sprintf_chk (location_t loc, tree exp,
13099 enum built_in_function fcode)
13100 {
13101 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13102 CALL_EXPR_ARGP (exp), fcode);
13103 }
13104
/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS.  Return
   NULL_TREE if a normal call should be emitted rather than expanding
   the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

static tree
fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
			     tree maxlen, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (nargs < 5)
    return NULL_TREE;
  dest = args[0];
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = args[1];
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = args[2];
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = args[3];
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = args[4];
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* SIZE (the checked object size) must be a known constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* An all-ones SIZE means the object size is unknown and the runtime
     check never fails; otherwise prove LEN (or MAXLEN) fits in SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return NULL_TREE;

  /* Drop the FLAG and SIZE arguments and call
     {,v}snprintf (dest, len, fmt, ...), keeping any trailing args.  */
  return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
}
13181
13182 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13183 a normal call should be emitted rather than expanding the function
13184 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13185 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13186 passed as second argument. */
13187
13188 tree
13189 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13190 enum built_in_function fcode)
13191 {
13192 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13193 CALL_EXPR_ARGP (exp), maxlen, fcode);
13194 }
13195
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
		     tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Handle formats that are exactly "%s" or contain no '%' at all.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* The va_list variants cannot supply the "%s" argument.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  /* STR is the string that will actually be printed.  */
	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		call = build_call_expr_loc (loc, fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  /* CALL is still NULL_TREE if the required replacement builtin was
     unavailable; keep the original call in that case.  */
  if (!call)
    return NULL_TREE;

  /* Cast the replacement's result to printf's declared return type.  */
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
13344
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
		      tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing ARG with no conversion in FMT would be silently
	 dropped; bail out for the non-va_list variants.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  /* CALL is NULL_TREE if the replacement builtin was unavailable.  */
  if (!call)
    return NULL_TREE;
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
13443
13444 /* Initialize format string characters in the target charset. */
13445
13446 static bool
13447 init_target_chars (void)
13448 {
13449 static bool init;
13450 if (!init)
13451 {
13452 target_newline = lang_hooks.to_target_charset ('\n');
13453 target_percent = lang_hooks.to_target_charset ('%');
13454 target_c = lang_hooks.to_target_charset ('c');
13455 target_s = lang_hooks.to_target_charset ('s');
13456 if (target_newline == 0 || target_percent == 0 || target_c == 0
13457 || target_s == 0)
13458 return false;
13459
13460 target_percent_c[0] = target_percent;
13461 target_percent_c[1] = target_c;
13462 target_percent_c[2] = '\0';
13463
13464 target_percent_s[0] = target_percent;
13465 target_percent_s[1] = target_s;
13466 target_percent_s[2] = '\0';
13467
13468 target_percent_s_newline[0] = target_percent;
13469 target_percent_s_newline[1] = target_s;
13470 target_percent_s_newline[2] = target_newline;
13471 target_percent_s_newline[3] = '\0';
13472
13473 init = true;
13474 }
13475 return true;
13476 }
13477
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  /* Round into the target mode and require the value to
	     survive the round-trip unchanged.  */
	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
13514
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* Convert real and imaginary parts separately; TYPE is a
	 complex type, TREE_TYPE (type) its component type.  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  /* Round each part into the component mode and require the
	     values to survive the round-trip unchanged.  */
	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
13561
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* The argument must be finite and, when a domain was given,
	 within the (possibly inclusive) MIN/MAX bounds.  */
      if (real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  /* Compute FUNC at the target precision, then clear the MPFR
	     flags so do_mpfr_ckconv can detect overflow/underflow
	     caused by the call itself.  */
	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
13608
13609 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13610 FUNC on it and return the resulting value as a tree with type TYPE.
13611 The mpfr precision is set to the precision of TYPE. We assume that
13612 function FUNC returns zero if the result could be calculated
13613 exactly within the requested precision. */
13614
13615 static tree
13616 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13617 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13618 {
13619 tree result = NULL_TREE;
13620
13621 STRIP_NOPS (arg1);
13622 STRIP_NOPS (arg2);
13623
13624 /* To proceed, MPFR must exactly represent the target floating point
13625 format, which only happens when the target base equals two. */
13626 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13627 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13628 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13629 {
13630 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13631 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13632
13633 if (real_isfinite (ra1) && real_isfinite (ra2))
13634 {
13635 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13636 const int prec = fmt->p;
13637 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13638 int inexact;
13639 mpfr_t m1, m2;
13640
13641 mpfr_inits2 (prec, m1, m2, NULL);
13642 mpfr_from_real (m1, ra1, GMP_RNDN);
13643 mpfr_from_real (m2, ra2, GMP_RNDN);
13644 mpfr_clear_flags ();
13645 inexact = func (m1, m1, m2, rnd);
13646 result = do_mpfr_ckconv (m1, type, inexact);
13647 mpfr_clears (m1, m2, NULL);
13648 }
13649 }
13650
13651 return result;
13652 }
13653
13654 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13655 FUNC on it and return the resulting value as a tree with type TYPE.
13656 The mpfr precision is set to the precision of TYPE. We assume that
13657 function FUNC returns zero if the result could be calculated
13658 exactly within the requested precision. */
13659
13660 static tree
13661 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13662 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13663 {
13664 tree result = NULL_TREE;
13665
13666 STRIP_NOPS (arg1);
13667 STRIP_NOPS (arg2);
13668 STRIP_NOPS (arg3);
13669
13670 /* To proceed, MPFR must exactly represent the target floating point
13671 format, which only happens when the target base equals two. */
13672 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13673 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13674 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13675 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13676 {
13677 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13678 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13679 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13680
13681 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13682 {
13683 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13684 const int prec = fmt->p;
13685 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13686 int inexact;
13687 mpfr_t m1, m2, m3;
13688
13689 mpfr_inits2 (prec, m1, m2, m3, NULL);
13690 mpfr_from_real (m1, ra1, GMP_RNDN);
13691 mpfr_from_real (m2, ra2, GMP_RNDN);
13692 mpfr_from_real (m3, ra3, GMP_RNDN);
13693 mpfr_clear_flags ();
13694 inexact = func (m1, m1, m2, m3, rnd);
13695 result = do_mpfr_ckconv (m1, type, inexact);
13696 mpfr_clears (m1, m2, m3, NULL);
13697 }
13698 }
13699
13700 return result;
13701 }
13702
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.
   Returns NULL_TREE when no compile-time folding is possible.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  /* Honor a truncating target format; otherwise round to nearest.  */
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* A single return value covers both results; both
	     conversions below conservatively use the same inexact
	     flag.  */
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  Note
		 the ordering: cos forms the real part and sin the
		 imaginary part — presumably the cexpi() convention;
		 confirm against the caller.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  Each MODIFY_EXPR is marked as
		     having side effects so it is not optimized away.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
13772
13773 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13774 two-argument mpfr order N Bessel function FUNC on them and return
13775 the resulting value as a tree with type TYPE. The mpfr precision
13776 is set to the precision of TYPE. We assume that function FUNC
13777 returns zero if the result could be calculated exactly within the
13778 requested precision. */
13779 static tree
13780 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13781 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13782 const REAL_VALUE_TYPE *min, bool inclusive)
13783 {
13784 tree result = NULL_TREE;
13785
13786 STRIP_NOPS (arg1);
13787 STRIP_NOPS (arg2);
13788
13789 /* To proceed, MPFR must exactly represent the target floating point
13790 format, which only happens when the target base equals two. */
13791 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13792 && host_integerp (arg1, 0)
13793 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13794 {
13795 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
13796 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13797
13798 if (n == (long)n
13799 && real_isfinite (ra)
13800 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13801 {
13802 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13803 const int prec = fmt->p;
13804 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13805 int inexact;
13806 mpfr_t m;
13807
13808 mpfr_init2 (m, prec);
13809 mpfr_from_real (m, ra, GMP_RNDN);
13810 mpfr_clear_flags ();
13811 inexact = func (m, n, m, rnd);
13812 result = do_mpfr_ckconv (m, type, inexact);
13813 mpfr_clear (m);
13814 }
13815 }
13816
13817 return result;
13818 }
13819
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  On success the result is a COMPOUND_EXPR
   that first stores the quotient bits through ARG_QUO and then yields
   the remainder; NULL_TREE is returned when folding is not possible.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  /* Honor a truncating target format; otherwise round to nearest.  */
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  Mark the store as a side effect so
		     it is not optimized away.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem: the store
		     is sequenced first, the remainder is the value of
		     the whole expression.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
13893
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  Returns NULL_TREE when folding is not possible.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  /* Honor a truncating target format; otherwise round to nearest.  */
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* SG receives the sign of Gamma(arg), i.e. the signgam
	     value stored through ARG_SG below.  */
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  Mark it as a
		 side effect so the store is not optimized away.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result:
		 the store happens first, the lgamma value is the value
		 of the whole expression.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
13958
13959 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13960 function FUNC on it and return the resulting value as a tree with
13961 type TYPE. The mpfr precision is set to the precision of TYPE. We
13962 assume that function FUNC returns zero if the result could be
13963 calculated exactly within the requested precision. */
13964
13965 static tree
13966 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13967 {
13968 tree result = NULL_TREE;
13969
13970 STRIP_NOPS (arg);
13971
13972 /* To proceed, MPFR must exactly represent the target floating point
13973 format, which only happens when the target base equals two. */
13974 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13975 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13976 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13977 {
13978 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13979 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13980
13981 if (real_isfinite (re) && real_isfinite (im))
13982 {
13983 const struct real_format *const fmt =
13984 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13985 const int prec = fmt->p;
13986 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13987 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13988 int inexact;
13989 mpc_t m;
13990
13991 mpc_init2 (m, prec);
13992 mpfr_from_real (mpc_realref (m), re, rnd);
13993 mpfr_from_real (mpc_imagref (m), im, rnd);
13994 mpfr_clear_flags ();
13995 inexact = func (m, m, crnd);
13996 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13997 mpc_clear (m);
13998 }
13999 }
14000
14001 return result;
14002 }
14003
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.
     NOTE(review): only ARG0's component format is checked for radix
     two; presumably ARG0 and ARG1 always share the same format —
     confirm with callers.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      /* Real and imaginary components of both operands.  */
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  /* Precision and rounding come from the component type of
	     the result type TYPE.  */
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  /* MPC takes one rounding mode per component.  */
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  /* Compute in place: m0 <- func (m0, m1).  */
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
14062
14063 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14064 a normal call should be emitted rather than expanding the function
14065 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14066
14067 static tree
14068 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14069 {
14070 int nargs = gimple_call_num_args (stmt);
14071
14072 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14073 (nargs > 0
14074 ? gimple_call_arg_ptr (stmt, 0)
14075 : &error_mark_node), fcode);
14076 }
14077
14078 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14079 a normal call should be emitted rather than expanding the function
14080 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14081 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14082 passed as second argument. */
14083
14084 tree
14085 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14086 enum built_in_function fcode)
14087 {
14088 int nargs = gimple_call_num_args (stmt);
14089
14090 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14091 (nargs > 0
14092 ? gimple_call_arg_ptr (stmt, 0)
14093 : &error_mark_node), maxlen, fcode);
14094 }
14095
14096 /* Builtins with folding operations that operate on "..." arguments
14097 need special handling; we need to store the arguments in a convenient
14098 data structure before attempting any folding. Fortunately there are
14099 only a few builtins that fall into this category. FNDECL is the
14100 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14101 result of the function call is ignored. */
14102
14103 static tree
14104 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14105 bool ignore ATTRIBUTE_UNUSED)
14106 {
14107 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14108 tree ret = NULL_TREE;
14109
14110 switch (fcode)
14111 {
14112 case BUILT_IN_SPRINTF_CHK:
14113 case BUILT_IN_VSPRINTF_CHK:
14114 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14115 break;
14116
14117 case BUILT_IN_SNPRINTF_CHK:
14118 case BUILT_IN_VSNPRINTF_CHK:
14119 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14120
14121 default:
14122 break;
14123 }
14124 if (ret)
14125 {
14126 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14127 TREE_NO_WARNING (ret) = 1;
14128 return ret;
14129 }
14130 return NULL_TREE;
14131 }
14132
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  STMT is a GIMPLE
   call and IGNORE is true if its value is unused.  Returns the folded
   replacement tree or NULL_TREE if the call should stay as-is.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Only fold direct calls to builtins; calls carrying
     __builtin_va_arg_pack arguments must be left alone.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      /* For a zero-argument call pass a harmless dummy pointer.  */
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-specific builtins are folded by the target hook.  */
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  /* The fixed-arity folder handles at most
	     MAX_ARGS_TO_FOLD_BUILTIN arguments; fall back to the
	     varargs folder when it declines.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (!ret)
	    ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the warning-suppressing NOP_EXPR the
		     folders may have added.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
14187
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  For the handful of builtins
   the compiler may itself emit calls to, the matching libfunc entries
   are renamed as well so generated libcalls stay consistent.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  /* Rename the canonical builtin decl, not just the user's decl.  */
  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      /* Block-move expansion and the memcpy libfunc must both use
	 the new name.  */
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      /* Likewise for block-clear expansion and memset.  */
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  /* When int is narrower than a word, ffs may be expanded via
	     an int-mode libcall; rename that optab entry too.  */
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
14233
14234 /* Return true if DECL is a builtin that expands to a constant or similarly
14235 simple code. */
14236 bool
14237 is_simple_builtin (tree decl)
14238 {
14239 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14240 switch (DECL_FUNCTION_CODE (decl))
14241 {
14242 /* Builtins that expand to constants. */
14243 case BUILT_IN_CONSTANT_P:
14244 case BUILT_IN_EXPECT:
14245 case BUILT_IN_OBJECT_SIZE:
14246 case BUILT_IN_UNREACHABLE:
14247 /* Simple register moves or loads from stack. */
14248 case BUILT_IN_ASSUME_ALIGNED:
14249 case BUILT_IN_RETURN_ADDRESS:
14250 case BUILT_IN_EXTRACT_RETURN_ADDR:
14251 case BUILT_IN_FROB_RETURN_ADDR:
14252 case BUILT_IN_RETURN:
14253 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14254 case BUILT_IN_FRAME_ADDRESS:
14255 case BUILT_IN_VA_END:
14256 case BUILT_IN_STACK_SAVE:
14257 case BUILT_IN_STACK_RESTORE:
14258 /* Exception state returns or moves registers around. */
14259 case BUILT_IN_EH_FILTER:
14260 case BUILT_IN_EH_POINTER:
14261 case BUILT_IN_EH_COPY_VALUES:
14262 return true;
14263
14264 default:
14265 return false;
14266 }
14267
14268 return false;
14269 }
14270
14271 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14272 most probably expanded inline into reasonably simple code. This is a
14273 superset of is_simple_builtin. */
14274 bool
14275 is_inexpensive_builtin (tree decl)
14276 {
14277 if (!decl)
14278 return false;
14279 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14280 return true;
14281 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14282 switch (DECL_FUNCTION_CODE (decl))
14283 {
14284 case BUILT_IN_ABS:
14285 case BUILT_IN_ALLOCA:
14286 case BUILT_IN_ALLOCA_WITH_ALIGN:
14287 case BUILT_IN_BSWAP16:
14288 case BUILT_IN_BSWAP32:
14289 case BUILT_IN_BSWAP64:
14290 case BUILT_IN_CLZ:
14291 case BUILT_IN_CLZIMAX:
14292 case BUILT_IN_CLZL:
14293 case BUILT_IN_CLZLL:
14294 case BUILT_IN_CTZ:
14295 case BUILT_IN_CTZIMAX:
14296 case BUILT_IN_CTZL:
14297 case BUILT_IN_CTZLL:
14298 case BUILT_IN_FFS:
14299 case BUILT_IN_FFSIMAX:
14300 case BUILT_IN_FFSL:
14301 case BUILT_IN_FFSLL:
14302 case BUILT_IN_IMAXABS:
14303 case BUILT_IN_FINITE:
14304 case BUILT_IN_FINITEF:
14305 case BUILT_IN_FINITEL:
14306 case BUILT_IN_FINITED32:
14307 case BUILT_IN_FINITED64:
14308 case BUILT_IN_FINITED128:
14309 case BUILT_IN_FPCLASSIFY:
14310 case BUILT_IN_ISFINITE:
14311 case BUILT_IN_ISINF_SIGN:
14312 case BUILT_IN_ISINF:
14313 case BUILT_IN_ISINFF:
14314 case BUILT_IN_ISINFL:
14315 case BUILT_IN_ISINFD32:
14316 case BUILT_IN_ISINFD64:
14317 case BUILT_IN_ISINFD128:
14318 case BUILT_IN_ISNAN:
14319 case BUILT_IN_ISNANF:
14320 case BUILT_IN_ISNANL:
14321 case BUILT_IN_ISNAND32:
14322 case BUILT_IN_ISNAND64:
14323 case BUILT_IN_ISNAND128:
14324 case BUILT_IN_ISNORMAL:
14325 case BUILT_IN_ISGREATER:
14326 case BUILT_IN_ISGREATEREQUAL:
14327 case BUILT_IN_ISLESS:
14328 case BUILT_IN_ISLESSEQUAL:
14329 case BUILT_IN_ISLESSGREATER:
14330 case BUILT_IN_ISUNORDERED:
14331 case BUILT_IN_VA_ARG_PACK:
14332 case BUILT_IN_VA_ARG_PACK_LEN:
14333 case BUILT_IN_VA_COPY:
14334 case BUILT_IN_TRAP:
14335 case BUILT_IN_SAVEREGS:
14336 case BUILT_IN_POPCOUNTL:
14337 case BUILT_IN_POPCOUNTLL:
14338 case BUILT_IN_POPCOUNTIMAX:
14339 case BUILT_IN_POPCOUNT:
14340 case BUILT_IN_PARITYL:
14341 case BUILT_IN_PARITYLL:
14342 case BUILT_IN_PARITYIMAX:
14343 case BUILT_IN_PARITY:
14344 case BUILT_IN_LABS:
14345 case BUILT_IN_LLABS:
14346 case BUILT_IN_PREFETCH:
14347 return true;
14348
14349 default:
14350 return is_simple_builtin (decl);
14351 }
14352
14353 return false;
14354 }