/* Expand builtin functions.
   Copyright (C) 1988-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "realmpfr.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "ubsan.h"
#include "cilk.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees, making sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strcat (location_t, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
                                  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or
   names one of the Cilk runtime helpers when Cilk Plus is enabled.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_enable_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
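
/* Illustrative note (added commentary, not in the original source):
   is_builtin_name is a pure prefix/name test, so for example

     is_builtin_name ("__builtin_memcpy")      -> true
     is_builtin_name ("__sync_fetch_and_add")  -> true
     is_builtin_name ("__atomic_load_n")       -> true
     is_builtin_name ("memcpy")                -> false

   The Cilk helper names match only when flag_enable_cilkplus is set.  */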


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* By default we assume that C99 functions are present in the runtime,
   but sincos is not.  */
bool
default_libc_has_function (enum function_class fn_class)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int inner, align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  inner = ~0U;
  while (offset)
    {
      tree next_offset;

      if (TREE_CODE (offset) == PLUS_EXPR)
        {
          next_offset = TREE_OPERAND (offset, 0);
          offset = TREE_OPERAND (offset, 1);
        }
      else
        next_offset = NULL;
      if (host_integerp (offset, 1))
        {
          /* Any overflow in calculating offset_bits won't change
             the alignment.  */
          unsigned offset_bits
            = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

          if (offset_bits)
            inner = MIN (inner, (offset_bits & -offset_bits));
        }
      else if (TREE_CODE (offset) == MULT_EXPR
               && host_integerp (TREE_OPERAND (offset, 1), 1))
        {
          /* Any overflow in calculating offset_factor won't change
             the alignment.  */
          unsigned offset_factor
            = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
               * BITS_PER_UNIT);

          if (offset_factor)
            inner = MIN (inner, (offset_factor & -offset_factor));
        }
      else
        {
          inner = MIN (inner, BITS_PER_UNIT);
          break;
        }
      offset = next_offset;
    }
  /* Alignment is innermost object alignment adjusted by the constant
     and non-constant offset parts.  */
  align = MIN (align, inner);

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
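
/* Worked example (added commentary, not in the original source): the
   expression (x & -x) used above extracts the lowest set bit of X, i.e.
   the largest power of two dividing X.  E.g. for a variable index scaled
   by 12 bytes:

     unsigned offset_factor = 12 * 8;                       // 96 bits
     unsigned align_bits = offset_factor & -offset_factor;  // 32 bits

   so only 32-bit (4-byte) alignment survives the variable offset.  */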

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
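
/* Worked example (added commentary, not in the original source): the pair
   (align, bitpos) encodes the invariant ptr & (align - 1) == bitpos.  If
   analysis proves align == 16 and bitpos == 4, the pointer is congruent to
   4 mod 16, so the best guaranteed alignment is bitpos & -bitpos == 4
   bytes, which is exactly what the collapse above computes.  */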

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
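
/* Behavioral sketch (added commentary, not in the original source; the
   calls are schematic, real callers pass trees, not host strings):

     c_strlen ("abc", 0)                            -> 3
     c_strlen ("abcdef" + 2, 0)                     -> 4
     c_strlen ("foo\0bar" + <non-constant>, 0)      -> NULL_TREE

   The last case fails because an internal NUL makes the result depend on
   the unknown starting offset.  */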

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}

/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);

      if (ch)
        ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
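
/* Worked example (added commentary, not in the original source): reading
   the string "abcd" in a 32-bit integer mode yields the constant the
   *target* would read, so byte order follows the target, not the host:

     little-endian target: 0x64636261  ('a' in the low byte)
     big-endian target:    0x61626364  ('a' in the high byte)

   The CH cursor also makes all bytes past the first NUL read as zero.  */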

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = TREE_INT_CST_LOW (cst);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
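
/* Layout sketch (added commentary, not in the original source): the code
   above fills the user-supplied buffer as

     buf[0]            frame pointer value (targetm.builtin_setjmp_frame_value)
     buf[1]            address of RECEIVER_LABEL
     buf[2] .. buf[4]  machine-dependent stack save area (SAVE_NONLOCAL)

   which is why __builtin_setjmp requires a five-word buffer.  */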

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be decrementing fp by
       STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  Similarly, we must prevent
     (frame-related) register values from being used across this code.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}
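
/* Usage sketch (added commentary, not in the original source; these
   builtins are documented as intended for internal/EH use only):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);   // the second argument must be 1

   The gcc_assert (value == const1_rtx) above enforces that constant,
   and the jump may not target a __builtin_setjmp in the same function.  */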

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
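
/* Usage sketch (added commentary, not in the original source): at the
   source level the expansion above corresponds to calls such as

     __builtin_prefetch (p);           // rw = 0 (read), locality = 3
     __builtin_prefetch (p, 1);        // prefetch for write
     __builtin_prefetch (p + 8, 0, 1); // read, low temporal locality

   On targets without a prefetch insn these expand to nothing beyond any
   side effects of the address expression.  */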

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}
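
/* Worked example (added commentary, not in the original source): the
   CEIL (size, align) * align idiom rounds SIZE up to the next multiple
   of ALIGN before a register's slot is assigned.  E.g. with size == 10
   and an 8-byte aligned mode:

     CEIL (10, 8) * 8 == 2 * 8 == 16

   so the register is stored at offset 16, not 10.  */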

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1549
1550 /* Perform an untyped call and save the state required to perform an
1551 untyped return of whatever value was returned by the given function. */
1552
1553 static rtx
1554 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1555 {
1556 int size, align, regno;
1557 enum machine_mode mode;
1558 rtx incoming_args, result, reg, dest, src, call_insn;
1559 rtx old_stack_level = 0;
1560 rtx call_fusage = 0;
1561 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1562
1563 arguments = convert_memory_address (Pmode, arguments);
1564
1565 /* Create a block where the return registers can be saved. */
1566 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1567
1568 /* Fetch the arg pointer from the ARGUMENTS block. */
1569 incoming_args = gen_reg_rtx (Pmode);
1570 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1571 #ifndef STACK_GROWS_DOWNWARD
1572 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1573 incoming_args, 0, OPTAB_LIB_WIDEN);
1574 #endif
1575
1576 /* Push a new argument block and copy the arguments. Do not allow
1577 the (potential) memcpy call below to interfere with our stack
1578 manipulations. */
1579 do_pending_stack_adjust ();
1580 NO_DEFER_POP;
1581
1582 /* Save the stack with nonlocal if available. */
1583 #ifdef HAVE_save_stack_nonlocal
1584 if (HAVE_save_stack_nonlocal)
1585 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1586 else
1587 #endif
1588 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1589
1590 /* Allocate a block of memory onto the stack and copy the memory
1591 arguments to the outgoing arguments address. We can pass TRUE
1592 as the 4th argument because we just saved the stack pointer
1593 and will restore it right after the call. */
1594 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1595
1596 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1597 may have already set current_function_calls_alloca to true.
1598 current_function_calls_alloca won't be set if argsize is zero,
1599 so we have to guarantee need_drap is true here. */
1600 if (SUPPORTS_STACK_ALIGNMENT)
1601 crtl->need_drap = true;
1602
1603 dest = virtual_outgoing_args_rtx;
1604 #ifndef STACK_GROWS_DOWNWARD
1605 if (CONST_INT_P (argsize))
1606 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1607 else
1608 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1609 #endif
1610 dest = gen_rtx_MEM (BLKmode, dest);
1611 set_mem_align (dest, PARM_BOUNDARY);
1612 src = gen_rtx_MEM (BLKmode, incoming_args);
1613 set_mem_align (src, PARM_BOUNDARY);
1614 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1615
1616 /* Refer to the argument block. */
1617 apply_args_size ();
1618 arguments = gen_rtx_MEM (BLKmode, arguments);
1619 set_mem_align (arguments, PARM_BOUNDARY);
1620
1621 /* Walk past the arg-pointer and structure value address. */
1622 size = GET_MODE_SIZE (Pmode);
1623 if (struct_value)
1624 size += GET_MODE_SIZE (Pmode);
1625
1626 /* Restore each of the registers previously saved. Make USE insns
1627 for each of these registers for use in making the call. */
1628 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1629 if ((mode = apply_args_mode[regno]) != VOIDmode)
1630 {
1631 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1632 if (size % align != 0)
1633 size = CEIL (size, align) * align;
1634 reg = gen_rtx_REG (mode, regno);
1635 emit_move_insn (reg, adjust_address (arguments, mode, size));
1636 use_reg (&call_fusage, reg);
1637 size += GET_MODE_SIZE (mode);
1638 }
1639
1640 /* Restore the structure value address unless this is passed as an
1641 "invisible" first argument. */
1642 size = GET_MODE_SIZE (Pmode);
1643 if (struct_value)
1644 {
1645 rtx value = gen_reg_rtx (Pmode);
1646 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1647 emit_move_insn (struct_value, value);
1648 if (REG_P (struct_value))
1649 use_reg (&call_fusage, struct_value);
1650 size += GET_MODE_SIZE (Pmode);
1651 }
1652
1653 /* All arguments and registers used for the call are set up by now! */
1654 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1655
1656 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no
1657 work is needed, and we don't want to load it into a register as an
1658 optimization, because prepare_call_address already did so if needed. */
1659 if (GET_CODE (function) != SYMBOL_REF)
1660 function = memory_address (FUNCTION_MODE, function);
1661
1662 /* Generate the actual call instruction and save the return value. */
1663 #ifdef HAVE_untyped_call
1664 if (HAVE_untyped_call)
1665 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1666 result, result_vector (1, result)));
1667 else
1668 #endif
1669 #ifdef HAVE_call_value
1670 if (HAVE_call_value)
1671 {
1672 rtx valreg = 0;
1673
1674 /* Locate the unique return register. It is not possible to
1675 express a call that sets more than one return register using
1676 call_value; use untyped_call for that. In fact, untyped_call
1677 only needs to save the return registers in the given block. */
1678 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1679 if ((mode = apply_result_mode[regno]) != VOIDmode)
1680 {
1681 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1682
1683 valreg = gen_rtx_REG (mode, regno);
1684 }
1685
1686 emit_call_insn (GEN_CALL_VALUE (valreg,
1687 gen_rtx_MEM (FUNCTION_MODE, function),
1688 const0_rtx, NULL_RTX, const0_rtx));
1689
1690 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1691 }
1692 else
1693 #endif
1694 gcc_unreachable ();
1695
1696 /* Find the CALL insn we just emitted, and attach the register usage
1697 information. */
1698 call_insn = last_call_insn ();
1699 add_function_usage_to (call_insn, call_fusage);
1700
1701 /* Restore the stack. */
1702 #ifdef HAVE_save_stack_nonlocal
1703 if (HAVE_save_stack_nonlocal)
1704 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1705 else
1706 #endif
1707 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1708 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1709
1710 OK_DEFER_POP;
1711
1712 /* Return the address of the result block. */
1713 result = copy_addr_to_reg (XEXP (result, 0));
1714 return convert_memory_address (ptr_mode, result);
1715 }
1716
1717 /* Perform an untyped return. */
1718
1719 static void
1720 expand_builtin_return (rtx result)
1721 {
1722 int size, align, regno;
1723 enum machine_mode mode;
1724 rtx reg;
1725 rtx call_fusage = 0;
1726
1727 result = convert_memory_address (Pmode, result);
1728
1729 apply_result_size ();
1730 result = gen_rtx_MEM (BLKmode, result);
1731
1732 #ifdef HAVE_untyped_return
1733 if (HAVE_untyped_return)
1734 {
1735 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1736 emit_barrier ();
1737 return;
1738 }
1739 #endif
1740
1741 /* Restore the return value and note that each value is used. */
1742 size = 0;
1743 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1744 if ((mode = apply_result_mode[regno]) != VOIDmode)
1745 {
1746 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1747 if (size % align != 0)
1748 size = CEIL (size, align) * align;
1749 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1750 emit_move_insn (reg, adjust_address (result, mode, size));
1751
1752 push_to_sequence (call_fusage);
1753 emit_use (reg);
1754 call_fusage = get_insns ();
1755 end_sequence ();
1756 size += GET_MODE_SIZE (mode);
1757 }
1758
1759 /* Put the USE insns before the return. */
1760 emit_insn (call_fusage);
1761
1762 /* Return whatever value was restored by jumping directly to the end
1763 of the function. */
1764 expand_naked_return ();
1765 }
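
/* Illustrative sketch (editorial, not part of the original source): at
   the C level the three builtins expanded above are typically combined
   to forward an arbitrary call, e.g.

     void *wrapper (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }

   Here target_fn is a hypothetical function being wrapped, and 64 is a
   caller-chosen upper bound on the size of the argument block.  */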
1766
1767 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1768
1769 static enum type_class
1770 type_to_class (tree type)
1771 {
1772 switch (TREE_CODE (type))
1773 {
1774 case VOID_TYPE: return void_type_class;
1775 case INTEGER_TYPE: return integer_type_class;
1776 case ENUMERAL_TYPE: return enumeral_type_class;
1777 case BOOLEAN_TYPE: return boolean_type_class;
1778 case POINTER_TYPE: return pointer_type_class;
1779 case REFERENCE_TYPE: return reference_type_class;
1780 case OFFSET_TYPE: return offset_type_class;
1781 case REAL_TYPE: return real_type_class;
1782 case COMPLEX_TYPE: return complex_type_class;
1783 case FUNCTION_TYPE: return function_type_class;
1784 case METHOD_TYPE: return method_type_class;
1785 case RECORD_TYPE: return record_type_class;
1786 case UNION_TYPE:
1787 case QUAL_UNION_TYPE: return union_type_class;
1788 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1789 ? string_type_class : array_type_class);
1790 case LANG_TYPE: return lang_type_class;
1791 default: return no_type_class;
1792 }
1793 }
1794
1795 /* Expand a call EXP to __builtin_classify_type. */
1796
1797 static rtx
1798 expand_builtin_classify_type (tree exp)
1799 {
1800 if (call_expr_nargs (exp))
1801 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1802 return GEN_INT (no_type_class);
1803 }
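
/* For example (a sketch, assuming the type_class values defined in
   typeclass.h): __builtin_classify_type (0) yields 1 (integer_type_class)
   and __builtin_classify_type (0.0) yields 8 (real_type_class).  */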
1804
1805 /* This helper macro, meant to be used in mathfn_built_in below,
1806 determines which among a set of three builtin math functions is
1807 appropriate for a given type mode. The `F' and `L' cases are
1808 automatically generated from the `double' case. */
1809 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1810 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1811 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1812 fcodel = BUILT_IN_MATHFN##L ; break;
1813 /* Similar to above, but appends _R after any F/L suffix. */
1814 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1815 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1816 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1817 fcodel = BUILT_IN_MATHFN##L_R ; break;
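
/* For instance, CASE_MATHFN (BUILT_IN_SQRT) in the switch below expands
   to:

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;  */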
1818
1819 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1820 if available. If IMPLICIT is true use the implicit builtin declaration,
1821 otherwise use the explicit declaration. If we can't do the conversion,
1822 return zero. */
1823
1824 static tree
1825 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1826 {
1827 enum built_in_function fcode, fcodef, fcodel, fcode2;
1828
1829 switch (fn)
1830 {
1831 CASE_MATHFN (BUILT_IN_ACOS)
1832 CASE_MATHFN (BUILT_IN_ACOSH)
1833 CASE_MATHFN (BUILT_IN_ASIN)
1834 CASE_MATHFN (BUILT_IN_ASINH)
1835 CASE_MATHFN (BUILT_IN_ATAN)
1836 CASE_MATHFN (BUILT_IN_ATAN2)
1837 CASE_MATHFN (BUILT_IN_ATANH)
1838 CASE_MATHFN (BUILT_IN_CBRT)
1839 CASE_MATHFN (BUILT_IN_CEIL)
1840 CASE_MATHFN (BUILT_IN_CEXPI)
1841 CASE_MATHFN (BUILT_IN_COPYSIGN)
1842 CASE_MATHFN (BUILT_IN_COS)
1843 CASE_MATHFN (BUILT_IN_COSH)
1844 CASE_MATHFN (BUILT_IN_DREM)
1845 CASE_MATHFN (BUILT_IN_ERF)
1846 CASE_MATHFN (BUILT_IN_ERFC)
1847 CASE_MATHFN (BUILT_IN_EXP)
1848 CASE_MATHFN (BUILT_IN_EXP10)
1849 CASE_MATHFN (BUILT_IN_EXP2)
1850 CASE_MATHFN (BUILT_IN_EXPM1)
1851 CASE_MATHFN (BUILT_IN_FABS)
1852 CASE_MATHFN (BUILT_IN_FDIM)
1853 CASE_MATHFN (BUILT_IN_FLOOR)
1854 CASE_MATHFN (BUILT_IN_FMA)
1855 CASE_MATHFN (BUILT_IN_FMAX)
1856 CASE_MATHFN (BUILT_IN_FMIN)
1857 CASE_MATHFN (BUILT_IN_FMOD)
1858 CASE_MATHFN (BUILT_IN_FREXP)
1859 CASE_MATHFN (BUILT_IN_GAMMA)
1860 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1861 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1862 CASE_MATHFN (BUILT_IN_HYPOT)
1863 CASE_MATHFN (BUILT_IN_ILOGB)
1864 CASE_MATHFN (BUILT_IN_ICEIL)
1865 CASE_MATHFN (BUILT_IN_IFLOOR)
1866 CASE_MATHFN (BUILT_IN_INF)
1867 CASE_MATHFN (BUILT_IN_IRINT)
1868 CASE_MATHFN (BUILT_IN_IROUND)
1869 CASE_MATHFN (BUILT_IN_ISINF)
1870 CASE_MATHFN (BUILT_IN_J0)
1871 CASE_MATHFN (BUILT_IN_J1)
1872 CASE_MATHFN (BUILT_IN_JN)
1873 CASE_MATHFN (BUILT_IN_LCEIL)
1874 CASE_MATHFN (BUILT_IN_LDEXP)
1875 CASE_MATHFN (BUILT_IN_LFLOOR)
1876 CASE_MATHFN (BUILT_IN_LGAMMA)
1877 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1878 CASE_MATHFN (BUILT_IN_LLCEIL)
1879 CASE_MATHFN (BUILT_IN_LLFLOOR)
1880 CASE_MATHFN (BUILT_IN_LLRINT)
1881 CASE_MATHFN (BUILT_IN_LLROUND)
1882 CASE_MATHFN (BUILT_IN_LOG)
1883 CASE_MATHFN (BUILT_IN_LOG10)
1884 CASE_MATHFN (BUILT_IN_LOG1P)
1885 CASE_MATHFN (BUILT_IN_LOG2)
1886 CASE_MATHFN (BUILT_IN_LOGB)
1887 CASE_MATHFN (BUILT_IN_LRINT)
1888 CASE_MATHFN (BUILT_IN_LROUND)
1889 CASE_MATHFN (BUILT_IN_MODF)
1890 CASE_MATHFN (BUILT_IN_NAN)
1891 CASE_MATHFN (BUILT_IN_NANS)
1892 CASE_MATHFN (BUILT_IN_NEARBYINT)
1893 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1894 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1895 CASE_MATHFN (BUILT_IN_POW)
1896 CASE_MATHFN (BUILT_IN_POWI)
1897 CASE_MATHFN (BUILT_IN_POW10)
1898 CASE_MATHFN (BUILT_IN_REMAINDER)
1899 CASE_MATHFN (BUILT_IN_REMQUO)
1900 CASE_MATHFN (BUILT_IN_RINT)
1901 CASE_MATHFN (BUILT_IN_ROUND)
1902 CASE_MATHFN (BUILT_IN_SCALB)
1903 CASE_MATHFN (BUILT_IN_SCALBLN)
1904 CASE_MATHFN (BUILT_IN_SCALBN)
1905 CASE_MATHFN (BUILT_IN_SIGNBIT)
1906 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1907 CASE_MATHFN (BUILT_IN_SIN)
1908 CASE_MATHFN (BUILT_IN_SINCOS)
1909 CASE_MATHFN (BUILT_IN_SINH)
1910 CASE_MATHFN (BUILT_IN_SQRT)
1911 CASE_MATHFN (BUILT_IN_TAN)
1912 CASE_MATHFN (BUILT_IN_TANH)
1913 CASE_MATHFN (BUILT_IN_TGAMMA)
1914 CASE_MATHFN (BUILT_IN_TRUNC)
1915 CASE_MATHFN (BUILT_IN_Y0)
1916 CASE_MATHFN (BUILT_IN_Y1)
1917 CASE_MATHFN (BUILT_IN_YN)
1918
1919 default:
1920 return NULL_TREE;
1921 }
1922
1923 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1924 fcode2 = fcode;
1925 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1926 fcode2 = fcodef;
1927 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1928 fcode2 = fcodel;
1929 else
1930 return NULL_TREE;
1931
1932 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1933 return NULL_TREE;
1934
1935 return builtin_decl_explicit (fcode2);
1936 }
1937
1938 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1939
1940 tree
1941 mathfn_built_in (tree type, enum built_in_function fn)
1942 {
1943 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1944 }
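
/* For example, mathfn_built_in (float_type_node, BUILT_IN_SQRT) returns
   the declaration of sqrtf if it is implicitly available, and NULL_TREE
   otherwise.  */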
1945
1946 /* If errno must be maintained, expand the RTL to check if the result,
1947 TARGET, of a built-in function call, EXP, is NaN, and if so set
1948 errno to EDOM. */
1949
1950 static void
1951 expand_errno_check (tree exp, rtx target)
1952 {
1953 rtx lab = gen_label_rtx ();
1954
1955 /* Test the result; if it is NaN, set errno=EDOM because
1956 the argument was not in the domain. */
1957 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1958 NULL_RTX, NULL_RTX, lab,
1959 /* The jump is very likely. */
1960 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1961
1962 #ifdef TARGET_EDOM
1963 /* If this built-in doesn't throw an exception, set errno directly. */
1964 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1965 {
1966 #ifdef GEN_ERRNO_RTX
1967 rtx errno_rtx = GEN_ERRNO_RTX;
1968 #else
1969 rtx errno_rtx
1970 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1971 #endif
1972 emit_move_insn (errno_rtx,
1973 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1974 emit_label (lab);
1975 return;
1976 }
1977 #endif
1978
1979 /* Make sure the library call isn't expanded as a tail call. */
1980 CALL_EXPR_TAILCALL (exp) = 0;
1981
1982 /* We can't set errno=EDOM directly; let the library call do it.
1983 Pop the arguments right away in case the call gets deleted. */
1984 NO_DEFER_POP;
1985 expand_call (exp, target, 0);
1986 OK_DEFER_POP;
1987 emit_label (lab);
1988 }
1989
1990 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1991 Return NULL_RTX if a normal call should be emitted rather than expanding
1992 the function in-line. EXP is the expression that is a call to the builtin
1993 function; if convenient, the result should be placed in TARGET.
1994 SUBTARGET may be used as the target for computing one of EXP's operands. */
1995
1996 static rtx
1997 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1998 {
1999 optab builtin_optab;
2000 rtx op0, insns;
2001 tree fndecl = get_callee_fndecl (exp);
2002 enum machine_mode mode;
2003 bool errno_set = false;
2004 bool try_widening = false;
2005 tree arg;
2006
2007 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2008 return NULL_RTX;
2009
2010 arg = CALL_EXPR_ARG (exp, 0);
2011
2012 switch (DECL_FUNCTION_CODE (fndecl))
2013 {
2014 CASE_FLT_FN (BUILT_IN_SQRT):
2015 errno_set = ! tree_expr_nonnegative_p (arg);
2016 try_widening = true;
2017 builtin_optab = sqrt_optab;
2018 break;
2019 CASE_FLT_FN (BUILT_IN_EXP):
2020 errno_set = true; builtin_optab = exp_optab; break;
2021 CASE_FLT_FN (BUILT_IN_EXP10):
2022 CASE_FLT_FN (BUILT_IN_POW10):
2023 errno_set = true; builtin_optab = exp10_optab; break;
2024 CASE_FLT_FN (BUILT_IN_EXP2):
2025 errno_set = true; builtin_optab = exp2_optab; break;
2026 CASE_FLT_FN (BUILT_IN_EXPM1):
2027 errno_set = true; builtin_optab = expm1_optab; break;
2028 CASE_FLT_FN (BUILT_IN_LOGB):
2029 errno_set = true; builtin_optab = logb_optab; break;
2030 CASE_FLT_FN (BUILT_IN_LOG):
2031 errno_set = true; builtin_optab = log_optab; break;
2032 CASE_FLT_FN (BUILT_IN_LOG10):
2033 errno_set = true; builtin_optab = log10_optab; break;
2034 CASE_FLT_FN (BUILT_IN_LOG2):
2035 errno_set = true; builtin_optab = log2_optab; break;
2036 CASE_FLT_FN (BUILT_IN_LOG1P):
2037 errno_set = true; builtin_optab = log1p_optab; break;
2038 CASE_FLT_FN (BUILT_IN_ASIN):
2039 builtin_optab = asin_optab; break;
2040 CASE_FLT_FN (BUILT_IN_ACOS):
2041 builtin_optab = acos_optab; break;
2042 CASE_FLT_FN (BUILT_IN_TAN):
2043 builtin_optab = tan_optab; break;
2044 CASE_FLT_FN (BUILT_IN_ATAN):
2045 builtin_optab = atan_optab; break;
2046 CASE_FLT_FN (BUILT_IN_FLOOR):
2047 builtin_optab = floor_optab; break;
2048 CASE_FLT_FN (BUILT_IN_CEIL):
2049 builtin_optab = ceil_optab; break;
2050 CASE_FLT_FN (BUILT_IN_TRUNC):
2051 builtin_optab = btrunc_optab; break;
2052 CASE_FLT_FN (BUILT_IN_ROUND):
2053 builtin_optab = round_optab; break;
2054 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2055 builtin_optab = nearbyint_optab;
2056 if (flag_trapping_math)
2057 break;
2058 /* Else fall through and expand as rint. */
2059 CASE_FLT_FN (BUILT_IN_RINT):
2060 builtin_optab = rint_optab; break;
2061 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2062 builtin_optab = significand_optab; break;
2063 default:
2064 gcc_unreachable ();
2065 }
2066
2067 /* Make a suitable register to place result in. */
2068 mode = TYPE_MODE (TREE_TYPE (exp));
2069
2070 if (! flag_errno_math || ! HONOR_NANS (mode))
2071 errno_set = false;
2072
2073 /* Before working hard, check whether the instruction is available, but try
2074 to widen the mode for specific operations. */
2075 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2076 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2077 && (!errno_set || !optimize_insn_for_size_p ()))
2078 {
2079 rtx result = gen_reg_rtx (mode);
2080
2081 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2082 need to expand the argument again. This way, we will not perform
2083 side-effects more than once. */
2084 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2085
2086 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2087
2088 start_sequence ();
2089
2090 /* Compute into RESULT.
2091 Set RESULT to wherever the result comes back. */
2092 result = expand_unop (mode, builtin_optab, op0, result, 0);
2093
2094 if (result != 0)
2095 {
2096 if (errno_set)
2097 expand_errno_check (exp, result);
2098
2099 /* Output the entire sequence. */
2100 insns = get_insns ();
2101 end_sequence ();
2102 emit_insn (insns);
2103 return result;
2104 }
2105
2106 /* If we were unable to expand via the builtin, stop the sequence
2107 (without outputting the insns) and call the library function
2108 with the stabilized argument list. */
2109 end_sequence ();
2110 }
2111
2112 return expand_call (exp, target, target == const0_rtx);
2113 }
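
/* Behavioral sketch of the expansion above: under -fno-math-errno a call
   such as sqrt (x) is expanded directly through sqrt_optab when the
   target provides an insn for it; when errno is honored, the inline
   expansion is followed by the NaN check from expand_errno_check, and
   when additionally optimizing for size the library call is kept.  */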
2114
2115 /* Expand a call to the builtin binary math functions (pow and atan2).
2116 Return NULL_RTX if a normal call should be emitted rather than expanding the
2117 function in-line. EXP is the expression that is a call to the builtin
2118 function; if convenient, the result should be placed in TARGET.
2119 SUBTARGET may be used as the target for computing one of EXP's
2120 operands. */
2121
2122 static rtx
2123 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2124 {
2125 optab builtin_optab;
2126 rtx op0, op1, insns, result;
2127 int op1_type = REAL_TYPE;
2128 tree fndecl = get_callee_fndecl (exp);
2129 tree arg0, arg1;
2130 enum machine_mode mode;
2131 bool errno_set = true;
2132
2133 switch (DECL_FUNCTION_CODE (fndecl))
2134 {
2135 CASE_FLT_FN (BUILT_IN_SCALBN):
2136 CASE_FLT_FN (BUILT_IN_SCALBLN):
2137 CASE_FLT_FN (BUILT_IN_LDEXP):
2138 op1_type = INTEGER_TYPE;
2139 default:
2140 break;
2141 }
2142
2143 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2144 return NULL_RTX;
2145
2146 arg0 = CALL_EXPR_ARG (exp, 0);
2147 arg1 = CALL_EXPR_ARG (exp, 1);
2148
2149 switch (DECL_FUNCTION_CODE (fndecl))
2150 {
2151 CASE_FLT_FN (BUILT_IN_POW):
2152 builtin_optab = pow_optab; break;
2153 CASE_FLT_FN (BUILT_IN_ATAN2):
2154 builtin_optab = atan2_optab; break;
2155 CASE_FLT_FN (BUILT_IN_SCALB):
2156 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2157 return 0;
2158 builtin_optab = scalb_optab; break;
2159 CASE_FLT_FN (BUILT_IN_SCALBN):
2160 CASE_FLT_FN (BUILT_IN_SCALBLN):
2161 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2162 return 0;
2163 /* Fall through... */
2164 CASE_FLT_FN (BUILT_IN_LDEXP):
2165 builtin_optab = ldexp_optab; break;
2166 CASE_FLT_FN (BUILT_IN_FMOD):
2167 builtin_optab = fmod_optab; break;
2168 CASE_FLT_FN (BUILT_IN_REMAINDER):
2169 CASE_FLT_FN (BUILT_IN_DREM):
2170 builtin_optab = remainder_optab; break;
2171 default:
2172 gcc_unreachable ();
2173 }
2174
2175 /* Make a suitable register to place result in. */
2176 mode = TYPE_MODE (TREE_TYPE (exp));
2177
2178 /* Before working hard, check whether the instruction is available. */
2179 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2180 return NULL_RTX;
2181
2182 result = gen_reg_rtx (mode);
2183
2184 if (! flag_errno_math || ! HONOR_NANS (mode))
2185 errno_set = false;
2186
2187 if (errno_set && optimize_insn_for_size_p ())
2188 return 0;
2189
2190 /* Always stabilize the argument list. */
2191 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2192 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2193
2194 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2195 op1 = expand_normal (arg1);
2196
2197 start_sequence ();
2198
2199 /* Compute into RESULT.
2200 Set RESULT to wherever the result comes back. */
2201 result = expand_binop (mode, builtin_optab, op0, op1,
2202 result, 0, OPTAB_DIRECT);
2203
2204 /* If we were unable to expand via the builtin, stop the sequence
2205 (without outputting the insns) and call the library function
2206 with the stabilized argument list. */
2207 if (result == 0)
2208 {
2209 end_sequence ();
2210 return expand_call (exp, target, target == const0_rtx);
2211 }
2212
2213 if (errno_set)
2214 expand_errno_check (exp, result);
2215
2216 /* Output the entire sequence. */
2217 insns = get_insns ();
2218 end_sequence ();
2219 emit_insn (insns);
2220
2221 return result;
2222 }
2223
2224 /* Expand a call to the builtin ternary math functions (fma).
2225 Return NULL_RTX if a normal call should be emitted rather than expanding the
2226 function in-line. EXP is the expression that is a call to the builtin
2227 function; if convenient, the result should be placed in TARGET.
2228 SUBTARGET may be used as the target for computing one of EXP's
2229 operands. */
2230
2231 static rtx
2232 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2233 {
2234 optab builtin_optab;
2235 rtx op0, op1, op2, insns, result;
2236 tree fndecl = get_callee_fndecl (exp);
2237 tree arg0, arg1, arg2;
2238 enum machine_mode mode;
2239
2240 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2241 return NULL_RTX;
2242
2243 arg0 = CALL_EXPR_ARG (exp, 0);
2244 arg1 = CALL_EXPR_ARG (exp, 1);
2245 arg2 = CALL_EXPR_ARG (exp, 2);
2246
2247 switch (DECL_FUNCTION_CODE (fndecl))
2248 {
2249 CASE_FLT_FN (BUILT_IN_FMA):
2250 builtin_optab = fma_optab; break;
2251 default:
2252 gcc_unreachable ();
2253 }
2254
2255 /* Make a suitable register to place result in. */
2256 mode = TYPE_MODE (TREE_TYPE (exp));
2257
2258 /* Before working hard, check whether the instruction is available. */
2259 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2260 return NULL_RTX;
2261
2262 result = gen_reg_rtx (mode);
2263
2264 /* Always stabilize the argument list. */
2265 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2266 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2267 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2268
2269 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2270 op1 = expand_normal (arg1);
2271 op2 = expand_normal (arg2);
2272
2273 start_sequence ();
2274
2275 /* Compute into RESULT.
2276 Set RESULT to wherever the result comes back. */
2277 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2278 result, 0);
2279
2280 /* If we were unable to expand via the builtin, stop the sequence
2281 (without outputting the insns) and call the library function
2282 with the stabilized argument list. */
2283 if (result == 0)
2284 {
2285 end_sequence ();
2286 return expand_call (exp, target, target == const0_rtx);
2287 }
2288
2289 /* Output the entire sequence. */
2290 insns = get_insns ();
2291 end_sequence ();
2292 emit_insn (insns);
2293
2294 return result;
2295 }
2296
2297 /* Expand a call to the builtin sin and cos math functions.
2298 Return NULL_RTX if a normal call should be emitted rather than expanding the
2299 function in-line. EXP is the expression that is a call to the builtin
2300 function; if convenient, the result should be placed in TARGET.
2301 SUBTARGET may be used as the target for computing one of EXP's
2302 operands. */
2303
2304 static rtx
2305 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2306 {
2307 optab builtin_optab;
2308 rtx op0, insns;
2309 tree fndecl = get_callee_fndecl (exp);
2310 enum machine_mode mode;
2311 tree arg;
2312
2313 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2314 return NULL_RTX;
2315
2316 arg = CALL_EXPR_ARG (exp, 0);
2317
2318 switch (DECL_FUNCTION_CODE (fndecl))
2319 {
2320 CASE_FLT_FN (BUILT_IN_SIN):
2321 CASE_FLT_FN (BUILT_IN_COS):
2322 builtin_optab = sincos_optab; break;
2323 default:
2324 gcc_unreachable ();
2325 }
2326
2327 /* Make a suitable register to place result in. */
2328 mode = TYPE_MODE (TREE_TYPE (exp));
2329
2330 /* Check if the sincos insn is available; otherwise fall back
2331 to the sin or cos insn. */
2332 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2333 switch (DECL_FUNCTION_CODE (fndecl))
2334 {
2335 CASE_FLT_FN (BUILT_IN_SIN):
2336 builtin_optab = sin_optab; break;
2337 CASE_FLT_FN (BUILT_IN_COS):
2338 builtin_optab = cos_optab; break;
2339 default:
2340 gcc_unreachable ();
2341 }
2342
2343 /* Before working hard, check whether the instruction is available. */
2344 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2345 {
2346 rtx result = gen_reg_rtx (mode);
2347
2348 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2349 need to expand the argument again. This way, we will not perform
2350 side-effects more than once. */
2351 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2352
2353 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2354
2355 start_sequence ();
2356
2357 /* Compute into RESULT.
2358 Set RESULT to wherever the result comes back. */
2359 if (builtin_optab == sincos_optab)
2360 {
2361 int ok;
2362
2363 switch (DECL_FUNCTION_CODE (fndecl))
2364 {
2365 CASE_FLT_FN (BUILT_IN_SIN):
2366 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2367 break;
2368 CASE_FLT_FN (BUILT_IN_COS):
2369 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2370 break;
2371 default:
2372 gcc_unreachable ();
2373 }
2374 gcc_assert (ok);
2375 }
2376 else
2377 result = expand_unop (mode, builtin_optab, op0, result, 0);
2378
2379 if (result != 0)
2380 {
2381 /* Output the entire sequence. */
2382 insns = get_insns ();
2383 end_sequence ();
2384 emit_insn (insns);
2385 return result;
2386 }
2387
2388 /* If we were unable to expand via the builtin, stop the sequence
2389 (without outputting the insns) and call the library function
2390 with the stabilized argument list. */
2391 end_sequence ();
2392 }
2393
2394 return expand_call (exp, target, target == const0_rtx);
2395 }
2396
2397 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2398 return an RTL instruction code that implements the functionality.
2399 If that isn't possible or available return CODE_FOR_nothing. */
2400
2401 static enum insn_code
2402 interclass_mathfn_icode (tree arg, tree fndecl)
2403 {
2404 bool errno_set = false;
2405 optab builtin_optab = unknown_optab;
2406 enum machine_mode mode;
2407
2408 switch (DECL_FUNCTION_CODE (fndecl))
2409 {
2410 CASE_FLT_FN (BUILT_IN_ILOGB):
2411 errno_set = true; builtin_optab = ilogb_optab; break;
2412 CASE_FLT_FN (BUILT_IN_ISINF):
2413 builtin_optab = isinf_optab; break;
2414 case BUILT_IN_ISNORMAL:
2415 case BUILT_IN_ISFINITE:
2416 CASE_FLT_FN (BUILT_IN_FINITE):
2417 case BUILT_IN_FINITED32:
2418 case BUILT_IN_FINITED64:
2419 case BUILT_IN_FINITED128:
2420 case BUILT_IN_ISINFD32:
2421 case BUILT_IN_ISINFD64:
2422 case BUILT_IN_ISINFD128:
2423 /* These builtins have no optabs (yet). */
2424 break;
2425 default:
2426 gcc_unreachable ();
2427 }
2428
2429 /* There's no easy way to detect the case we need to set EDOM. */
2430 if (flag_errno_math && errno_set)
2431 return CODE_FOR_nothing;
2432
2433 /* Optab mode depends on the mode of the input argument. */
2434 mode = TYPE_MODE (TREE_TYPE (arg));
2435
2436 if (builtin_optab)
2437 return optab_handler (builtin_optab, mode);
2438 return CODE_FOR_nothing;
2439 }
2440
2441 /* Expand a call to one of the builtin math functions that operate on
2442 a floating-point argument and produce an integer result (ilogb, isinf,
2443 isnan, etc.).
2444 Return 0 if a normal call should be emitted rather than expanding the
2445 function in-line. EXP is the expression that is a call to the builtin
2446 function; if convenient, the result should be placed in TARGET. */
2447
2448 static rtx
2449 expand_builtin_interclass_mathfn (tree exp, rtx target)
2450 {
2451 enum insn_code icode = CODE_FOR_nothing;
2452 rtx op0;
2453 tree fndecl = get_callee_fndecl (exp);
2454 enum machine_mode mode;
2455 tree arg;
2456
2457 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2458 return NULL_RTX;
2459
2460 arg = CALL_EXPR_ARG (exp, 0);
2461 icode = interclass_mathfn_icode (arg, fndecl);
2462 mode = TYPE_MODE (TREE_TYPE (arg));
2463
2464 if (icode != CODE_FOR_nothing)
2465 {
2466 struct expand_operand ops[1];
2467 rtx last = get_last_insn ();
2468 tree orig_arg = arg;
2469
2470 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2471 need to expand the argument again. This way, we will not perform
2472 side-effects more than once. */
2473 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2474
2475 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2476
2477 if (mode != GET_MODE (op0))
2478 op0 = convert_to_mode (mode, op0, 0);
2479
2480 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2481 if (maybe_legitimize_operands (icode, 0, 1, ops)
2482 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2483 return ops[0].value;
2484
2485 delete_insns_since (last);
2486 CALL_EXPR_ARG (exp, 0) = orig_arg;
2487 }
2488
2489 return NULL_RTX;
2490 }
2491
2492 /* Expand a call to the builtin sincos math function.
2493 Return NULL_RTX if a normal call should be emitted rather than expanding the
2494 function in-line. EXP is the expression that is a call to the builtin
2495 function. */
2496
2497 static rtx
2498 expand_builtin_sincos (tree exp)
2499 {
2500 rtx op0, op1, op2, target1, target2;
2501 enum machine_mode mode;
2502 tree arg, sinp, cosp;
2503 int result;
2504 location_t loc = EXPR_LOCATION (exp);
2505 tree alias_type, alias_off;
2506
2507 if (!validate_arglist (exp, REAL_TYPE,
2508 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2509 return NULL_RTX;
2510
2511 arg = CALL_EXPR_ARG (exp, 0);
2512 sinp = CALL_EXPR_ARG (exp, 1);
2513 cosp = CALL_EXPR_ARG (exp, 2);
2514
2515 /* Make a suitable register to place result in. */
2516 mode = TYPE_MODE (TREE_TYPE (arg));
2517
2518 /* Check if sincos insn is available, otherwise emit the call. */
2519 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2520 return NULL_RTX;
2521
2522 target1 = gen_reg_rtx (mode);
2523 target2 = gen_reg_rtx (mode);
2524
2525 op0 = expand_normal (arg);
2526 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2527 alias_off = build_int_cst (alias_type, 0);
2528 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2529 sinp, alias_off));
2530 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2531 cosp, alias_off));
2532
2533 /* Compute into target1 and target2.
2534 Set TARGET to wherever the result comes back. */
2535 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2536 gcc_assert (result);
2537
2538 /* Move target1 and target2 to the memory locations indicated
2539 by op1 and op2. */
2540 emit_move_insn (op1, target1);
2541 emit_move_insn (op2, target2);
2542
2543 return const0_rtx;
2544 }
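
/* Source-level sketch: for a GNU C call such as

     double s, c;
     sincos (x, &s, &c);

   the code above computes both results with a single sincos insn and
   stores them through the two pointer arguments.  */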
2545
2546 /* Expand a call to the internal cexpi builtin to the sincos math function.
2547 EXP is the expression that is a call to the builtin function; if convenient,
2548 the result should be placed in TARGET. */
2549
2550 static rtx
2551 expand_builtin_cexpi (tree exp, rtx target)
2552 {
2553 tree fndecl = get_callee_fndecl (exp);
2554 tree arg, type;
2555 enum machine_mode mode;
2556 rtx op0, op1, op2;
2557 location_t loc = EXPR_LOCATION (exp);
2558
2559 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2560 return NULL_RTX;
2561
2562 arg = CALL_EXPR_ARG (exp, 0);
2563 type = TREE_TYPE (arg);
2564 mode = TYPE_MODE (TREE_TYPE (arg));
2565
2566 /* Try expanding via a sincos optab, fall back to emitting a libcall
2567 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2568 is only generated from sincos or cexp, or when either is available. */
2569 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2570 {
2571 op1 = gen_reg_rtx (mode);
2572 op2 = gen_reg_rtx (mode);
2573
2574 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2575
2576 /* Compute into op1 and op2. */
2577 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2578 }
2579 else if (targetm.libc_has_function (function_sincos))
2580 {
2581 tree call, fn = NULL_TREE;
2582 tree top1, top2;
2583 rtx op1a, op2a;
2584
2585 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2586 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2587 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2588 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2589 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2590 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2591 else
2592 gcc_unreachable ();
2593
2594 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2595 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2596 op1a = copy_addr_to_reg (XEXP (op1, 0));
2597 op2a = copy_addr_to_reg (XEXP (op2, 0));
2598 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2599 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2600
2601 /* Make sure not to fold the sincos call again. */
2602 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2603 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2604 call, 3, arg, top1, top2));
2605 }
2606 else
2607 {
2608 tree call, fn = NULL_TREE, narg;
2609 tree ctype = build_complex_type (type);
2610
2611 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2612 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2613 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2614 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2615 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2616 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2617 else
2618 gcc_unreachable ();
2619
2620 /* If we don't have a decl for cexp, create one. This is the
2621 friendliest fallback if the user calls __builtin_cexpi
2622 on a target without full C99 function support. */
2623 if (fn == NULL_TREE)
2624 {
2625 tree fntype;
2626 const char *name = NULL;
2627
2628 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2629 name = "cexpf";
2630 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2631 name = "cexp";
2632 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2633 name = "cexpl";
2634
2635 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2636 fn = build_fn_decl (name, fntype);
2637 }
2638
2639 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2640 build_real (type, dconst0), arg);
2641
2642 /* Make sure not to fold the cexp call again. */
2643 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2644 return expand_expr (build_call_nary (ctype, call, 1, narg),
2645 target, VOIDmode, EXPAND_NORMAL);
2646 }
2647
2648 /* Now build the proper return type. */
2649 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2650 make_tree (TREE_TYPE (arg), op2),
2651 make_tree (TREE_TYPE (arg), op1)),
2652 target, VOIDmode, EXPAND_NORMAL);
2653 }
2654
2655 /* Conveniently construct a function call expression. FNDECL names the
2656 function to be called, N is the number of arguments, and the "..."
2657 parameters are the argument expressions. Unlike build_call_expr,
2658 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2659
2660 static tree
2661 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2662 {
2663 va_list ap;
2664 tree fntype = TREE_TYPE (fndecl);
2665 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2666
2667 va_start (ap, n);
2668 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2669 va_end (ap);
2670 SET_EXPR_LOCATION (fn, loc);
2671 return fn;
2672 }
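
/* Typical use, as seen later in this file:

     tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
     tree result = build_call_nofold_loc (loc, fn, 2, dst, src);

   which yields a CALL_EXPR that has not been folded away.  */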
2673
2674 /* Expand a call to one of the builtin rounding functions gcc defines
2675 as an extension (lfloor and lceil). As these are gcc extensions we
2676 do not need to worry about setting errno to EDOM.
2677 If expanding via optab fails, lower expression to (int)(floor(x)).
2678 EXP is the expression that is a call to the builtin function;
2679 if convenient, the result should be placed in TARGET. */
2680
2681 static rtx
2682 expand_builtin_int_roundingfn (tree exp, rtx target)
2683 {
2684 convert_optab builtin_optab;
2685 rtx op0, insns, tmp;
2686 tree fndecl = get_callee_fndecl (exp);
2687 enum built_in_function fallback_fn;
2688 tree fallback_fndecl;
2689 enum machine_mode mode;
2690 tree arg;
2691
2692 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2693 gcc_unreachable ();
2694
2695 arg = CALL_EXPR_ARG (exp, 0);
2696
2697 switch (DECL_FUNCTION_CODE (fndecl))
2698 {
2699 CASE_FLT_FN (BUILT_IN_ICEIL):
2700 CASE_FLT_FN (BUILT_IN_LCEIL):
2701 CASE_FLT_FN (BUILT_IN_LLCEIL):
2702 builtin_optab = lceil_optab;
2703 fallback_fn = BUILT_IN_CEIL;
2704 break;
2705
2706 CASE_FLT_FN (BUILT_IN_IFLOOR):
2707 CASE_FLT_FN (BUILT_IN_LFLOOR):
2708 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2709 builtin_optab = lfloor_optab;
2710 fallback_fn = BUILT_IN_FLOOR;
2711 break;
2712
2713 default:
2714 gcc_unreachable ();
2715 }
2716
2717 /* Make a suitable register to place result in. */
2718 mode = TYPE_MODE (TREE_TYPE (exp));
2719
2720 target = gen_reg_rtx (mode);
2721
2722 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2723 need to expand the argument again. This way, we will not perform
2724 side-effects more than once. */
2725 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2726
2727 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2728
2729 start_sequence ();
2730
2731 /* Compute into TARGET. */
2732 if (expand_sfix_optab (target, op0, builtin_optab))
2733 {
2734 /* Output the entire sequence. */
2735 insns = get_insns ();
2736 end_sequence ();
2737 emit_insn (insns);
2738 return target;
2739 }
2740
2741 /* If we were unable to expand via the builtin, stop the sequence
2742 (without outputting the insns). */
2743 end_sequence ();
2744
2745 /* Fall back to floating point rounding optab. */
2746 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2747
2748 /* For non-C99 targets we may end up without a fallback fndecl here
2749 if the user called __builtin_lfloor directly. In this case emit
2750 a call to the floor/ceil variants nevertheless. This should result
2751 in the best user experience on targets lacking full C99 support. */
2752 if (fallback_fndecl == NULL_TREE)
2753 {
2754 tree fntype;
2755 const char *name = NULL;
2756
2757 switch (DECL_FUNCTION_CODE (fndecl))
2758 {
2759 case BUILT_IN_ICEIL:
2760 case BUILT_IN_LCEIL:
2761 case BUILT_IN_LLCEIL:
2762 name = "ceil";
2763 break;
2764 case BUILT_IN_ICEILF:
2765 case BUILT_IN_LCEILF:
2766 case BUILT_IN_LLCEILF:
2767 name = "ceilf";
2768 break;
2769 case BUILT_IN_ICEILL:
2770 case BUILT_IN_LCEILL:
2771 case BUILT_IN_LLCEILL:
2772 name = "ceill";
2773 break;
2774 case BUILT_IN_IFLOOR:
2775 case BUILT_IN_LFLOOR:
2776 case BUILT_IN_LLFLOOR:
2777 name = "floor";
2778 break;
2779 case BUILT_IN_IFLOORF:
2780 case BUILT_IN_LFLOORF:
2781 case BUILT_IN_LLFLOORF:
2782 name = "floorf";
2783 break;
2784 case BUILT_IN_IFLOORL:
2785 case BUILT_IN_LFLOORL:
2786 case BUILT_IN_LLFLOORL:
2787 name = "floorl";
2788 break;
2789 default:
2790 gcc_unreachable ();
2791 }
2792
2793 fntype = build_function_type_list (TREE_TYPE (arg),
2794 TREE_TYPE (arg), NULL_TREE);
2795 fallback_fndecl = build_fn_decl (name, fntype);
2796 }
2797
2798 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2799
2800 tmp = expand_normal (exp);
2801 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2802
2803 /* Truncate the result of floating point optab to integer
2804 via expand_fix (). */
2805 target = gen_reg_rtx (mode);
2806 expand_fix (target, tmp, 0);
2807
2808 return target;
2809 }
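
/* Sketch of the fallback path above: on a target without an lceil/lfloor
   insn, a call such as __builtin_lfloor (x) is lowered to roughly

     (long) floor (x)

   by expanding a call to the floor variant and truncating the result
   with expand_fix.  */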
2810
2811 /* Expand a call to one of the builtin math functions doing integer
2812 conversion (lrint).
2813 Return 0 if a normal call should be emitted rather than expanding the
2814 function in-line. EXP is the expression that is a call to the builtin
2815 function; if convenient, the result should be placed in TARGET. */
2816
2817 static rtx
2818 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2819 {
2820 convert_optab builtin_optab;
2821 rtx op0, insns;
2822 tree fndecl = get_callee_fndecl (exp);
2823 tree arg;
2824 enum machine_mode mode;
2825 enum built_in_function fallback_fn = BUILT_IN_NONE;
2826
2827 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2828 gcc_unreachable ();
2829
2830 arg = CALL_EXPR_ARG (exp, 0);
2831
2832 switch (DECL_FUNCTION_CODE (fndecl))
2833 {
2834 CASE_FLT_FN (BUILT_IN_IRINT):
2835 fallback_fn = BUILT_IN_LRINT;
2836 /* FALLTHRU */
2837 CASE_FLT_FN (BUILT_IN_LRINT):
2838 CASE_FLT_FN (BUILT_IN_LLRINT):
2839 builtin_optab = lrint_optab;
2840 break;
2841
2842 CASE_FLT_FN (BUILT_IN_IROUND):
2843 fallback_fn = BUILT_IN_LROUND;
2844 /* FALLTHRU */
2845 CASE_FLT_FN (BUILT_IN_LROUND):
2846 CASE_FLT_FN (BUILT_IN_LLROUND):
2847 builtin_optab = lround_optab;
2848 break;
2849
2850 default:
2851 gcc_unreachable ();
2852 }
2853
2854 /* There's no easy way to detect the case we need to set EDOM. */
2855 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2856 return NULL_RTX;
2857
2858 /* Make a suitable register to place result in. */
2859 mode = TYPE_MODE (TREE_TYPE (exp));
2860
2861 /* When errno need not be maintained, try to expand in-line. */
2862 if (!flag_errno_math)
2863 {
2864 rtx result = gen_reg_rtx (mode);
2865
2866 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2867 need to expand the argument again. This way, we will not perform
2868 side-effects more than once. */
2869 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2870
2871 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2872
2873 start_sequence ();
2874
2875 if (expand_sfix_optab (result, op0, builtin_optab))
2876 {
2877 /* Output the entire sequence. */
2878 insns = get_insns ();
2879 end_sequence ();
2880 emit_insn (insns);
2881 return result;
2882 }
2883
2884 /* If we were unable to expand via the builtin, stop the sequence
2885 (without outputting the insns) and call to the library function
2886 with the stabilized argument list. */
2887 end_sequence ();
2888 }
2889
2890 if (fallback_fn != BUILT_IN_NONE)
2891 {
2892 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2893 targets, (int) round (x) should never be transformed into
2894 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2895 a call to lround in the hope that the target provides at least some
2896 C99 functions. This should result in the best user experience on
2897 targets lacking full C99 support. */
2898 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2899 fallback_fn, 0);
2900
2901 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2902 fallback_fndecl, 1, arg);
2903
2904 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2905 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2906 return convert_to_mode (mode, target, 0);
2907 }
2908
2909 return expand_call (exp, target, target == const0_rtx);
2910 }
2911
2912 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2913 a normal call should be emitted rather than expanding the function
2914 in-line. EXP is the expression that is a call to the builtin
2915 function; if convenient, the result should be placed in TARGET. */
2916
2917 static rtx
2918 expand_builtin_powi (tree exp, rtx target)
2919 {
2920 tree arg0, arg1;
2921 rtx op0, op1;
2922 enum machine_mode mode;
2923 enum machine_mode mode2;
2924
2925 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2926 return NULL_RTX;
2927
2928 arg0 = CALL_EXPR_ARG (exp, 0);
2929 arg1 = CALL_EXPR_ARG (exp, 1);
2930 mode = TYPE_MODE (TREE_TYPE (exp));
2931
2932 /* Emit a libcall to libgcc. */
2933
2934 /* Mode of the 2nd argument must match that of an int. */
2935 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2936
2937 if (target == NULL_RTX)
2938 target = gen_reg_rtx (mode);
2939
2940 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2941 if (GET_MODE (op0) != mode)
2942 op0 = convert_to_mode (mode, op0, 0);
2943 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2944 if (GET_MODE (op1) != mode2)
2945 op1 = convert_to_mode (mode2, op1, 0);
2946
2947 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2948 target, LCT_CONST, mode, 2,
2949 op0, mode, op1, mode2);
2950
2951 return target;
2952 }
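
/* For example, __builtin_powi (x, n) with X of type double becomes a
   libgcc call, conventionally __powidf2; the exponent is always passed
   in the mode of an int, as enforced above.  */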
2953
2954 /* Expand expression EXP, which is a call to the strlen builtin. Return
2955 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2956 try to get the result in TARGET, if convenient. */
2957
2958 static rtx
2959 expand_builtin_strlen (tree exp, rtx target,
2960 enum machine_mode target_mode)
2961 {
2962 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2963 return NULL_RTX;
2964 else
2965 {
2966 struct expand_operand ops[4];
2967 rtx pat;
2968 tree len;
2969 tree src = CALL_EXPR_ARG (exp, 0);
2970 rtx src_reg, before_strlen;
2971 enum machine_mode insn_mode = target_mode;
2972 enum insn_code icode = CODE_FOR_nothing;
2973 unsigned int align;
2974
2975 /* If the length can be computed at compile-time, return it. */
2976 len = c_strlen (src, 0);
2977 if (len)
2978 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2979
2980 /* If the length can be computed at compile-time and is a constant
2981 integer, but there are side-effects in src, evaluate
2982 src for side-effects, then return len.
2983 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2984 can be optimized into: i++; x = 3; */
2985 len = c_strlen (src, 1);
2986 if (len && TREE_CODE (len) == INTEGER_CST)
2987 {
2988 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2989 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2990 }
2991
2992 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2993
2994 /* If SRC is not a pointer type, don't do this operation inline. */
2995 if (align == 0)
2996 return NULL_RTX;
2997
2998 /* Bail out if we can't compute strlen in the right mode. */
2999 while (insn_mode != VOIDmode)
3000 {
3001 icode = optab_handler (strlen_optab, insn_mode);
3002 if (icode != CODE_FOR_nothing)
3003 break;
3004
3005 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3006 }
3007 if (insn_mode == VOIDmode)
3008 return NULL_RTX;
3009
3010 /* Make a place to hold the source address. We will not expand
3011 the actual source until we are sure that the expansion will
3012 not fail -- there are trees that cannot be expanded twice. */
3013 src_reg = gen_reg_rtx (Pmode);
3014
3015 /* Mark the beginning of the strlen sequence so we can emit the
3016 source operand later. */
3017 before_strlen = get_last_insn ();
3018
3019 create_output_operand (&ops[0], target, insn_mode);
3020 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3021 create_integer_operand (&ops[2], 0);
3022 create_integer_operand (&ops[3], align);
3023 if (!maybe_expand_insn (icode, 4, ops))
3024 return NULL_RTX;
3025
3026 /* Now that we are assured of success, expand the source. */
3027 start_sequence ();
3028 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3029 if (pat != src_reg)
3030 {
3031 #ifdef POINTERS_EXTEND_UNSIGNED
3032 if (GET_MODE (pat) != Pmode)
3033 pat = convert_to_mode (Pmode, pat,
3034 POINTERS_EXTEND_UNSIGNED);
3035 #endif
3036 emit_move_insn (src_reg, pat);
3037 }
3038 pat = get_insns ();
3039 end_sequence ();
3040
3041 if (before_strlen)
3042 emit_insn_after (pat, before_strlen);
3043 else
3044 emit_insn_before (pat, get_insns ());
3045
3046 /* Return the value in the proper mode for this function. */
3047 if (GET_MODE (ops[0].value) == target_mode)
3048 target = ops[0].value;
3049 else if (target != 0)
3050 convert_move (target, ops[0].value, 0);
3051 else
3052 target = convert_to_mode (target_mode, ops[0].value, 0);
3053
3054 return target;
3055 }
3056 }
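
/* Two example behaviors of the code above: strlen ("hello") folds to the
   constant 5 via c_strlen, while strlen (p) for a non-constant P is
   expanded through the target's strlen insn when one exists and
   otherwise remains a library call.  */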
3057
3058 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3059 bytes from constant string DATA + OFFSET and return them as a target
3060 constant. */
3061
3062 static rtx
3063 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3064 enum machine_mode mode)
3065 {
3066 const char *str = (const char *) data;
3067
3068 gcc_assert (offset >= 0
3069 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3070 <= strlen (str) + 1));
3071
3072 return c_readstr (str + offset, mode);
3073 }
3074
3075 /* Expand a call EXP to the memcpy builtin.
3076 Return NULL_RTX if we failed; the caller should emit a normal call,
3077 otherwise try to get the result in TARGET, if convenient (and in
3078 mode MODE if that's convenient). */
3079
3080 static rtx
3081 expand_builtin_memcpy (tree exp, rtx target)
3082 {
3083 if (!validate_arglist (exp,
3084 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3085 return NULL_RTX;
3086 else
3087 {
3088 tree dest = CALL_EXPR_ARG (exp, 0);
3089 tree src = CALL_EXPR_ARG (exp, 1);
3090 tree len = CALL_EXPR_ARG (exp, 2);
3091 const char *src_str;
3092 unsigned int src_align = get_pointer_alignment (src);
3093 unsigned int dest_align = get_pointer_alignment (dest);
3094 rtx dest_mem, src_mem, dest_addr, len_rtx;
3095 HOST_WIDE_INT expected_size = -1;
3096 unsigned int expected_align = 0;
3097
3098 /* If DEST is not a pointer type, call the normal function. */
3099 if (dest_align == 0)
3100 return NULL_RTX;
3101
3102 /* Likewise, if SRC is not a pointer type, don't do this
3103 operation in-line. */
3104 if (src_align == 0)
3105 return NULL_RTX;
3106
3107 if (currently_expanding_gimple_stmt)
3108 stringop_block_profile (currently_expanding_gimple_stmt,
3109 &expected_align, &expected_size);
3110
3111 if (expected_align < dest_align)
3112 expected_align = dest_align;
3113 dest_mem = get_memory_rtx (dest, len);
3114 set_mem_align (dest_mem, dest_align);
3115 len_rtx = expand_normal (len);
3116 src_str = c_getstr (src);
3117
3118 /* If SRC is a string constant and the block move would be done
3119 by pieces, we can avoid loading the string from memory
3120 and only store the computed constants. */
3121 if (src_str
3122 && CONST_INT_P (len_rtx)
3123 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3124 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3125 CONST_CAST (char *, src_str),
3126 dest_align, false))
3127 {
3128 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3129 builtin_memcpy_read_str,
3130 CONST_CAST (char *, src_str),
3131 dest_align, false, 0);
3132 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3133 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3134 return dest_mem;
3135 }
3136
3137 src_mem = get_memory_rtx (src, len);
3138 set_mem_align (src_mem, src_align);
3139
3140 /* Copy word part most expediently. */
3141 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3142 CALL_EXPR_TAILCALL (exp)
3143 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3144 expected_align, expected_size);
3145
3146 if (dest_addr == 0)
3147 {
3148 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3149 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3150 }
3151 return dest_addr;
3152 }
3153 }
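
/* Sketch: a call such as memcpy (buf, "hi", 3), with a constant source
   string and a small constant length, is expanded by store_by_pieces
   into immediate stores of the string's bytes, so the string constant
   is never loaded from memory.  */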
3154
3155 /* Expand a call EXP to the mempcpy builtin.
3156 Return NULL_RTX if we failed; the caller should emit a normal call,
3157 otherwise try to get the result in TARGET, if convenient (and in
3158 mode MODE if that's convenient). If ENDP is 0 return the
3159 destination pointer, if ENDP is 1 return the end pointer ala
3160 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3161 stpcpy. */
3162
3163 static rtx
3164 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3165 {
3166 if (!validate_arglist (exp,
3167 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3168 return NULL_RTX;
3169 else
3170 {
3171 tree dest = CALL_EXPR_ARG (exp, 0);
3172 tree src = CALL_EXPR_ARG (exp, 1);
3173 tree len = CALL_EXPR_ARG (exp, 2);
3174 return expand_builtin_mempcpy_args (dest, src, len,
3175 target, mode, /*endp=*/ 1);
3176 }
3177 }
3178
3179 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3180 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3181 so that this can also be called without constructing an actual CALL_EXPR.
3182 The other arguments and return value are the same as for
3183 expand_builtin_mempcpy. */
3184
3185 static rtx
3186 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3187 rtx target, enum machine_mode mode, int endp)
3188 {
3189 /* If the return value is ignored, transform mempcpy into memcpy. */
3190 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3191 {
3192 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3193 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3194 dest, src, len);
3195 return expand_expr (result, target, mode, EXPAND_NORMAL);
3196 }
3197 else
3198 {
3199 const char *src_str;
3200 unsigned int src_align = get_pointer_alignment (src);
3201 unsigned int dest_align = get_pointer_alignment (dest);
3202 rtx dest_mem, src_mem, len_rtx;
3203
3204 /* If either SRC or DEST is not a pointer type, don't do this
3205 operation in-line. */
3206 if (dest_align == 0 || src_align == 0)
3207 return NULL_RTX;
3208
3209 /* If LEN is not constant, call the normal function. */
3210 if (! host_integerp (len, 1))
3211 return NULL_RTX;
3212
3213 len_rtx = expand_normal (len);
3214 src_str = c_getstr (src);
3215
3216 /* If SRC is a string constant and the block move would be done
3217 by pieces, we can avoid loading the string from memory
3218 and only store the computed constants. */
3219 if (src_str
3220 && CONST_INT_P (len_rtx)
3221 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3222 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3223 CONST_CAST (char *, src_str),
3224 dest_align, false))
3225 {
3226 dest_mem = get_memory_rtx (dest, len);
3227 set_mem_align (dest_mem, dest_align);
3228 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3229 builtin_memcpy_read_str,
3230 CONST_CAST (char *, src_str),
3231 dest_align, false, endp);
3232 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3233 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3234 return dest_mem;
3235 }
3236
3237 if (CONST_INT_P (len_rtx)
3238 && can_move_by_pieces (INTVAL (len_rtx),
3239 MIN (dest_align, src_align)))
3240 {
3241 dest_mem = get_memory_rtx (dest, len);
3242 set_mem_align (dest_mem, dest_align);
3243 src_mem = get_memory_rtx (src, len);
3244 set_mem_align (src_mem, src_align);
3245 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3246 MIN (dest_align, src_align), endp);
3247 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3248 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3249 return dest_mem;
3250 }
3251
3252 return NULL_RTX;
3253 }
3254 }
3255
3256 #ifndef HAVE_movstr
3257 # define HAVE_movstr 0
3258 # define CODE_FOR_movstr CODE_FOR_nothing
3259 #endif
3260
3261 /* Expand into a movstr instruction, if one is available. Return NULL_RTX
3262 if we failed; the caller should emit a normal call, otherwise try to
3263 get the result in TARGET, if convenient. If ENDP is 0 return the
3264 destination pointer, if ENDP is 1 return the end pointer ala
3265 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3266 stpcpy. */
3267
3268 static rtx
3269 expand_movstr (tree dest, tree src, rtx target, int endp)
3270 {
3271 struct expand_operand ops[3];
3272 rtx dest_mem;
3273 rtx src_mem;
3274
3275 if (!HAVE_movstr)
3276 return NULL_RTX;
3277
3278 dest_mem = get_memory_rtx (dest, NULL);
3279 src_mem = get_memory_rtx (src, NULL);
3280 if (!endp)
3281 {
3282 target = force_reg (Pmode, XEXP (dest_mem, 0));
3283 dest_mem = replace_equiv_address (dest_mem, target);
3284 }
3285
3286 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3287 create_fixed_operand (&ops[1], dest_mem);
3288 create_fixed_operand (&ops[2], src_mem);
3289 expand_insn (CODE_FOR_movstr, 3, ops);
3290
3291 if (endp && target != const0_rtx)
3292 {
3293 target = ops[0].value;
3294 /* movstr is supposed to set end to the address of the NUL
3295 terminator. If the caller requested a mempcpy-like return value,
3296 adjust it. */
3297 if (endp == 1)
3298 {
3299 rtx tem = plus_constant (GET_MODE (target),
3300 gen_lowpart (GET_MODE (target), target), 1);
3301 emit_move_insn (target, force_operand (tem, NULL_RTX));
3302 }
3303 }
3304 return target;
3305 }
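
/* As a concrete illustration of the three ENDP conventions, the
   corresponding C library calls return (assuming <string.h> declares
   the GNU extensions mempcpy and stpcpy):

     char buf[8];
     char *p0 = strcpy (buf, "abc");      // ENDP 0: p0 == buf
     char *p1 = mempcpy (buf, "abc", 4);  // ENDP 1: p1 == buf + 4
     char *p2 = stpcpy (buf, "abc");      // ENDP 2: p2 == buf + 3 (the NUL)

   This sketch is only illustrative; the expanders here operate on
   trees and RTL, not on these library calls directly. */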
3306
3307 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3308 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3309 try to get the result in TARGET, if convenient (and in mode MODE if that's
3310 convenient). */
3311
3312 static rtx
3313 expand_builtin_strcpy (tree exp, rtx target)
3314 {
3315 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3316 {
3317 tree dest = CALL_EXPR_ARG (exp, 0);
3318 tree src = CALL_EXPR_ARG (exp, 1);
3319 return expand_builtin_strcpy_args (dest, src, target);
3320 }
3321 return NULL_RTX;
3322 }
3323
3324 /* Helper function to do the actual work for expand_builtin_strcpy. The
3325 arguments to the builtin_strcpy call DEST and SRC are broken out
3326 so that this can also be called without constructing an actual CALL_EXPR.
3327 The other arguments and return value are the same as for
3328 expand_builtin_strcpy. */
3329
3330 static rtx
3331 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3332 {
3333 return expand_movstr (dest, src, target, /*endp=*/0);
3334 }
3335
3336 /* Expand a call EXP to the stpcpy builtin.
3337 Return NULL_RTX if we failed; the caller should emit a normal call.
3338 Otherwise try to get the result in TARGET, if convenient (and in
3339 mode MODE if that's convenient). */
3340
3341 static rtx
3342 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3343 {
3344 tree dst, src;
3345 location_t loc = EXPR_LOCATION (exp);
3346
3347 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3348 return NULL_RTX;
3349
3350 dst = CALL_EXPR_ARG (exp, 0);
3351 src = CALL_EXPR_ARG (exp, 1);
3352
3353 /* If the return value is ignored, transform stpcpy into strcpy. */
3354 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3355 {
3356 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3357 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3358 return expand_expr (result, target, mode, EXPAND_NORMAL);
3359 }
3360 else
3361 {
3362 tree len, lenp1;
3363 rtx ret;
3364
3365 /* Ensure we get an actual string whose length can be evaluated at
3366 compile-time, not an expression containing a string. This is
3367 because the latter will potentially produce pessimized code
3368 when used to produce the return value. */
3369 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3370 return expand_movstr (dst, src, target, /*endp=*/2);
3371
3372 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3373 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3374 target, mode, /*endp=*/2);
3375
3376 if (ret)
3377 return ret;
3378
3379 if (TREE_CODE (len) == INTEGER_CST)
3380 {
3381 rtx len_rtx = expand_normal (len);
3382
3383 if (CONST_INT_P (len_rtx))
3384 {
3385 ret = expand_builtin_strcpy_args (dst, src, target);
3386
3387 if (ret)
3388 {
3389 if (! target)
3390 {
3391 if (mode != VOIDmode)
3392 target = gen_reg_rtx (mode);
3393 else
3394 target = gen_reg_rtx (GET_MODE (ret));
3395 }
3396 if (GET_MODE (target) != GET_MODE (ret))
3397 ret = gen_lowpart (GET_MODE (target), ret);
3398
3399 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3400 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3401 gcc_assert (ret);
3402
3403 return target;
3404 }
3405 }
3406 }
3407
3408 return expand_movstr (dst, src, target, /*endp=*/2);
3409 }
3410 }
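
/* The mempcpy path above relies on the identity (for SRC of known length)

     stpcpy (dst, src) == mempcpy (dst, src, strlen (src) + 1) - 1

   e.g. for src == "hi", mempcpy copies three bytes and returns dst + 3,
   while stpcpy must return dst + 2, the address of the copied NUL;
   ENDP == 2 requests exactly that end-pointer-minus-one result. */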
3411
3412 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3413 bytes from constant string DATA + OFFSET and return it as target
3414 constant. */
3415
3416 rtx
3417 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3418 enum machine_mode mode)
3419 {
3420 const char *str = (const char *) data;
3421
3422 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3423 return const0_rtx;
3424
3425 return c_readstr (str + offset, mode);
3426 }
3427
3428 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3429 NULL_RTX if we failed; the caller should emit a normal call. */
3430
3431 static rtx
3432 expand_builtin_strncpy (tree exp, rtx target)
3433 {
3434 location_t loc = EXPR_LOCATION (exp);
3435
3436 if (validate_arglist (exp,
3437 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3438 {
3439 tree dest = CALL_EXPR_ARG (exp, 0);
3440 tree src = CALL_EXPR_ARG (exp, 1);
3441 tree len = CALL_EXPR_ARG (exp, 2);
3442 tree slen = c_strlen (src, 1);
3443
3444 /* LEN must be constant and the length of SRC must be known. */
3445 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3446 return NULL_RTX;
3447
3448 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3449
3450 /* We're required to pad with trailing zeros if the requested
3451 len is greater than strlen(s2)+1. In that case try to
3452 use store_by_pieces; if that fails, punt. */
3453 if (tree_int_cst_lt (slen, len))
3454 {
3455 unsigned int dest_align = get_pointer_alignment (dest);
3456 const char *p = c_getstr (src);
3457 rtx dest_mem;
3458
3459 if (!p || dest_align == 0 || !host_integerp (len, 1)
3460 || !can_store_by_pieces (tree_low_cst (len, 1),
3461 builtin_strncpy_read_str,
3462 CONST_CAST (char *, p),
3463 dest_align, false))
3464 return NULL_RTX;
3465
3466 dest_mem = get_memory_rtx (dest, len);
3467 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3468 builtin_strncpy_read_str,
3469 CONST_CAST (char *, p), dest_align, false, 0);
3470 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3471 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3472 return dest_mem;
3473 }
3474 }
3475 return NULL_RTX;
3476 }
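
/* The padding rule handled above is the one C99 7.21.2.4 specifies for
   strncpy: when LEN exceeds strlen (SRC) the destination tail is
   zero-filled, e.g.

     char buf[8];
     strncpy (buf, "ab", 8);   // buf now holds "ab" plus six NUL bytes

   so the whole LEN-byte block can come from store_by_pieces, with reads
   past the end of the constant string yielding zero bytes (see
   builtin_strncpy_read_str above). */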
3477
3478 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3479 bytes from constant string DATA + OFFSET and return it as target
3480 constant. */
3481
3482 rtx
3483 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3484 enum machine_mode mode)
3485 {
3486 const char *c = (const char *) data;
3487 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3488
3489 memset (p, *c, GET_MODE_SIZE (mode));
3490
3491 return c_readstr (p, mode);
3492 }
3493
3494 /* Callback routine for store_by_pieces. Return the RTL of a register
3495 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3496 char value given in the RTL register data. For example, if mode is
3497 4 bytes wide, return the RTL for 0x01010101*data. */
3498
3499 static rtx
3500 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3501 enum machine_mode mode)
3502 {
3503 rtx target, coeff;
3504 size_t size;
3505 char *p;
3506
3507 size = GET_MODE_SIZE (mode);
3508 if (size == 1)
3509 return (rtx) data;
3510
3511 p = XALLOCAVEC (char, size);
3512 memset (p, 1, size);
3513 coeff = c_readstr (p, mode);
3514
3515 target = convert_to_mode (mode, (rtx) data, 1);
3516 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3517 return force_reg (mode, target);
3518 }
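
/* The multiplication trick generalizes to any mode size: a coefficient
   with one 0x01 byte per byte of the mode replicates the zero-extended
   fill value across the word, e.g. for a 4-byte mode

     0x000000ab * 0x01010101 == 0xabababab

   and c_readstr builds that coefficient from a buffer of 0x01 bytes so
   its byte order matches the target's. */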
3519
3520 /* Expand expression EXP, which is a call to the memset builtin. Return
3521 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3522 try to get the result in TARGET, if convenient (and in mode MODE if that's
3523 convenient). */
3524
3525 static rtx
3526 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3527 {
3528 if (!validate_arglist (exp,
3529 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3530 return NULL_RTX;
3531 else
3532 {
3533 tree dest = CALL_EXPR_ARG (exp, 0);
3534 tree val = CALL_EXPR_ARG (exp, 1);
3535 tree len = CALL_EXPR_ARG (exp, 2);
3536 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3537 }
3538 }
3539
3540 /* Helper function to do the actual work for expand_builtin_memset. The
3541 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3542 so that this can also be called without constructing an actual CALL_EXPR.
3543 The other arguments and return value are the same as for
3544 expand_builtin_memset. */
3545
3546 static rtx
3547 expand_builtin_memset_args (tree dest, tree val, tree len,
3548 rtx target, enum machine_mode mode, tree orig_exp)
3549 {
3550 tree fndecl, fn;
3551 enum built_in_function fcode;
3552 enum machine_mode val_mode;
3553 char c;
3554 unsigned int dest_align;
3555 rtx dest_mem, dest_addr, len_rtx;
3556 HOST_WIDE_INT expected_size = -1;
3557 unsigned int expected_align = 0;
3558
3559 dest_align = get_pointer_alignment (dest);
3560
3561 /* If DEST is not a pointer type, don't do this operation in-line. */
3562 if (dest_align == 0)
3563 return NULL_RTX;
3564
3565 if (currently_expanding_gimple_stmt)
3566 stringop_block_profile (currently_expanding_gimple_stmt,
3567 &expected_align, &expected_size);
3568
3569 if (expected_align < dest_align)
3570 expected_align = dest_align;
3571
3572 /* If the LEN parameter is zero, return DEST. */
3573 if (integer_zerop (len))
3574 {
3575 /* Evaluate and ignore VAL in case it has side-effects. */
3576 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3577 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3578 }
3579
3580 /* Stabilize the arguments in case we fail. */
3581 dest = builtin_save_expr (dest);
3582 val = builtin_save_expr (val);
3583 len = builtin_save_expr (len);
3584
3585 len_rtx = expand_normal (len);
3586 dest_mem = get_memory_rtx (dest, len);
3587 val_mode = TYPE_MODE (unsigned_char_type_node);
3588
3589 if (TREE_CODE (val) != INTEGER_CST)
3590 {
3591 rtx val_rtx;
3592
3593 val_rtx = expand_normal (val);
3594 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3595
3596 /* Assume that we can memset by pieces if we can store
3597 the coefficients by pieces (in the required modes).
3598 We can't pass builtin_memset_gen_str as that emits RTL. */
3599 c = 1;
3600 if (host_integerp (len, 1)
3601 && can_store_by_pieces (tree_low_cst (len, 1),
3602 builtin_memset_read_str, &c, dest_align,
3603 true))
3604 {
3605 val_rtx = force_reg (val_mode, val_rtx);
3606 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3607 builtin_memset_gen_str, val_rtx, dest_align,
3608 true, 0);
3609 }
3610 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3611 dest_align, expected_align,
3612 expected_size))
3613 goto do_libcall;
3614
3615 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3616 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3617 return dest_mem;
3618 }
3619
3620 if (target_char_cast (val, &c))
3621 goto do_libcall;
3622
3623 if (c)
3624 {
3625 if (host_integerp (len, 1)
3626 && can_store_by_pieces (tree_low_cst (len, 1),
3627 builtin_memset_read_str, &c, dest_align,
3628 true))
3629 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3630 builtin_memset_read_str, &c, dest_align, true, 0);
3631 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3632 gen_int_mode (c, val_mode),
3633 dest_align, expected_align,
3634 expected_size))
3635 goto do_libcall;
3636
3637 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3638 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3639 return dest_mem;
3640 }
3641
3642 set_mem_align (dest_mem, dest_align);
3643 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3644 CALL_EXPR_TAILCALL (orig_exp)
3645 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3646 expected_align, expected_size);
3647
3648 if (dest_addr == 0)
3649 {
3650 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3651 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3652 }
3653
3654 return dest_addr;
3655
3656 do_libcall:
3657 fndecl = get_callee_fndecl (orig_exp);
3658 fcode = DECL_FUNCTION_CODE (fndecl);
3659 if (fcode == BUILT_IN_MEMSET)
3660 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3661 dest, val, len);
3662 else if (fcode == BUILT_IN_BZERO)
3663 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3664 dest, len);
3665 else
3666 gcc_unreachable ();
3667 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3668 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3669 return expand_call (fn, target, target == const0_rtx);
3670 }
3671
3672 /* Expand expression EXP, which is a call to the bzero builtin. Return
3673 NULL_RTX if we failed; the caller should emit a normal call. */
3674
3675 static rtx
3676 expand_builtin_bzero (tree exp)
3677 {
3678 tree dest, size;
3679 location_t loc = EXPR_LOCATION (exp);
3680
3681 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3682 return NULL_RTX;
3683
3684 dest = CALL_EXPR_ARG (exp, 0);
3685 size = CALL_EXPR_ARG (exp, 1);
3686
3687 /* New argument list transforming bzero(ptr x, int y) to
3688 memset(ptr x, int 0, size_t y). This is done this way
3689 so that if it isn't expanded inline, we fall back to
3690 calling bzero instead of memset. */
3691
3692 return expand_builtin_memset_args (dest, integer_zero_node,
3693 fold_convert_loc (loc,
3694 size_type_node, size),
3695 const0_rtx, VOIDmode, exp);
3696 }
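
/* In source terms the rewrite is

     bzero (p, n);            // expanded as if it were
     memset (p, 0, (size_t) n);

   while ORIG_EXP still names the bzero call, so that if inline expansion
   fails, the library call emitted by the do_libcall path is bzero. */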
3697
3698 /* Expand expression EXP, which is a call to the memcmp built-in function.
3699 Return NULL_RTX if we failed and the caller should emit a normal call,
3700 otherwise try to get the result in TARGET, if convenient (and in mode
3701 MODE, if that's convenient). */
3702
3703 static rtx
3704 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3705 ATTRIBUTE_UNUSED enum machine_mode mode)
3706 {
3707 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3708
3709 if (!validate_arglist (exp,
3710 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3711 return NULL_RTX;
3712
3713 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3714 implementing memcmp because it will stop if it encounters two
3715 zero bytes. */
3716 #if defined HAVE_cmpmemsi
3717 {
3718 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3719 rtx result;
3720 rtx insn;
3721 tree arg1 = CALL_EXPR_ARG (exp, 0);
3722 tree arg2 = CALL_EXPR_ARG (exp, 1);
3723 tree len = CALL_EXPR_ARG (exp, 2);
3724
3725 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3726 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3727 enum machine_mode insn_mode;
3728
3729 if (HAVE_cmpmemsi)
3730 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3731 else
3732 return NULL_RTX;
3733
3734 /* If either argument is not a pointer type, emit a normal call. */
3735 if (arg1_align == 0 || arg2_align == 0)
3736 return NULL_RTX;
3737
3738 /* Make a place to write the result of the instruction. */
3739 result = target;
3740 if (! (result != 0
3741 && REG_P (result) && GET_MODE (result) == insn_mode
3742 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3743 result = gen_reg_rtx (insn_mode);
3744
3745 arg1_rtx = get_memory_rtx (arg1, len);
3746 arg2_rtx = get_memory_rtx (arg2, len);
3747 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3748
3749 /* Set MEM_SIZE as appropriate. */
3750 if (CONST_INT_P (arg3_rtx))
3751 {
3752 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3753 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3754 }
3755
3756 if (HAVE_cmpmemsi)
3757 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3758 GEN_INT (MIN (arg1_align, arg2_align)));
3759 else
3760 gcc_unreachable ();
3761
3762 if (insn)
3763 emit_insn (insn);
3764 else
3765 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3766 TYPE_MODE (integer_type_node), 3,
3767 XEXP (arg1_rtx, 0), Pmode,
3768 XEXP (arg2_rtx, 0), Pmode,
3769 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3770 TYPE_UNSIGNED (sizetype)),
3771 TYPE_MODE (sizetype));
3772
3773 /* Return the value in the proper mode for this function. */
3774 mode = TYPE_MODE (TREE_TYPE (exp));
3775 if (GET_MODE (result) == mode)
3776 return result;
3777 else if (target != 0)
3778 {
3779 convert_move (target, result, 0);
3780 return target;
3781 }
3782 else
3783 return convert_to_mode (mode, result, 0);
3784 }
3785 #endif /* HAVE_cmpmemsi. */
3786
3787 return NULL_RTX;
3788 }
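
/* Concretely: memcmp ("a\0b", "a\0c", 3) must examine all three bytes
   and return nonzero, but a cmpstrnsi-style comparison would stop at
   the matching NULs in byte 1 and wrongly report equality, which is why
   only cmpmemsi is usable here. */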
3789
3790 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3791 if we failed; the caller should emit a normal call. Otherwise try to get
3792 the result in TARGET, if convenient. */
3793
3794 static rtx
3795 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3796 {
3797 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3798 return NULL_RTX;
3799
3800 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3801 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3802 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3803 {
3804 rtx arg1_rtx, arg2_rtx;
3805 rtx result, insn = NULL_RTX;
3806 tree fndecl, fn;
3807 tree arg1 = CALL_EXPR_ARG (exp, 0);
3808 tree arg2 = CALL_EXPR_ARG (exp, 1);
3809
3810 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3811 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3812
3813 /* If either argument is not a pointer type, emit a normal call. */
3814 if (arg1_align == 0 || arg2_align == 0)
3815 return NULL_RTX;
3816
3817 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3818 arg1 = builtin_save_expr (arg1);
3819 arg2 = builtin_save_expr (arg2);
3820
3821 arg1_rtx = get_memory_rtx (arg1, NULL);
3822 arg2_rtx = get_memory_rtx (arg2, NULL);
3823
3824 #ifdef HAVE_cmpstrsi
3825 /* Try to call cmpstrsi. */
3826 if (HAVE_cmpstrsi)
3827 {
3828 enum machine_mode insn_mode
3829 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3830
3831 /* Make a place to write the result of the instruction. */
3832 result = target;
3833 if (! (result != 0
3834 && REG_P (result) && GET_MODE (result) == insn_mode
3835 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3836 result = gen_reg_rtx (insn_mode);
3837
3838 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3839 GEN_INT (MIN (arg1_align, arg2_align)));
3840 }
3841 #endif
3842 #ifdef HAVE_cmpstrnsi
3843 /* Try to determine at least one length and call cmpstrnsi. */
3844 if (!insn && HAVE_cmpstrnsi)
3845 {
3846 tree len;
3847 rtx arg3_rtx;
3848
3849 enum machine_mode insn_mode
3850 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3851 tree len1 = c_strlen (arg1, 1);
3852 tree len2 = c_strlen (arg2, 1);
3853
3854 if (len1)
3855 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3856 if (len2)
3857 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3858
3859 /* If we don't have a constant length for the first, use the length
3860 of the second, if we know it. We don't require a constant for
3861 this case; some cost analysis could be done if both are available
3862 but neither is constant. For now, assume they're equally cheap,
3863 unless one has side effects. If both strings have constant lengths,
3864 use the smaller. */
3865
3866 if (!len1)
3867 len = len2;
3868 else if (!len2)
3869 len = len1;
3870 else if (TREE_SIDE_EFFECTS (len1))
3871 len = len2;
3872 else if (TREE_SIDE_EFFECTS (len2))
3873 len = len1;
3874 else if (TREE_CODE (len1) != INTEGER_CST)
3875 len = len2;
3876 else if (TREE_CODE (len2) != INTEGER_CST)
3877 len = len1;
3878 else if (tree_int_cst_lt (len1, len2))
3879 len = len1;
3880 else
3881 len = len2;
3882
3883 /* If both arguments have side effects, we cannot optimize. */
3884 if (!len || TREE_SIDE_EFFECTS (len))
3885 goto do_libcall;
3886
3887 arg3_rtx = expand_normal (len);
3888
3889 /* Make a place to write the result of the instruction. */
3890 result = target;
3891 if (! (result != 0
3892 && REG_P (result) && GET_MODE (result) == insn_mode
3893 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3894 result = gen_reg_rtx (insn_mode);
3895
3896 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3897 GEN_INT (MIN (arg1_align, arg2_align)));
3898 }
3899 #endif
3900
3901 if (insn)
3902 {
3903 enum machine_mode mode;
3904 emit_insn (insn);
3905
3906 /* Return the value in the proper mode for this function. */
3907 mode = TYPE_MODE (TREE_TYPE (exp));
3908 if (GET_MODE (result) == mode)
3909 return result;
3910 if (target == 0)
3911 return convert_to_mode (mode, result, 0);
3912 convert_move (target, result, 0);
3913 return target;
3914 }
3915
3916 /* Expand the library call ourselves using a stabilized argument
3917 list to avoid re-evaluating the function's arguments twice. */
3918 #ifdef HAVE_cmpstrnsi
3919 do_libcall:
3920 #endif
3921 fndecl = get_callee_fndecl (exp);
3922 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3923 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3924 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3925 return expand_call (fn, target, target == const0_rtx);
3926 }
3927 #endif
3928 return NULL_RTX;
3929 }
3930
3931 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3932 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3933 try to get the result in TARGET, if convenient. */
3934
3935 static rtx
3936 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3937 ATTRIBUTE_UNUSED enum machine_mode mode)
3938 {
3939 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3940
3941 if (!validate_arglist (exp,
3942 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3943 return NULL_RTX;
3944
3945 /* If c_strlen can determine an expression for one of the string
3946 lengths, and it doesn't have side effects, then emit cmpstrnsi
3947 using length MIN(strlen(string)+1, arg3). */
3948 #ifdef HAVE_cmpstrnsi
3949 if (HAVE_cmpstrnsi)
3950 {
3951 tree len, len1, len2;
3952 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3953 rtx result, insn;
3954 tree fndecl, fn;
3955 tree arg1 = CALL_EXPR_ARG (exp, 0);
3956 tree arg2 = CALL_EXPR_ARG (exp, 1);
3957 tree arg3 = CALL_EXPR_ARG (exp, 2);
3958
3959 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3960 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3961 enum machine_mode insn_mode
3962 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3963
3964 len1 = c_strlen (arg1, 1);
3965 len2 = c_strlen (arg2, 1);
3966
3967 if (len1)
3968 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3969 if (len2)
3970 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3971
3972 /* If we don't have a constant length for the first, use the length
3973 of the second, if we know it. We don't require a constant for
3974 this case; some cost analysis could be done if both are available
3975 but neither is constant. For now, assume they're equally cheap,
3976 unless one has side effects. If both strings have constant lengths,
3977 use the smaller. */
3978
3979 if (!len1)
3980 len = len2;
3981 else if (!len2)
3982 len = len1;
3983 else if (TREE_SIDE_EFFECTS (len1))
3984 len = len2;
3985 else if (TREE_SIDE_EFFECTS (len2))
3986 len = len1;
3987 else if (TREE_CODE (len1) != INTEGER_CST)
3988 len = len2;
3989 else if (TREE_CODE (len2) != INTEGER_CST)
3990 len = len1;
3991 else if (tree_int_cst_lt (len1, len2))
3992 len = len1;
3993 else
3994 len = len2;
3995
3996 /* If both arguments have side effects, we cannot optimize. */
3997 if (!len || TREE_SIDE_EFFECTS (len))
3998 return NULL_RTX;
3999
4000 /* The actual new length parameter is MIN(len,arg3). */
4001 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4002 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4003
4004 /* If either argument is not a pointer type, emit a normal call. */
4005 if (arg1_align == 0 || arg2_align == 0)
4006 return NULL_RTX;
4007
4008 /* Make a place to write the result of the instruction. */
4009 result = target;
4010 if (! (result != 0
4011 && REG_P (result) && GET_MODE (result) == insn_mode
4012 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4013 result = gen_reg_rtx (insn_mode);
4014
4015 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4016 arg1 = builtin_save_expr (arg1);
4017 arg2 = builtin_save_expr (arg2);
4018 len = builtin_save_expr (len);
4019
4020 arg1_rtx = get_memory_rtx (arg1, len);
4021 arg2_rtx = get_memory_rtx (arg2, len);
4022 arg3_rtx = expand_normal (len);
4023 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4024 GEN_INT (MIN (arg1_align, arg2_align)));
4025 if (insn)
4026 {
4027 emit_insn (insn);
4028
4029 /* Return the value in the proper mode for this function. */
4030 mode = TYPE_MODE (TREE_TYPE (exp));
4031 if (GET_MODE (result) == mode)
4032 return result;
4033 if (target == 0)
4034 return convert_to_mode (mode, result, 0);
4035 convert_move (target, result, 0);
4036 return target;
4037 }
4038
4039 /* Expand the library call ourselves using a stabilized argument
4040 list to avoid re-evaluating the function's arguments twice. */
4041 fndecl = get_callee_fndecl (exp);
4042 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4043 arg1, arg2, len);
4044 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4045 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4046 return expand_call (fn, target, target == const0_rtx);
4047 }
4048 #endif
4049 return NULL_RTX;
4050 }
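
/* A worked instance of the length clamp above: for

     strncmp ("abc", s, 100)

   len1 == strlen ("abc") + 1 == 4, so the comparison length becomes
   MIN (4, 100) == 4; bytes past the NUL of the constant string cannot
   affect the result, so only four bytes need comparing. */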
4051
4052 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4053 if that's convenient. */
4054
4055 rtx
4056 expand_builtin_saveregs (void)
4057 {
4058 rtx val, seq;
4059
4060 /* Don't do __builtin_saveregs more than once in a function.
4061 Save the result of the first call and reuse it. */
4062 if (saveregs_value != 0)
4063 return saveregs_value;
4064
4065 /* When this function is called, it means that registers must be
4066 saved on entry to this function. So we migrate the call to the
4067 first insn of this function. */
4068
4069 start_sequence ();
4070
4071 /* Do whatever the machine needs done in this case. */
4072 val = targetm.calls.expand_builtin_saveregs ();
4073
4074 seq = get_insns ();
4075 end_sequence ();
4076
4077 saveregs_value = val;
4078
4079 /* Put the insns after the NOTE that starts the function. If this
4080 is inside a start_sequence, make the outer-level insn chain current, so
4081 the code is placed at the start of the function. */
4082 push_topmost_sequence ();
4083 emit_insn_after (seq, entry_of_function ());
4084 pop_topmost_sequence ();
4085
4086 return val;
4087 }
4088
4089 /* Expand a call to __builtin_next_arg. */
4090
4091 static rtx
4092 expand_builtin_next_arg (void)
4093 {
4094 /* Checking arguments is already done in fold_builtin_next_arg
4095 that must be called before this function. */
4096 return expand_binop (ptr_mode, add_optab,
4097 crtl->args.internal_arg_pointer,
4098 crtl->args.arg_offset_rtx,
4099 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4100 }
4101
4102 /* Make it easier for the backends by protecting the valist argument
4103 from multiple evaluations. */
4104
4105 static tree
4106 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4107 {
4108 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4109
4110 /* The current way of determining the type of valist is completely
4111 bogus. We should have the information on the va builtin instead. */
4112 if (!vatype)
4113 vatype = targetm.fn_abi_va_list (cfun->decl);
4114
4115 if (TREE_CODE (vatype) == ARRAY_TYPE)
4116 {
4117 if (TREE_SIDE_EFFECTS (valist))
4118 valist = save_expr (valist);
4119
4120 /* For this case, the backends will be expecting a pointer to
4121 vatype, but it's possible we've actually been given an array
4122 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4123 So fix it. */
4124 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4125 {
4126 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4127 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4128 }
4129 }
4130 else
4131 {
4132 tree pt = build_pointer_type (vatype);
4133
4134 if (! needs_lvalue)
4135 {
4136 if (! TREE_SIDE_EFFECTS (valist))
4137 return valist;
4138
4139 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4140 TREE_SIDE_EFFECTS (valist) = 1;
4141 }
4142
4143 if (TREE_SIDE_EFFECTS (valist))
4144 valist = save_expr (valist);
4145 valist = fold_build2_loc (loc, MEM_REF,
4146 vatype, valist, build_int_cst (pt, 0));
4147 }
4148
4149 return valist;
4150 }
4151
4152 /* The "standard" definition of va_list is void*. */
4153
4154 tree
4155 std_build_builtin_va_list (void)
4156 {
4157 return ptr_type_node;
4158 }
4159
4160 /* The "standard" abi va_list is va_list_type_node. */
4161
4162 tree
4163 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4164 {
4165 return va_list_type_node;
4166 }
4167
4168 /* The "standard" type of va_list is va_list_type_node. */
4169
4170 tree
4171 std_canonical_va_list_type (tree type)
4172 {
4173 tree wtype, htype;
4174
4175 if (INDIRECT_REF_P (type))
4176 type = TREE_TYPE (type);
4177 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4178 type = TREE_TYPE (type);
4179 wtype = va_list_type_node;
4180 htype = type;
4181 /* Treat structure va_list types. */
4182 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4183 htype = TREE_TYPE (htype);
4184 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4185 {
4186 /* If va_list is an array type, the argument may have decayed
4187 to a pointer type, e.g. by being passed to another function.
4188 In that case, unwrap both types so that we can compare the
4189 underlying records. */
4190 if (TREE_CODE (htype) == ARRAY_TYPE
4191 || POINTER_TYPE_P (htype))
4192 {
4193 wtype = TREE_TYPE (wtype);
4194 htype = TREE_TYPE (htype);
4195 }
4196 }
4197 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4198 return va_list_type_node;
4199
4200 return NULL_TREE;
4201 }
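
/* A sketch of the decay case handled above: on targets such as x86-64,

     typedef struct __va_list_tag va_list[1];

   so a parameter declared "va_list ap" decays to "struct __va_list_tag *"
   when passed along, and both the canonical and the actual type must be
   unwrapped to the underlying record before they compare equal. */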
4202
4203 /* The "standard" implementation of va_start: just assign `nextarg' to
4204 the variable. */
4205
4206 void
4207 std_expand_builtin_va_start (tree valist, rtx nextarg)
4208 {
4209 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4210 convert_move (va_r, nextarg, 0);
4211 }
4212
4213 /* Expand EXP, a call to __builtin_va_start. */
4214
4215 static rtx
4216 expand_builtin_va_start (tree exp)
4217 {
4218 rtx nextarg;
4219 tree valist;
4220 location_t loc = EXPR_LOCATION (exp);
4221
4222 if (call_expr_nargs (exp) < 2)
4223 {
4224 error_at (loc, "too few arguments to function %<va_start%>");
4225 return const0_rtx;
4226 }
4227
4228 if (fold_builtin_next_arg (exp, true))
4229 return const0_rtx;
4230
4231 nextarg = expand_builtin_next_arg ();
4232 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4233
4234 if (targetm.expand_builtin_va_start)
4235 targetm.expand_builtin_va_start (valist, nextarg);
4236 else
4237 std_expand_builtin_va_start (valist, nextarg);
4238
4239 return const0_rtx;
4240 }
4241
4242 /* Expand EXP, a call to __builtin_va_end. */
4243
4244 static rtx
4245 expand_builtin_va_end (tree exp)
4246 {
4247 tree valist = CALL_EXPR_ARG (exp, 0);
4248
4249 /* Evaluate for side effects, if needed. I hate macros that don't
4250 do that. */
4251 if (TREE_SIDE_EFFECTS (valist))
4252 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4253
4254 return const0_rtx;
4255 }
4256
4257 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4258 builtin rather than just as an assignment in stdarg.h because of the
4259 nastiness of array-type va_list types. */
4260
4261 static rtx
4262 expand_builtin_va_copy (tree exp)
4263 {
4264 tree dst, src, t;
4265 location_t loc = EXPR_LOCATION (exp);
4266
4267 dst = CALL_EXPR_ARG (exp, 0);
4268 src = CALL_EXPR_ARG (exp, 1);
4269
4270 dst = stabilize_va_list_loc (loc, dst, 1);
4271 src = stabilize_va_list_loc (loc, src, 0);
4272
4273 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4274
4275 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4276 {
4277 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4278 TREE_SIDE_EFFECTS (t) = 1;
4279 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4280 }
4281 else
4282 {
4283 rtx dstb, srcb, size;
4284
4285 /* Evaluate to pointers. */
4286 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4287 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4288 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4289 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4290
4291 dstb = convert_memory_address (Pmode, dstb);
4292 srcb = convert_memory_address (Pmode, srcb);
4293
4294 /* "Dereference" to BLKmode memories. */
4295 dstb = gen_rtx_MEM (BLKmode, dstb);
4296 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4297 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4298 srcb = gen_rtx_MEM (BLKmode, srcb);
4299 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4300 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4301
4302 /* Copy. */
4303 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4304 }
4305
4306 return const0_rtx;
4307 }
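
/* The two branches mirror what user code would otherwise need:

     va_list a, b;
     va_copy (b, a);   // works for both pointer- and array-style va_list
     // b = a;         // a plain assignment is invalid for array types

   so for array-type va_lists the copy is emitted as a block move of the
   underlying record instead of a MODIFY_EXPR. */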
4308
4309 /* Expand a call to one of the builtin functions __builtin_frame_address or
4310 __builtin_return_address. */
4311
4312 static rtx
4313 expand_builtin_frame_address (tree fndecl, tree exp)
4314 {
4315 /* The argument must be a nonnegative integer constant.
4316 It counts the number of frames to scan up the stack.
4317 The value is the return address saved in that frame. */
4318 if (call_expr_nargs (exp) == 0)
4319 /* Warning about missing arg was already issued. */
4320 return const0_rtx;
4321 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4322 {
4323 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4324 error ("invalid argument to %<__builtin_frame_address%>");
4325 else
4326 error ("invalid argument to %<__builtin_return_address%>");
4327 return const0_rtx;
4328 }
4329 else
4330 {
4331 rtx tem
4332 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4333 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4334
4335 /* Some ports cannot access arbitrary stack frames. */
4336 if (tem == NULL)
4337 {
4338 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4339 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4340 else
4341 warning (0, "unsupported argument to %<__builtin_return_address%>");
4342 return const0_rtx;
4343 }
4344
4345 /* For __builtin_frame_address, return what we've got. */
4346 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4347 return tem;
4348
4349 if (!REG_P (tem)
4350 && ! CONSTANT_P (tem))
4351 tem = copy_addr_to_reg (tem);
4352 return tem;
4353 }
4354 }
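
/* Typical uses of the two builtins expanded here (the argument must be a
   nonnegative integer constant, as checked above):

     void *ra = __builtin_return_address (0);  // this function's return address
     void *fp = __builtin_frame_address (0);   // this function's frame

   Nonzero arguments walk up the stack, which only works on targets able
   to access arbitrary frames; hence the warning path above. */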
4355
4356 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4357 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4358 is the same as for allocate_dynamic_stack_space. */
4359
4360 static rtx
4361 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4362 {
4363 rtx op0;
4364 rtx result;
4365 bool valid_arglist;
4366 unsigned int align;
4367 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4368 == BUILT_IN_ALLOCA_WITH_ALIGN);
4369
4370 valid_arglist
4371 = (alloca_with_align
4372 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4373 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4374
4375 if (!valid_arglist)
4376 return NULL_RTX;
4377
4378 /* Compute the argument. */
4379 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4380
4381 /* Compute the alignment. */
4382 align = (alloca_with_align
4383 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4384 : BIGGEST_ALIGNMENT);
4385
4386 /* Allocate the desired space. */
4387 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4388 result = convert_memory_address (ptr_mode, result);
4389
4390 return result;
4391 }
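
/* __builtin_alloca_with_align takes the requested alignment in bits as a
   constant second argument, e.g.

     void *p = __builtin_alloca_with_align (n, 256);  // 32-byte aligned

   while plain __builtin_alloca (n) gets BIGGEST_ALIGNMENT. */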
4392
4393 /* Expand a call to bswap builtin in EXP.
4394 Return NULL_RTX if a normal call should be emitted rather than expanding the
4395 function in-line. If convenient, the result should be placed in TARGET.
4396 SUBTARGET may be used as the target for computing one of EXP's operands. */
4397
4398 static rtx
4399 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4400 rtx subtarget)
4401 {
4402 tree arg;
4403 rtx op0;
4404
4405 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4406 return NULL_RTX;
4407
4408 arg = CALL_EXPR_ARG (exp, 0);
4409 op0 = expand_expr (arg,
4410 subtarget && GET_MODE (subtarget) == target_mode
4411 ? subtarget : NULL_RTX,
4412 target_mode, EXPAND_NORMAL);
4413 if (GET_MODE (op0) != target_mode)
4414 op0 = convert_to_mode (target_mode, op0, 1);
4415
4416 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4417
4418 gcc_assert (target);
4419
4420 return convert_to_mode (target_mode, target, 1);
4421 }
4422
4423 /* Expand a call to a unary builtin in EXP.
4424 Return NULL_RTX if a normal call should be emitted rather than expanding the
4425 function in-line. If convenient, the result should be placed in TARGET.
4426 SUBTARGET may be used as the target for computing one of EXP's operands. */
4427
4428 static rtx
4429 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4430 rtx subtarget, optab op_optab)
4431 {
4432 rtx op0;
4433
4434 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4435 return NULL_RTX;
4436
4437 /* Compute the argument. */
4438 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4439 (subtarget
4440 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4441 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4442 VOIDmode, EXPAND_NORMAL);
4443 /* Compute op, into TARGET if possible.
4444 Set TARGET to wherever the result comes back. */
4445 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4446 op_optab, op0, target, op_optab != clrsb_optab);
4447 gcc_assert (target);
4448
4449 return convert_to_mode (target_mode, target, 0);
4450 }
4451
4452 /* Expand a call to __builtin_expect. We just return our argument
4453 as the builtin_expect semantics should already have been applied by
4454 the tree branch prediction pass. */
4455
4456 static rtx
4457 expand_builtin_expect (tree exp, rtx target)
4458 {
4459 tree arg;
4460
4461 if (call_expr_nargs (exp) < 2)
4462 return const0_rtx;
4463 arg = CALL_EXPR_ARG (exp, 0);
4464
4465 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4466 /* When guessing was done, the hints should be already stripped away. */
4467 gcc_assert (!flag_guess_branch_prob
4468 || optimize == 0 || seen_error ());
4469 return target;
4470 }
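
/* By expansion time, uses such as

     if (__builtin_expect (x == 0, 0))  // "x == 0 is unlikely"
       cold_path ();                    // cold_path is illustrative

   have already served their purpose: the branch prediction pass consumed
   the hint, so all that remains is to return the first argument. */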
4471
4472 /* Expand a call to __builtin_assume_aligned. We just return our first
4473 argument as the builtin_assume_aligned semantics should already have
4474 been applied by CCP. */
4475
4476 static rtx
4477 expand_builtin_assume_aligned (tree exp, rtx target)
4478 {
4479 if (call_expr_nargs (exp) < 2)
4480 return const0_rtx;
4481 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4482 EXPAND_NORMAL);
4483 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4484 && (call_expr_nargs (exp) < 3
4485 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4486 return target;
4487 }
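
/* Likewise a typical use:

     double *ap = __builtin_assume_aligned (p, 16);

   The alignment fact was recorded by CCP earlier; at expansion time the
   call simply yields its first argument, after checking that the
   alignment (and optional misalignment) operands have no side effects. */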
4488
4489 void
4490 expand_builtin_trap (void)
4491 {
4492 #ifdef HAVE_trap
4493 if (HAVE_trap)
4494 {
4495 rtx insn = emit_insn (gen_trap ());
4496 /* For trap insns when not accumulating outgoing args force
4497 REG_ARGS_SIZE note to prevent crossjumping of calls with
4498 different args sizes. */
4499 if (!ACCUMULATE_OUTGOING_ARGS)
4500 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4501 }
4502 else
4503 #endif
4504 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4505 emit_barrier ();
4506 }
4507
4508 /* Expand a call to __builtin_unreachable. We do nothing except emit
4509 a barrier saying that control flow will not pass here.
4510
4511 It is the responsibility of the program being compiled to ensure
4512 that control flow never reaches __builtin_unreachable. */
4513 static void
4514 expand_builtin_unreachable (void)
4515 {
4516 emit_barrier ();
4517 }
4518
4519 /* Expand EXP, a call to fabs, fabsf or fabsl.
4520 Return NULL_RTX if a normal call should be emitted rather than expanding
4521 the function inline. If convenient, the result should be placed
4522 in TARGET. SUBTARGET may be used as the target for computing
4523 the operand. */
4524
4525 static rtx
4526 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4527 {
4528 enum machine_mode mode;
4529 tree arg;
4530 rtx op0;
4531
4532 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4533 return NULL_RTX;
4534
4535 arg = CALL_EXPR_ARG (exp, 0);
4536 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4537 mode = TYPE_MODE (TREE_TYPE (arg));
4538 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4539 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4540 }
4541
4542 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4543 Return NULL if a normal call should be emitted rather than expanding the
4544 function inline. If convenient, the result should be placed in TARGET.
4545 SUBTARGET may be used as the target for computing the operand. */
4546
4547 static rtx
4548 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4549 {
4550 rtx op0, op1;
4551 tree arg;
4552
4553 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4554 return NULL_RTX;
4555
4556 arg = CALL_EXPR_ARG (exp, 0);
4557 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4558
4559 arg = CALL_EXPR_ARG (exp, 1);
4560 op1 = expand_normal (arg);
4561
4562 return expand_copysign (op0, op1, target);
4563 }
4564
4565 /* Create a new constant string literal and return a char* pointer to it.
4566 The STRING_CST value is the LEN characters at STR. */
4567 tree
4568 build_string_literal (int len, const char *str)
4569 {
4570 tree t, elem, index, type;
4571
4572 t = build_string (len, str);
4573 elem = build_type_variant (char_type_node, 1, 0);
4574 index = build_index_type (size_int (len - 1));
4575 type = build_array_type (elem, index);
4576 TREE_TYPE (t) = type;
4577 TREE_CONSTANT (t) = 1;
4578 TREE_READONLY (t) = 1;
4579 TREE_STATIC (t) = 1;
4580
4581 type = build_pointer_type (elem);
4582 t = build1 (ADDR_EXPR, type,
4583 build4 (ARRAY_REF, elem,
4584 t, integer_zero_node, NULL_TREE, NULL_TREE));
4585 return t;
4586 }
4587
4588 /* Expand a call to __builtin___clear_cache. */
4589
4590 static rtx
4591 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4592 {
4593 #ifndef HAVE_clear_cache
4594 #ifdef CLEAR_INSN_CACHE
4595 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4596 does something. Just do the default expansion to a call to
4597 __clear_cache(). */
4598 return NULL_RTX;
4599 #else
4600 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4601 does nothing. There is no need to call it. Do nothing. */
4602 return const0_rtx;
4603 #endif /* CLEAR_INSN_CACHE */
4604 #else
4605 /* We have a "clear_cache" insn, and it will handle everything. */
4606 tree begin, end;
4607 rtx begin_rtx, end_rtx;
4608
4609 /* We must not expand to a library call. If we did, any
4610 fallback library function in libgcc that might contain a call to
4611 __builtin___clear_cache() would recurse infinitely. */
4612 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4613 {
4614 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4615 return const0_rtx;
4616 }
4617
4618 if (HAVE_clear_cache)
4619 {
4620 struct expand_operand ops[2];
4621
4622 begin = CALL_EXPR_ARG (exp, 0);
4623 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4624
4625 end = CALL_EXPR_ARG (exp, 1);
4626 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4627
4628 create_address_operand (&ops[0], begin_rtx);
4629 create_address_operand (&ops[1], end_rtx);
4630 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4631 return const0_rtx;
4632 }
4633 return const0_rtx;
4634 #endif /* HAVE_clear_cache */
4635 }
4636
4637 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4638
4639 static rtx
4640 round_trampoline_addr (rtx tramp)
4641 {
4642 rtx temp, addend, mask;
4643
4644 /* If we don't need too much alignment, we'll have been guaranteed
4645 proper alignment by get_trampoline_type. */
4646 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4647 return tramp;
4648
4649 /* Round address up to desired boundary. */
4650 temp = gen_reg_rtx (Pmode);
4651 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4652 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4653
4654 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4655 temp, 0, OPTAB_LIB_WIDEN);
4656 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4657 temp, 0, OPTAB_LIB_WIDEN);
4658
4659 return tramp;
4660 }
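
/* The add-and-mask pair above is the usual round-up idiom; for a
   TRAMPOLINE_ALIGNMENT of 64 bits it computes

     rounded = (tramp + 7) & ~(HOST_WIDE_INT) 7;

   e.g. 0x1003 + 7 == 0x100a, which masks down to 0x1008, the next
   8-byte boundary at or above the original address. */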
4661
4662 static rtx
4663 expand_builtin_init_trampoline (tree exp, bool onstack)
4664 {
4665 tree t_tramp, t_func, t_chain;
4666 rtx m_tramp, r_tramp, r_chain, tmp;
4667
4668 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4669 POINTER_TYPE, VOID_TYPE))
4670 return NULL_RTX;
4671
4672 t_tramp = CALL_EXPR_ARG (exp, 0);
4673 t_func = CALL_EXPR_ARG (exp, 1);
4674 t_chain = CALL_EXPR_ARG (exp, 2);
4675
4676 r_tramp = expand_normal (t_tramp);
4677 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4678 MEM_NOTRAP_P (m_tramp) = 1;
4679
4680 /* If ONSTACK, the TRAMP argument should be the address of a field
4681 within the local function's FRAME decl. Either way, let's see if
4682 we can fill in the MEM_ATTRs for this memory. */
4683 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4684 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4685
4686 /* Creator of a heap trampoline is responsible for making sure the
4687 address is aligned to at least STACK_BOUNDARY. Normally malloc
4688 will ensure this anyhow. */
4689 tmp = round_trampoline_addr (r_tramp);
4690 if (tmp != r_tramp)
4691 {
4692 m_tramp = change_address (m_tramp, BLKmode, tmp);
4693 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4694 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4695 }
4696
4697 /* The FUNC argument should be the address of the nested function.
4698 Extract the actual function decl to pass to the hook. */
4699 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4700 t_func = TREE_OPERAND (t_func, 0);
4701 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4702
4703 r_chain = expand_normal (t_chain);
4704
4705 /* Generate insns to initialize the trampoline. */
4706 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4707
4708 if (onstack)
4709 {
4710 trampolines_created = 1;
4711
4712 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4713 "trampoline generated for nested function %qD", t_func);
4714 }
4715
4716 return const0_rtx;
4717 }
4718
4719 static rtx
4720 expand_builtin_adjust_trampoline (tree exp)
4721 {
4722 rtx tramp;
4723
4724 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4725 return NULL_RTX;
4726
4727 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4728 tramp = round_trampoline_addr (tramp);
4729 if (targetm.calls.trampoline_adjust_address)
4730 tramp = targetm.calls.trampoline_adjust_address (tramp);
4731
4732 return tramp;
4733 }
4734
4735 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4736 function. The function first checks whether the back end provides
4737 an insn to implement signbit for the respective mode. If not, it
4738 checks whether the floating point format of the value is such that
4739 the sign bit can be extracted. If that is not the case, the
4740 function returns NULL_RTX to indicate that a normal call should be
4741 emitted rather than expanding the function in-line. EXP is the
4742 expression that is a call to the builtin function; if convenient,
4743 the result should be placed in TARGET. */
4744 static rtx
4745 expand_builtin_signbit (tree exp, rtx target)
4746 {
4747 const struct real_format *fmt;
4748 enum machine_mode fmode, imode, rmode;
4749 tree arg;
4750 int word, bitpos;
4751 enum insn_code icode;
4752 rtx temp;
4753 location_t loc = EXPR_LOCATION (exp);
4754
4755 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4756 return NULL_RTX;
4757
4758 arg = CALL_EXPR_ARG (exp, 0);
4759 fmode = TYPE_MODE (TREE_TYPE (arg));
4760 rmode = TYPE_MODE (TREE_TYPE (exp));
4761 fmt = REAL_MODE_FORMAT (fmode);
4762
4763 arg = builtin_save_expr (arg);
4764
4765 /* Expand the argument yielding a RTX expression. */
4766 temp = expand_normal (arg);
4767
4768 /* Check if the back end provides an insn that handles signbit for the
4769 argument's mode. */
4770 icode = optab_handler (signbit_optab, fmode);
4771 if (icode != CODE_FOR_nothing)
4772 {
4773 rtx last = get_last_insn ();
4774 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4775 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4776 return target;
4777 delete_insns_since (last);
4778 }
4779
4780 /* For floating point formats without a sign bit, implement signbit
4781 as "ARG < 0.0". */
4782 bitpos = fmt->signbit_ro;
4783 if (bitpos < 0)
4784 {
4785 /* But we can't do this if the format supports signed zero. */
4786 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4787 return NULL_RTX;
4788
4789 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4790 build_real (TREE_TYPE (arg), dconst0));
4791 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4792 }
4793
4794 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4795 {
4796 imode = int_mode_for_mode (fmode);
4797 if (imode == BLKmode)
4798 return NULL_RTX;
4799 temp = gen_lowpart (imode, temp);
4800 }
4801 else
4802 {
4803 imode = word_mode;
4804 /* Handle targets with different FP word orders. */
4805 if (FLOAT_WORDS_BIG_ENDIAN)
4806 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4807 else
4808 word = bitpos / BITS_PER_WORD;
4809 temp = operand_subword_force (temp, word, fmode);
4810 bitpos = bitpos % BITS_PER_WORD;
4811 }
4812
4813 /* Force the intermediate word_mode (or narrower) result into a
4814 register. This avoids attempting to create paradoxical SUBREGs
4815 of floating point modes below. */
4816 temp = force_reg (imode, temp);
4817
4818 /* If the bitpos is within the "result mode" lowpart, the operation
4819 can be implemented with a single bitwise AND. Otherwise, we need
4820 a right shift and an AND. */
4821
4822 if (bitpos < GET_MODE_BITSIZE (rmode))
4823 {
4824 double_int mask = double_int_zero.set_bit (bitpos);
4825
4826 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4827 temp = gen_lowpart (rmode, temp);
4828 temp = expand_binop (rmode, and_optab, temp,
4829 immed_double_int_const (mask, rmode),
4830 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4831 }
4832 else
4833 {
4834 /* Perform a logical right shift to place the signbit in the least
4835 significant bit, then truncate the result to the desired mode
4836 and mask just this bit. */
4837 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4838 temp = gen_lowpart (rmode, temp);
4839 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4840 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4841 }
4842
4843 return temp;
4844 }
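
/* For IEEE single precision the sign is bit 31 (fmt->signbit_ro == 31),
   so with a 32-bit result mode the fallback above amounts to

     ((union { float f; unsigned u; }) { .f = x }).u & 0x80000000u

   a single AND on the lowpart; the shift-then-mask path is only needed
   when the sign bit lies outside the result mode's lowpart. (signbit
   need only return a nonzero value, not exactly 1.) */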
4845
4846 /* Expand fork or exec calls. TARGET is the desired target of the
4847 call. EXP is the call. FN is the identifier of
4848 the actual function. IGNORE is nonzero if the
4849 value is to be ignored. */
4850
4851 static rtx
4852 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4853 {
4854 tree id, decl;
4855 tree call;
4856
4857 /* If we are not profiling, just call the function. */
4858 if (!profile_arc_flag)
4859 return NULL_RTX;
4860
4861 /* Otherwise call the wrapper. This should be equivalent for the rest of the
4862 compiler, so the code does not diverge, and the wrapper may run the
4863 code necessary for keeping the profiling sane. */
4864
4865 switch (DECL_FUNCTION_CODE (fn))
4866 {
4867 case BUILT_IN_FORK:
4868 id = get_identifier ("__gcov_fork");
4869 break;
4870
4871 case BUILT_IN_EXECL:
4872 id = get_identifier ("__gcov_execl");
4873 break;
4874
4875 case BUILT_IN_EXECV:
4876 id = get_identifier ("__gcov_execv");
4877 break;
4878
4879 case BUILT_IN_EXECLP:
4880 id = get_identifier ("__gcov_execlp");
4881 break;
4882
4883 case BUILT_IN_EXECLE:
4884 id = get_identifier ("__gcov_execle");
4885 break;
4886
4887 case BUILT_IN_EXECVP:
4888 id = get_identifier ("__gcov_execvp");
4889 break;
4890
4891 case BUILT_IN_EXECVE:
4892 id = get_identifier ("__gcov_execve");
4893 break;
4894
4895 default:
4896 gcc_unreachable ();
4897 }
4898
4899 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4900 FUNCTION_DECL, id, TREE_TYPE (fn));
4901 DECL_EXTERNAL (decl) = 1;
4902 TREE_PUBLIC (decl) = 1;
4903 DECL_ARTIFICIAL (decl) = 1;
4904 TREE_NOTHROW (decl) = 1;
4905 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4906 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4907 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4908 return expand_call (call, target, ignore);
4909 }
4910
4911
4912 \f
4913 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4914 the pointer in these functions is void*, the tree optimizers may remove
4915 casts. The mode computed in expand_builtin isn't reliable either, due
4916 to __sync_bool_compare_and_swap.
4917
4918 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4919 group of builtins. This gives us log2 of the mode size. */
4920
4921 static inline enum machine_mode
4922 get_builtin_sync_mode (int fcode_diff)
4923 {
4924 /* The size is not negotiable, so ask not to get BLKmode in return
4925 if the target indicates that a smaller size would be better. */
4926 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
4927 }
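
/* For example, BUILT_IN_SYNC_FETCH_AND_ADD_4 is the third member of its
   _1/_2/_4/_8/_16 group, so FCODE_DIFF == 2 and the mode requested is
   BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on typical targets. */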
4928
4929 /* Expand the memory expression LOC and return the appropriate memory operand
4930 for the builtin_sync operations. */
4931
4932 static rtx
4933 get_builtin_sync_mem (tree loc, enum machine_mode mode)
4934 {
4935 rtx addr, mem;
4936
4937 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4938 addr = convert_memory_address (Pmode, addr);
4939
4940 /* Note that we explicitly do not want any alias information for this
4941 memory, so that we kill all other live memories. Otherwise we don't
4942 satisfy the full barrier semantics of the intrinsic. */
4943 mem = validize_mem (gen_rtx_MEM (mode, addr));
4944
4945 /* The memory must be at least as aligned as the mode requires. */
4946 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4947 get_pointer_alignment (loc)));
4948 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4949 MEM_VOLATILE_P (mem) = 1;
4950
4951 return mem;
4952 }
4953
4954 /* Make sure an argument is in the right mode.
4955 EXP is the tree argument.
4956 MODE is the mode it should be in. */
4957
4958 static rtx
4959 expand_expr_force_mode (tree exp, enum machine_mode mode)
4960 {
4961 rtx val;
4962 enum machine_mode old_mode;
4963
4964 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4965 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4966 of CONST_INTs, where we know the old_mode only from the call argument. */
4967
4968 old_mode = GET_MODE (val);
4969 if (old_mode == VOIDmode)
4970 old_mode = TYPE_MODE (TREE_TYPE (exp));
4971 val = convert_modes (mode, old_mode, val, 1);
4972 return val;
4973 }
4974
4975
4976 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4977 EXP is the CALL_EXPR. CODE is the rtx code
4978 that corresponds to the arithmetic or logical operation from the name;
4979 an exception here is that NOT actually means NAND. TARGET is an optional
4980 place for us to store the results; AFTER is true if this is the
4981 fetch_and_xxx form. */
4982
4983 static rtx
4984 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
4985 enum rtx_code code, bool after,
4986 rtx target)
4987 {
4988 rtx val, mem;
4989 location_t loc = EXPR_LOCATION (exp);
4990
4991 if (code == NOT && warn_sync_nand)
4992 {
4993 tree fndecl = get_callee_fndecl (exp);
4994 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4995
4996 static bool warned_f_a_n, warned_n_a_f;
4997
4998 switch (fcode)
4999 {
5000 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5001 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5002 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5003 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5004 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5005 if (warned_f_a_n)
5006 break;
5007
5008 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5009 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5010 warned_f_a_n = true;
5011 break;
5012
5013 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5014 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5015 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5016 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5017 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5018 if (warned_n_a_f)
5019 break;
5020
5021 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5022 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5023 warned_n_a_f = true;
5024 break;
5025
5026 default:
5027 gcc_unreachable ();
5028 }
5029 }
5030
5031 /* Expand the operands. */
5032 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5033 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5034
5035 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5036 after);
5037 }
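
/* The GCC 4.4 semantic change warned about above concerns the NAND forms:

     __sync_fetch_and_nand (ptr, val);  // since 4.4: *ptr = ~(*ptr & val)

   whereas GCC 4.3 and earlier implemented *ptr = ~*ptr & val. Note that
   the rtx code NOT is used here to denote that NAND operation. */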
5038
5039 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5040 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5041 true if this is the boolean form. TARGET is a place for us to store the
5042 results; this is NOT optional if IS_BOOL is true. */
5043
5044 static rtx
5045 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5046 bool is_bool, rtx target)
5047 {
5048 rtx old_val, new_val, mem;
5049 rtx *pbool, *poval;
5050
5051 /* Expand the operands. */
5052 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5053 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5054 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5055
5056 pbool = poval = NULL;
5057 if (target != const0_rtx)
5058 {
5059 if (is_bool)
5060 pbool = &target;
5061 else
5062 poval = &target;
5063 }
5064 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5065 false, MEMMODEL_SEQ_CST,
5066 MEMMODEL_SEQ_CST))
5067 return NULL_RTX;
5068
5069 return target;
5070 }
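
/* A hedged sketch of the two user-level forms expanded above; COUNTER,
   OLD_V and NEW_V are hypothetical names:

     int counter, old_v, new_v, prev;
     _Bool ok = __sync_bool_compare_and_swap (&counter, old_v, new_v);
                           // IS_BOOL == true: TARGET holds a success flag
     prev = __sync_val_compare_and_swap (&counter, old_v, new_v);
                           // IS_BOOL == false: TARGET holds the old value

   Both expand through expand_atomic_compare_and_swap with SEQ_CST
   ordering on both the success and failure paths.  */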
5071
5072 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5073 general form is actually an atomic exchange, and some targets only
5074 support a reduced form with the second argument being a constant 1.
5075 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5076 the results. */
5077
5078 static rtx
5079 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5080 rtx target)
5081 {
5082 rtx val, mem;
5083
5084 /* Expand the operands. */
5085 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5086 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5087
5088 return expand_sync_lock_test_and_set (target, mem, val);
5089 }
5090
5091 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5092
5093 static void
5094 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5095 {
5096 rtx mem;
5097
5098 /* Expand the operands. */
5099 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5100
5101 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5102 }
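
/* Usage sketch, shown only to illustrate the pair of expanders above:
   together they implement an acquire/release spin lock.

     static int lock;                 // hypothetical lock word

     while (__sync_lock_test_and_set (&lock, 1))
       ;                              // atomic exchange, acquire barrier
     // ... critical section ...
     __sync_lock_release (&lock);     // atomic store of 0, MEMMODEL_RELEASE
*/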
5103
5104 /* Given an integer representing an ``enum memmodel'', verify its
5105 correctness and return the memory model enum. */
5106
5107 static enum memmodel
5108 get_memmodel (tree exp)
5109 {
5110 rtx op;
5111 unsigned HOST_WIDE_INT val;
5112
5113 /* If the parameter is not a constant, it's a run time value so we'll just
5114 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5115 if (TREE_CODE (exp) != INTEGER_CST)
5116 return MEMMODEL_SEQ_CST;
5117
5118 op = expand_normal (exp);
5119
5120 val = INTVAL (op);
5121 if (targetm.memmodel_check)
5122 val = targetm.memmodel_check (val);
5123 else if (val & ~MEMMODEL_MASK)
5124 {
5125 warning (OPT_Winvalid_memory_model,
5126 "unknown architecture specifier in memory model to builtin");
5127 return MEMMODEL_SEQ_CST;
5128 }
5129
5130 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5131 {
5132 warning (OPT_Winvalid_memory_model,
5133 "invalid memory model argument to builtin");
5134 return MEMMODEL_SEQ_CST;
5135 }
5136
5137 return (enum memmodel) val;
5138 }
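
/* Illustration of the constant/non-constant split in get_memmodel, using
   the documented __ATOMIC_* macros; X and USER_MODEL are hypothetical:

     __atomic_store_n (&x, 0, __ATOMIC_RELEASE);   // INTEGER_CST: validated
     __atomic_store_n (&x, 0, user_model);         // run-time value: treated
                                                   // as MEMMODEL_SEQ_CST
*/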
5139
5140 /* Expand the __atomic_exchange intrinsic:
5141 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5142 EXP is the CALL_EXPR.
5143 TARGET is an optional place for us to store the results. */
5144
5145 static rtx
5146 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5147 {
5148 rtx val, mem;
5149 enum memmodel model;
5150
5151 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5152 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5153 {
5154 error ("invalid memory model for %<__atomic_exchange%>");
5155 return NULL_RTX;
5156 }
5157
5158 if (!flag_inline_atomics)
5159 return NULL_RTX;
5160
5161 /* Expand the operands. */
5162 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5163 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5164
5165 return expand_atomic_exchange (target, mem, val, model);
5166 }
5167
5168 /* Expand the __atomic_compare_exchange intrinsic:
5169 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5170 TYPE desired, BOOL weak,
5171 enum memmodel success,
5172 enum memmodel failure)
5173 EXP is the CALL_EXPR.
5174 TARGET is an optional place for us to store the results. */
5175
5176 static rtx
5177 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5178 rtx target)
5179 {
5180 rtx expect, desired, mem, oldval;
5181 enum memmodel success, failure;
5182 tree weak;
5183 bool is_weak;
5184
5185 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5186 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5187
5188 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5189 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5190 {
5191 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5192 return NULL_RTX;
5193 }
5194
5195 if (failure > success)
5196 {
5197 error ("failure memory model cannot be stronger than success "
5198 "memory model for %<__atomic_compare_exchange%>");
5199 return NULL_RTX;
5200 }
5201
5202 if (!flag_inline_atomics)
5203 return NULL_RTX;
5204
5205 /* Expand the operands. */
5206 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5207
5208 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5209 expect = convert_memory_address (Pmode, expect);
5210 expect = gen_rtx_MEM (mode, expect);
5211 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5212
5213 weak = CALL_EXPR_ARG (exp, 3);
5214 is_weak = false;
5215 if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
5216 is_weak = true;
5217
5218 oldval = expect;
5219 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5220 &oldval, mem, oldval, desired,
5221 is_weak, success, failure))
5222 return NULL_RTX;
5223
5224 if (oldval != expect)
5225 emit_move_insn (expect, oldval);
5226
5227 return target;
5228 }
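
/* Sketch of the source-level form this expands; the argument positions
   match the CALL_EXPR_ARG indices read above, and OBJ/EXPECTED are
   hypothetical names:

     int expected = __atomic_load_n (&obj, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (&obj, &expected, expected + 1,
                                          0,              // weak == false
                                          __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;   // EXPECTED was refreshed from memory by the failed CAS

   On failure the observed value is stored back through the EXPECT
   pointer, which is the emit_move_insn (expect, oldval) above.  */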
5229
5230 /* Expand the __atomic_load intrinsic:
5231 TYPE __atomic_load (TYPE *object, enum memmodel)
5232 EXP is the CALL_EXPR.
5233 TARGET is an optional place for us to store the results. */
5234
5235 static rtx
5236 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5237 {
5238 rtx mem;
5239 enum memmodel model;
5240
5241 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5242 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5243 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5244 {
5245 error ("invalid memory model for %<__atomic_load%>");
5246 return NULL_RTX;
5247 }
5248
5249 if (!flag_inline_atomics)
5250 return NULL_RTX;
5251
5252 /* Expand the operand. */
5253 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5254
5255 return expand_atomic_load (target, mem, model);
5256 }
5257
5258
5259 /* Expand the __atomic_store intrinsic:
5260 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5261 EXP is the CALL_EXPR.
5262 TARGET is an optional place for us to store the results. */
5263
5264 static rtx
5265 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5266 {
5267 rtx mem, val;
5268 enum memmodel model;
5269
5270 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5271 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5272 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5273 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5274 {
5275 error ("invalid memory model for %<__atomic_store%>");
5276 return NULL_RTX;
5277 }
5278
5279 if (!flag_inline_atomics)
5280 return NULL_RTX;
5281
5282 /* Expand the operands. */
5283 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5284 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5285
5286 return expand_atomic_store (mem, val, model, false);
5287 }
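
/* For illustration: the model check above accepts exactly the orders C11
   permits for an atomic store (FLAG is a hypothetical name), so

     __atomic_store_n (&flag, 1, __ATOMIC_RELEASE);   // accepted
     __atomic_store_n (&flag, 1, __ATOMIC_ACQUIRE);   // rejected with the
                                                      // error above

   while __atomic_load symmetrically rejects RELEASE and ACQ_REL.  */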
5288
5289 /* Expand the __atomic_fetch_XXX intrinsic:
5290 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5291 EXP is the CALL_EXPR.
5292 TARGET is an optional place for us to store the results.
5293 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5294 FETCH_AFTER is true if returning the result of the operation.
5295 FETCH_AFTER is false if returning the value before the operation.
5296 IGNORE is true if the result is not used.
5297 EXT_CALL is the correct builtin for an external call if this cannot be
5298 resolved to an instruction sequence. */
5299
5300 static rtx
5301 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5302 enum rtx_code code, bool fetch_after,
5303 bool ignore, enum built_in_function ext_call)
5304 {
5305 rtx val, mem, ret;
5306 enum memmodel model;
5307 tree fndecl;
5308 tree addr;
5309
5310 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5311
5312 /* Expand the operands. */
5313 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5314 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5315
5316 /* Only try generating instructions if inlining is turned on. */
5317 if (flag_inline_atomics)
5318 {
5319 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5320 if (ret)
5321 return ret;
5322 }
5323
5324 /* If no external fallback routine was supplied, let the caller emit the
normal library call. */
5325 if (ext_call == BUILT_IN_NONE)
5326 return NULL_RTX;
5327
5328 /* Change the call to the specified function. */
5329 fndecl = get_callee_fndecl (exp);
5330 addr = CALL_EXPR_FN (exp);
5331 STRIP_NOPS (addr);
5332
5333 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5334 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5335
5336 /* Expand the call here so we can emit trailing code. */
5337 ret = expand_call (exp, target, ignore);
5338
5339 /* Restore the original function just in case it matters. */
5340 TREE_OPERAND (addr, 0) = fndecl;
5341
5342 /* Then issue the arithmetic correction to return the right result. */
5343 if (!ignore)
5344 {
5345 if (code == NOT)
5346 {
5347 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5348 OPTAB_LIB_WIDEN);
5349 ret = expand_simple_unop (mode, NOT, ret, target, true);
5350 }
5351 else
5352 ret = expand_simple_binop (mode, code, ret, val, target, true,
5353 OPTAB_LIB_WIDEN);
5354 }
5355 return ret;
5356 }
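
/* The arithmetic correction above, restated as a hedged source-level
   sketch for the NAND case: when __atomic_nand_fetch falls back to the
   __atomic_fetch_nand library routine, the library returns the value
   before the operation, so the post-operation value is recomputed as

     ret = __atomic_fetch_nand (ptr, val, model);   // old value
     ret = ~(ret & val);                            // NAND of old and VAL

   which is exactly the expand_simple_binop/expand_simple_unop pair in
   the CODE == NOT branch.  */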
5357
5358
5359 #ifndef HAVE_atomic_clear
5360 # define HAVE_atomic_clear 0
5361 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5362 #endif
5363
5364 /* Expand an atomic clear operation.
5365 void __atomic_clear (BOOL *obj, enum memmodel)
5366 EXP is the call expression. */
5367
5368 static rtx
5369 expand_builtin_atomic_clear (tree exp)
5370 {
5371 enum machine_mode mode;
5372 rtx mem, ret;
5373 enum memmodel model;
5374
5375 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5376 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5377 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5378
5379 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5380 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5381 {
5382 error ("invalid memory model for %<__atomic_clear%>");
5383 return const0_rtx;
5384 }
5385
5386 if (HAVE_atomic_clear)
5387 {
5388 emit_insn (gen_atomic_clear (mem, model));
5389 return const0_rtx;
5390 }
5391
5392 /* Try issuing an __atomic_store, with a fallback to __sync_lock_release
5393 if that is what the target provides. The only way this can
5394 fail is if the bool type is larger than a word size. Unlikely, but
5395 handle it anyway for completeness. Assume a single-threaded model since
5396 there is no atomic support in this case, and no barriers are required. */
5397 ret = expand_atomic_store (mem, const0_rtx, model, true);
5398 if (!ret)
5399 emit_move_insn (mem, const0_rtx);
5400 return const0_rtx;
5401 }
5402
5403 /* Expand an atomic test_and_set operation.
5404 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5405 EXP is the call expression. */
5406
5407 static rtx
5408 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5409 {
5410 rtx mem;
5411 enum memmodel model;
5412 enum machine_mode mode;
5413
5414 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5415 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5416 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5417
5418 return expand_atomic_test_and_set (target, mem, model);
5419 }
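
/* A usage sketch of the two expanders above, following the C11
   atomic_flag protocol; GUARD is a hypothetical flag byte:

     static _Bool guard;

     while (__atomic_test_and_set (&guard, __ATOMIC_ACQUIRE))
       ;                                          // spin on the 0 -> 1 edge
     // ... critical section ...
     __atomic_clear (&guard, __ATOMIC_RELEASE);   // store 0 with release
*/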
5420
5421
5422 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5423 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5424
5425 static tree
5426 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5427 {
5428 int size;
5429 enum machine_mode mode;
5430 unsigned int mode_align, type_align;
5431
5432 if (TREE_CODE (arg0) != INTEGER_CST)
5433 return NULL_TREE;
5434
5435 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5436 mode = mode_for_size (size, MODE_INT, 0);
5437 mode_align = GET_MODE_ALIGNMENT (mode);
5438
5439 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5440 type_align = mode_align;
5441 else
5442 {
5443 tree ttype = TREE_TYPE (arg1);
5444
5445 /* This function is usually invoked and folded immediately by the front
5446 end before anything else has a chance to look at it. The pointer
5447 parameter at this point is usually cast to a void *, so check for that
5448 and look past the cast. */
5449 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5450 && VOID_TYPE_P (TREE_TYPE (ttype)))
5451 arg1 = TREE_OPERAND (arg1, 0);
5452
5453 ttype = TREE_TYPE (arg1);
5454 gcc_assert (POINTER_TYPE_P (ttype));
5455
5456 /* Get the underlying type of the object. */
5457 ttype = TREE_TYPE (ttype);
5458 type_align = TYPE_ALIGN (ttype);
5459 }
5460
5461 /* If the object has smaller alignment, the lock-free routines cannot
5462 be used. */
5463 if (type_align < mode_align)
5464 return boolean_false_node;
5465
5466 /* Check if a compare_and_swap pattern exists for the mode which represents
5467 the required size. The pattern is not allowed to fail, so the existence
5468 of the pattern indicates support is present. */
5469 if (can_compare_and_swap_p (mode, true))
5470 return boolean_true_node;
5471 else
5472 return boolean_false_node;
5473 }
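
/* Illustration of the folding above; whether the assertion holds is a
   property of the target, so this is an example rather than a guarantee:

     _Static_assert (__atomic_always_lock_free (sizeof (int), 0),
                     "int is expected to be lock-free");

   The null pointer selects typical alignment for the size; a pointer to
   an object with smaller alignment than the integer mode of that size
   makes the result false, per the type_align check above.  */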
5474
5475 /* Return true if the parameters to call EXP represent an object which will
5476 always generate lock free instructions. The first argument represents the
5477 size of the object, and the second parameter is a pointer to the object
5478 itself. If NULL is passed for the object, then the result is based on
5479 typical alignment for an object of the specified size. Otherwise return
5480 false. */
5481
5482 static rtx
5483 expand_builtin_atomic_always_lock_free (tree exp)
5484 {
5485 tree size;
5486 tree arg0 = CALL_EXPR_ARG (exp, 0);
5487 tree arg1 = CALL_EXPR_ARG (exp, 1);
5488
5489 if (TREE_CODE (arg0) != INTEGER_CST)
5490 {
5491 error ("non-constant argument 1 to %<__atomic_always_lock_free%>");
5492 return const0_rtx;
5493 }
5494
5495 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5496 if (size == boolean_true_node)
5497 return const1_rtx;
5498 return const0_rtx;
5499 }
5500
5501 /* Return boolean_true_node if it can be determined that object ARG1 of
5502 size ARG0 is lock-free on this architecture; otherwise return NULL_TREE. */
5503
5504 static tree
5505 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5506 {
5507 if (!flag_inline_atomics)
5508 return NULL_TREE;
5509
5510 /* If it isn't always lock free, don't generate a result. */
5511 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5512 return boolean_true_node;
5513
5514 return NULL_TREE;
5515 }
5516
5517 /* Expand the __atomic_is_lock_free intrinsic for call EXP. The first
5518 argument is the size of the object, and the second is a pointer to the
5519 object itself; if NULL is passed for the object, the result is based on
5520 typical alignment for an object of the specified size. Return const1_rtx
5521 if the object is known at compile time to be lock free, otherwise
5522 NULL_RTX so that a run-time library call is emitted. */
5523
5524 static rtx
5525 expand_builtin_atomic_is_lock_free (tree exp)
5526 {
5527 tree size;
5528 tree arg0 = CALL_EXPR_ARG (exp, 0);
5529 tree arg1 = CALL_EXPR_ARG (exp, 1);
5530
5531 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5532 {
5533 error ("non-integer argument 1 to %<__atomic_is_lock_free%>");
5534 return NULL_RTX;
5535 }
5536
5537 if (!flag_inline_atomics)
5538 return NULL_RTX;
5539
5540 /* If the value is known at compile time, return the RTX for it. */
5541 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5542 if (size == boolean_true_node)
5543 return const1_rtx;
5544
5545 return NULL_RTX;
5546 }
5547
5548 /* Expand the __atomic_thread_fence intrinsic:
5549 void __atomic_thread_fence (enum memmodel)
5550 EXP is the CALL_EXPR. */
5551
5552 static void
5553 expand_builtin_atomic_thread_fence (tree exp)
5554 {
5555 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5556 expand_mem_thread_fence (model);
5557 }
5558
5559 /* Expand the __atomic_signal_fence intrinsic:
5560 void __atomic_signal_fence (enum memmodel)
5561 EXP is the CALL_EXPR. */
5562
5563 static void
5564 expand_builtin_atomic_signal_fence (tree exp)
5565 {
5566 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5567 expand_mem_signal_fence (model);
5568 }
5569
5570 /* Expand the __sync_synchronize intrinsic. */
5571
5572 static void
5573 expand_builtin_sync_synchronize (void)
5574 {
5575 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5576 }
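
/* For illustration, the three fence expanders above line up as follows:

     __atomic_thread_fence (__ATOMIC_ACQUIRE);   // inter-thread fence,
                                                 // model from the argument
     __atomic_signal_fence (__ATOMIC_SEQ_CST);   // compiler-only barrier
                                                 // against a signal handler
     __sync_synchronize ();                      // full fence, fixed at
                                                 // MEMMODEL_SEQ_CST
*/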
5577
5578 static rtx
5579 expand_builtin_thread_pointer (tree exp, rtx target)
5580 {
5581 enum insn_code icode;
5582 if (!validate_arglist (exp, VOID_TYPE))
5583 return const0_rtx;
5584 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5585 if (icode != CODE_FOR_nothing)
5586 {
5587 struct expand_operand op;
5588 if (!REG_P (target) || GET_MODE (target) != Pmode)
5589 target = gen_reg_rtx (Pmode);
5590 create_output_operand (&op, target, Pmode);
5591 expand_insn (icode, 1, &op);
5592 return target;
5593 }
5594 error ("%<__builtin_thread_pointer%> is not supported on this target");
5595 return const0_rtx;
5596 }
5597
5598 static void
5599 expand_builtin_set_thread_pointer (tree exp)
5600 {
5601 enum insn_code icode;
5602 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5603 return;
5604 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5605 if (icode != CODE_FOR_nothing)
5606 {
5607 struct expand_operand op;
5608 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5609 Pmode, EXPAND_NORMAL);
5610 create_input_operand (&op, val, Pmode);
5611 expand_insn (icode, 1, &op);
5612 return;
5613 }
5614 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
5615 }
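
/* Usage sketch (target-dependent: these builtins only expand when the
   corresponding optab exists, otherwise the errors above are issued):

     void *tp = __builtin_thread_pointer ();   // read the TLS base pointer
     __builtin_set_thread_pointer (tp);        // write it back
*/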
5616
5617 \f
5618 /* Expand an expression EXP that calls a built-in function,
5619 with result going to TARGET if that's convenient
5620 (and in mode MODE if that's convenient).
5621 SUBTARGET may be used as the target for computing one of EXP's operands.
5622 IGNORE is nonzero if the value is to be ignored. */
5623
5624 rtx
5625 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5626 int ignore)
5627 {
5628 tree fndecl = get_callee_fndecl (exp);
5629 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5630 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5631 int flags;
5632
5633 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5634 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5635
5636 /* When not optimizing, generate calls to library functions for a certain
5637 set of builtins. */
5638 if (!optimize
5639 && !called_as_built_in (fndecl)
5640 && fcode != BUILT_IN_FORK
5641 && fcode != BUILT_IN_EXECL
5642 && fcode != BUILT_IN_EXECV
5643 && fcode != BUILT_IN_EXECLP
5644 && fcode != BUILT_IN_EXECLE
5645 && fcode != BUILT_IN_EXECVP
5646 && fcode != BUILT_IN_EXECVE
5647 && fcode != BUILT_IN_ALLOCA
5648 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5649 && fcode != BUILT_IN_FREE)
5650 return expand_call (exp, target, ignore);
5651
5652 /* The built-in function expanders test for target == const0_rtx
5653 to determine whether the function's result will be ignored. */
5654 if (ignore)
5655 target = const0_rtx;
5656
5657 /* If the result of a pure or const built-in function is ignored, and
5658 none of its arguments are volatile, we can avoid expanding the
5659 built-in call and just evaluate the arguments for side-effects. */
5660 if (target == const0_rtx
5661 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5662 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5663 {
5664 bool volatilep = false;
5665 tree arg;
5666 call_expr_arg_iterator iter;
5667
5668 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5669 if (TREE_THIS_VOLATILE (arg))
5670 {
5671 volatilep = true;
5672 break;
5673 }
5674
5675 if (! volatilep)
5676 {
5677 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5678 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5679 return const0_rtx;
5680 }
5681 }
5682
5683 switch (fcode)
5684 {
5685 CASE_FLT_FN (BUILT_IN_FABS):
5686 case BUILT_IN_FABSD32:
5687 case BUILT_IN_FABSD64:
5688 case BUILT_IN_FABSD128:
5689 target = expand_builtin_fabs (exp, target, subtarget);
5690 if (target)
5691 return target;
5692 break;
5693
5694 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5695 target = expand_builtin_copysign (exp, target, subtarget);
5696 if (target)
5697 return target;
5698 break;
5699
5700 /* Just do a normal library call if we were unable to fold
5701 the values. */
5702 CASE_FLT_FN (BUILT_IN_CABS):
5703 break;
5704
5705 CASE_FLT_FN (BUILT_IN_EXP):
5706 CASE_FLT_FN (BUILT_IN_EXP10):
5707 CASE_FLT_FN (BUILT_IN_POW10):
5708 CASE_FLT_FN (BUILT_IN_EXP2):
5709 CASE_FLT_FN (BUILT_IN_EXPM1):
5710 CASE_FLT_FN (BUILT_IN_LOGB):
5711 CASE_FLT_FN (BUILT_IN_LOG):
5712 CASE_FLT_FN (BUILT_IN_LOG10):
5713 CASE_FLT_FN (BUILT_IN_LOG2):
5714 CASE_FLT_FN (BUILT_IN_LOG1P):
5715 CASE_FLT_FN (BUILT_IN_TAN):
5716 CASE_FLT_FN (BUILT_IN_ASIN):
5717 CASE_FLT_FN (BUILT_IN_ACOS):
5718 CASE_FLT_FN (BUILT_IN_ATAN):
5719 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5720 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5721 because of possible accuracy problems. */
5722 if (! flag_unsafe_math_optimizations)
5723 break;
5724 CASE_FLT_FN (BUILT_IN_SQRT):
5725 CASE_FLT_FN (BUILT_IN_FLOOR):
5726 CASE_FLT_FN (BUILT_IN_CEIL):
5727 CASE_FLT_FN (BUILT_IN_TRUNC):
5728 CASE_FLT_FN (BUILT_IN_ROUND):
5729 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5730 CASE_FLT_FN (BUILT_IN_RINT):
5731 target = expand_builtin_mathfn (exp, target, subtarget);
5732 if (target)
5733 return target;
5734 break;
5735
5736 CASE_FLT_FN (BUILT_IN_FMA):
5737 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5738 if (target)
5739 return target;
5740 break;
5741
5742 CASE_FLT_FN (BUILT_IN_ILOGB):
5743 if (! flag_unsafe_math_optimizations)
5744 break;
5745 CASE_FLT_FN (BUILT_IN_ISINF):
5746 CASE_FLT_FN (BUILT_IN_FINITE):
5747 case BUILT_IN_ISFINITE:
5748 case BUILT_IN_ISNORMAL:
5749 target = expand_builtin_interclass_mathfn (exp, target);
5750 if (target)
5751 return target;
5752 break;
5753
5754 CASE_FLT_FN (BUILT_IN_ICEIL):
5755 CASE_FLT_FN (BUILT_IN_LCEIL):
5756 CASE_FLT_FN (BUILT_IN_LLCEIL):
5757 CASE_FLT_FN (BUILT_IN_LFLOOR):
5758 CASE_FLT_FN (BUILT_IN_IFLOOR):
5759 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5760 target = expand_builtin_int_roundingfn (exp, target);
5761 if (target)
5762 return target;
5763 break;
5764
5765 CASE_FLT_FN (BUILT_IN_IRINT):
5766 CASE_FLT_FN (BUILT_IN_LRINT):
5767 CASE_FLT_FN (BUILT_IN_LLRINT):
5768 CASE_FLT_FN (BUILT_IN_IROUND):
5769 CASE_FLT_FN (BUILT_IN_LROUND):
5770 CASE_FLT_FN (BUILT_IN_LLROUND):
5771 target = expand_builtin_int_roundingfn_2 (exp, target);
5772 if (target)
5773 return target;
5774 break;
5775
5776 CASE_FLT_FN (BUILT_IN_POWI):
5777 target = expand_builtin_powi (exp, target);
5778 if (target)
5779 return target;
5780 break;
5781
5782 CASE_FLT_FN (BUILT_IN_ATAN2):
5783 CASE_FLT_FN (BUILT_IN_LDEXP):
5784 CASE_FLT_FN (BUILT_IN_SCALB):
5785 CASE_FLT_FN (BUILT_IN_SCALBN):
5786 CASE_FLT_FN (BUILT_IN_SCALBLN):
5787 if (! flag_unsafe_math_optimizations)
5788 break;
5789
5790 CASE_FLT_FN (BUILT_IN_FMOD):
5791 CASE_FLT_FN (BUILT_IN_REMAINDER):
5792 CASE_FLT_FN (BUILT_IN_DREM):
5793 CASE_FLT_FN (BUILT_IN_POW):
5794 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5795 if (target)
5796 return target;
5797 break;
5798
5799 CASE_FLT_FN (BUILT_IN_CEXPI):
5800 target = expand_builtin_cexpi (exp, target);
5801 gcc_assert (target);
5802 return target;
5803
5804 CASE_FLT_FN (BUILT_IN_SIN):
5805 CASE_FLT_FN (BUILT_IN_COS):
5806 if (! flag_unsafe_math_optimizations)
5807 break;
5808 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5809 if (target)
5810 return target;
5811 break;
5812
5813 CASE_FLT_FN (BUILT_IN_SINCOS):
5814 if (! flag_unsafe_math_optimizations)
5815 break;
5816 target = expand_builtin_sincos (exp);
5817 if (target)
5818 return target;
5819 break;
5820
5821 case BUILT_IN_APPLY_ARGS:
5822 return expand_builtin_apply_args ();
5823
5824 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5825 FUNCTION with a copy of the parameters described by
5826 ARGUMENTS, and ARGSIZE. It returns a block of memory
5827 allocated on the stack into which is stored all the registers
5828 that might possibly be used for returning the result of a
5829 function. ARGUMENTS is the value returned by
5830 __builtin_apply_args. ARGSIZE is the number of bytes of
5831 arguments that must be copied. ??? How should this value be
5832 computed? We'll also need a safe worst case value for varargs
5833 functions. */
5834 case BUILT_IN_APPLY:
5835 if (!validate_arglist (exp, POINTER_TYPE,
5836 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5837 && !validate_arglist (exp, REFERENCE_TYPE,
5838 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5839 return const0_rtx;
5840 else
5841 {
5842 rtx ops[3];
5843
5844 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5845 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5846 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5847
5848 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5849 }
5850
5851 /* __builtin_return (RESULT) causes the function to return the
5852 value described by RESULT. RESULT is address of the block of
5853 memory returned by __builtin_apply. */
5854 case BUILT_IN_RETURN:
5855 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5856 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5857 return const0_rtx;
5858
5859 case BUILT_IN_SAVEREGS:
5860 return expand_builtin_saveregs ();
5861
5862 case BUILT_IN_VA_ARG_PACK:
5863 /* All valid uses of __builtin_va_arg_pack () are removed during
5864 inlining. */
5865 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5866 return const0_rtx;
5867
5868 case BUILT_IN_VA_ARG_PACK_LEN:
5869 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5870 inlining. */
5871 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5872 return const0_rtx;
5873
5874 /* Return the address of the first anonymous stack arg. */
5875 case BUILT_IN_NEXT_ARG:
5876 if (fold_builtin_next_arg (exp, false))
5877 return const0_rtx;
5878 return expand_builtin_next_arg ();
5879
5880 case BUILT_IN_CLEAR_CACHE:
5881 target = expand_builtin___clear_cache (exp);
5882 if (target)
5883 return target;
5884 break;
5885
5886 case BUILT_IN_CLASSIFY_TYPE:
5887 return expand_builtin_classify_type (exp);
5888
5889 case BUILT_IN_CONSTANT_P:
5890 return const0_rtx;
5891
5892 case BUILT_IN_FRAME_ADDRESS:
5893 case BUILT_IN_RETURN_ADDRESS:
5894 return expand_builtin_frame_address (fndecl, exp);
5895
5896 /* Return the address of the area where the structure is returned,
5897 or 0 if the function does not return its aggregate result in memory. */
5898 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5899 if (call_expr_nargs (exp) != 0
5900 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5901 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5902 return const0_rtx;
5903 else
5904 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5905
5906 case BUILT_IN_ALLOCA:
5907 case BUILT_IN_ALLOCA_WITH_ALIGN:
5908 /* If the allocation stems from the declaration of a variable-sized
5909 object, it cannot accumulate. */
5910 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
5911 if (target)
5912 return target;
5913 break;
5914
5915 case BUILT_IN_STACK_SAVE:
5916 return expand_stack_save ();
5917
5918 case BUILT_IN_STACK_RESTORE:
5919 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5920 return const0_rtx;
5921
5922 case BUILT_IN_BSWAP16:
5923 case BUILT_IN_BSWAP32:
5924 case BUILT_IN_BSWAP64:
5925 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
5926 if (target)
5927 return target;
5928 break;
5929
5930 CASE_INT_FN (BUILT_IN_FFS):
5931 target = expand_builtin_unop (target_mode, exp, target,
5932 subtarget, ffs_optab);
5933 if (target)
5934 return target;
5935 break;
5936
5937 CASE_INT_FN (BUILT_IN_CLZ):
5938 target = expand_builtin_unop (target_mode, exp, target,
5939 subtarget, clz_optab);
5940 if (target)
5941 return target;
5942 break;
5943
5944 CASE_INT_FN (BUILT_IN_CTZ):
5945 target = expand_builtin_unop (target_mode, exp, target,
5946 subtarget, ctz_optab);
5947 if (target)
5948 return target;
5949 break;
5950
5951 CASE_INT_FN (BUILT_IN_CLRSB):
5952 target = expand_builtin_unop (target_mode, exp, target,
5953 subtarget, clrsb_optab);
5954 if (target)
5955 return target;
5956 break;
5957
5958 CASE_INT_FN (BUILT_IN_POPCOUNT):
5959 target = expand_builtin_unop (target_mode, exp, target,
5960 subtarget, popcount_optab);
5961 if (target)
5962 return target;
5963 break;
5964
5965 CASE_INT_FN (BUILT_IN_PARITY):
5966 target = expand_builtin_unop (target_mode, exp, target,
5967 subtarget, parity_optab);
5968 if (target)
5969 return target;
5970 break;
5971
5972 case BUILT_IN_STRLEN:
5973 target = expand_builtin_strlen (exp, target, target_mode);
5974 if (target)
5975 return target;
5976 break;
5977
5978 case BUILT_IN_STRCPY:
5979 target = expand_builtin_strcpy (exp, target);
5980 if (target)
5981 return target;
5982 break;
5983
5984 case BUILT_IN_STRNCPY:
5985 target = expand_builtin_strncpy (exp, target);
5986 if (target)
5987 return target;
5988 break;
5989
5990 case BUILT_IN_STPCPY:
5991 target = expand_builtin_stpcpy (exp, target, mode);
5992 if (target)
5993 return target;
5994 break;
5995
5996 case BUILT_IN_MEMCPY:
5997 target = expand_builtin_memcpy (exp, target);
5998 if (target)
5999 return target;
6000 break;
6001
6002 case BUILT_IN_MEMPCPY:
6003 target = expand_builtin_mempcpy (exp, target, mode);
6004 if (target)
6005 return target;
6006 break;
6007
6008 case BUILT_IN_MEMSET:
6009 target = expand_builtin_memset (exp, target, mode);
6010 if (target)
6011 return target;
6012 break;
6013
6014 case BUILT_IN_BZERO:
6015 target = expand_builtin_bzero (exp);
6016 if (target)
6017 return target;
6018 break;
6019
6020 case BUILT_IN_STRCMP:
6021 target = expand_builtin_strcmp (exp, target);
6022 if (target)
6023 return target;
6024 break;
6025
6026 case BUILT_IN_STRNCMP:
6027 target = expand_builtin_strncmp (exp, target, mode);
6028 if (target)
6029 return target;
6030 break;
6031
6032 case BUILT_IN_BCMP:
6033 case BUILT_IN_MEMCMP:
6034 target = expand_builtin_memcmp (exp, target, mode);
6035 if (target)
6036 return target;
6037 break;
6038
6039 case BUILT_IN_SETJMP:
6040 /* This should have been lowered to the builtins below. */
6041 gcc_unreachable ();
6042
6043 case BUILT_IN_SETJMP_SETUP:
6044 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6045 and the receiver label. */
6046 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6047 {
6048 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6049 VOIDmode, EXPAND_NORMAL);
6050 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6051 rtx label_r = label_rtx (label);
6052
6053 /* This is copied from the handling of non-local gotos. */
6054 expand_builtin_setjmp_setup (buf_addr, label_r);
6055 nonlocal_goto_handler_labels
6056 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6057 nonlocal_goto_handler_labels);
6058 /* ??? Do not let expand_label treat us as such since we would
6059 not want to be both on the list of non-local labels and on
6060 the list of forced labels. */
6061 FORCED_LABEL (label) = 0;
6062 return const0_rtx;
6063 }
6064 break;
6065
6066 case BUILT_IN_SETJMP_DISPATCHER:
6067 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6068 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6069 {
6070 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6071 rtx label_r = label_rtx (label);
6072
6073 /* Remove the dispatcher label from the list of non-local labels
6074 since the receiver labels have been added to it above. */
6075 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6076 return const0_rtx;
6077 }
6078 break;
6079
6080 case BUILT_IN_SETJMP_RECEIVER:
6081 /* __builtin_setjmp_receiver is passed the receiver label. */
6082 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6083 {
6084 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6085 rtx label_r = label_rtx (label);
6086
6087 expand_builtin_setjmp_receiver (label_r);
6088 return const0_rtx;
6089 }
6090 break;
6091
6092 /* __builtin_longjmp is passed a pointer to an array of five words.
6093 It's similar to the C library longjmp function but works with
6094 __builtin_setjmp above. */
6095 case BUILT_IN_LONGJMP:
6096 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6097 {
6098 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6099 VOIDmode, EXPAND_NORMAL);
6100 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6101
6102 if (value != const1_rtx)
6103 {
6104 error ("%<__builtin_longjmp%> second argument must be 1");
6105 return const0_rtx;
6106 }
6107
6108 expand_builtin_longjmp (buf_addr, value);
6109 return const0_rtx;
6110 }
6111 break;
6112
6113 case BUILT_IN_NONLOCAL_GOTO:
6114 target = expand_builtin_nonlocal_goto (exp);
6115 if (target)
6116 return target;
6117 break;
6118
6119 /* This updates the setjmp buffer that is its argument with the value
6120 of the current stack pointer. */
6121 case BUILT_IN_UPDATE_SETJMP_BUF:
6122 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6123 {
6124 rtx buf_addr
6125 = expand_normal (CALL_EXPR_ARG (exp, 0));
6126
6127 expand_builtin_update_setjmp_buf (buf_addr);
6128 return const0_rtx;
6129 }
6130 break;
6131
6132 case BUILT_IN_TRAP:
6133 expand_builtin_trap ();
6134 return const0_rtx;
6135
6136 case BUILT_IN_UNREACHABLE:
6137 expand_builtin_unreachable ();
6138 return const0_rtx;
6139
6140 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6141 case BUILT_IN_SIGNBITD32:
6142 case BUILT_IN_SIGNBITD64:
6143 case BUILT_IN_SIGNBITD128:
6144 target = expand_builtin_signbit (exp, target);
6145 if (target)
6146 return target;
6147 break;
6148
6149 /* Various hooks for the DWARF 2 __throw routine. */
6150 case BUILT_IN_UNWIND_INIT:
6151 expand_builtin_unwind_init ();
6152 return const0_rtx;
6153 case BUILT_IN_DWARF_CFA:
6154 return virtual_cfa_rtx;
6155 #ifdef DWARF2_UNWIND_INFO
6156 case BUILT_IN_DWARF_SP_COLUMN:
6157 return expand_builtin_dwarf_sp_column ();
6158 case BUILT_IN_INIT_DWARF_REG_SIZES:
6159 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6160 return const0_rtx;
6161 #endif
6162 case BUILT_IN_FROB_RETURN_ADDR:
6163 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6164 case BUILT_IN_EXTRACT_RETURN_ADDR:
6165 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6166 case BUILT_IN_EH_RETURN:
6167 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6168 CALL_EXPR_ARG (exp, 1));
6169 return const0_rtx;
6170 #ifdef EH_RETURN_DATA_REGNO
6171 case BUILT_IN_EH_RETURN_DATA_REGNO:
6172 return expand_builtin_eh_return_data_regno (exp);
6173 #endif
6174 case BUILT_IN_EXTEND_POINTER:
6175 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6176 case BUILT_IN_EH_POINTER:
6177 return expand_builtin_eh_pointer (exp);
6178 case BUILT_IN_EH_FILTER:
6179 return expand_builtin_eh_filter (exp);
6180 case BUILT_IN_EH_COPY_VALUES:
6181 return expand_builtin_eh_copy_values (exp);
6182
6183 case BUILT_IN_VA_START:
6184 return expand_builtin_va_start (exp);
6185 case BUILT_IN_VA_END:
6186 return expand_builtin_va_end (exp);
6187 case BUILT_IN_VA_COPY:
6188 return expand_builtin_va_copy (exp);
6189 case BUILT_IN_EXPECT:
6190 return expand_builtin_expect (exp, target);
6191 case BUILT_IN_ASSUME_ALIGNED:
6192 return expand_builtin_assume_aligned (exp, target);
6193 case BUILT_IN_PREFETCH:
6194 expand_builtin_prefetch (exp);
6195 return const0_rtx;
6196
6197 case BUILT_IN_INIT_TRAMPOLINE:
6198 return expand_builtin_init_trampoline (exp, true);
6199 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6200 return expand_builtin_init_trampoline (exp, false);
6201 case BUILT_IN_ADJUST_TRAMPOLINE:
6202 return expand_builtin_adjust_trampoline (exp);
6203
6204 case BUILT_IN_FORK:
6205 case BUILT_IN_EXECL:
6206 case BUILT_IN_EXECV:
6207 case BUILT_IN_EXECLP:
6208 case BUILT_IN_EXECLE:
6209 case BUILT_IN_EXECVP:
6210 case BUILT_IN_EXECVE:
6211 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6212 if (target)
6213 return target;
6214 break;
6215
6216 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6217 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6218 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6219 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6220 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6221 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6222 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6223 if (target)
6224 return target;
6225 break;
6226
6227 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6228 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6229 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6230 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6231 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6232 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6233 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6234 if (target)
6235 return target;
6236 break;
6237
6238 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6239 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6240 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6241 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6242 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6243 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6244 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6245 if (target)
6246 return target;
6247 break;
6248
6249 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6250 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6251 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6252 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6253 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6254 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6255 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6256 if (target)
6257 return target;
6258 break;
6259
6260 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6261 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6262 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6263 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6264 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6265 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6266 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6267 if (target)
6268 return target;
6269 break;
6270
6271 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6272 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6273 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6274 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6275 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6276 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6277 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6278 if (target)
6279 return target;
6280 break;
6281
6282 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6283 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6284 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6285 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6286 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6287 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6288 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6289 if (target)
6290 return target;
6291 break;
6292
6293 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6294 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6295 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6296 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6297 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6298 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6299 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6300 if (target)
6301 return target;
6302 break;
6303
6304 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6305 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6306 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6307 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6308 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6309 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6310 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6311 if (target)
6312 return target;
6313 break;
6314
6315 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6316 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6317 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6318 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6319 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6320 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6321 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6322 if (target)
6323 return target;
6324 break;
6325
6326 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6327 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6328 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6329 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6330 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6331 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6332 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6333 if (target)
6334 return target;
6335 break;
6336
6337 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6338 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6339 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6340 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6341 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6342 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6343 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6344 if (target)
6345 return target;
6346 break;
6347
6348 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6349 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6350 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6351 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6352 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6353 if (mode == VOIDmode)
6354 mode = TYPE_MODE (boolean_type_node);
6355 if (!target || !register_operand (target, mode))
6356 target = gen_reg_rtx (mode);
6357
6358 mode = get_builtin_sync_mode
6359 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6360 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6361 if (target)
6362 return target;
6363 break;
6364
6365 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6366 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6367 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6368 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6369 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6370 mode = get_builtin_sync_mode
6371 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6372 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6373 if (target)
6374 return target;
6375 break;
6376
6377 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6378 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6379 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6380 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6381 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6382 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6383 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6384 if (target)
6385 return target;
6386 break;
6387
6388 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6389 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6390 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6391 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6392 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6393 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6394 expand_builtin_sync_lock_release (mode, exp);
6395 return const0_rtx;
6396
6397 case BUILT_IN_SYNC_SYNCHRONIZE:
6398 expand_builtin_sync_synchronize ();
6399 return const0_rtx;
6400
6401 case BUILT_IN_ATOMIC_EXCHANGE_1:
6402 case BUILT_IN_ATOMIC_EXCHANGE_2:
6403 case BUILT_IN_ATOMIC_EXCHANGE_4:
6404 case BUILT_IN_ATOMIC_EXCHANGE_8:
6405 case BUILT_IN_ATOMIC_EXCHANGE_16:
6406 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6407 target = expand_builtin_atomic_exchange (mode, exp, target);
6408 if (target)
6409 return target;
6410 break;
6411
6412 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6413 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6414 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6415 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6416 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6417 {
6418 unsigned int nargs, z;
6419 vec<tree, va_gc> *vec;
6420
6421 mode =
6422 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6423 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6424 if (target)
6425 return target;
6426
6427 /* If this is turned into an external library call, the weak parameter
6428 must be dropped to match the expected parameter list. */
6429 nargs = call_expr_nargs (exp);
6430 vec_alloc (vec, nargs - 1);
6431 for (z = 0; z < 3; z++)
6432 vec->quick_push (CALL_EXPR_ARG (exp, z));
6433 /* Skip the boolean weak parameter. */
6434 for (z = 4; z < 6; z++)
6435 vec->quick_push (CALL_EXPR_ARG (exp, z));
6436 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6437 break;
6438 }
6439
6440 case BUILT_IN_ATOMIC_LOAD_1:
6441 case BUILT_IN_ATOMIC_LOAD_2:
6442 case BUILT_IN_ATOMIC_LOAD_4:
6443 case BUILT_IN_ATOMIC_LOAD_8:
6444 case BUILT_IN_ATOMIC_LOAD_16:
6445 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6446 target = expand_builtin_atomic_load (mode, exp, target);
6447 if (target)
6448 return target;
6449 break;
6450
6451 case BUILT_IN_ATOMIC_STORE_1:
6452 case BUILT_IN_ATOMIC_STORE_2:
6453 case BUILT_IN_ATOMIC_STORE_4:
6454 case BUILT_IN_ATOMIC_STORE_8:
6455 case BUILT_IN_ATOMIC_STORE_16:
6456 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6457 target = expand_builtin_atomic_store (mode, exp);
6458 if (target)
6459 return const0_rtx;
6460 break;
6461
6462 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6463 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6464 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6465 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6466 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6467 {
6468 enum built_in_function lib;
6469 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6470 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6471 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6472 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6473 ignore, lib);
6474 if (target)
6475 return target;
6476 break;
6477 }
6478 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6479 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6480 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6481 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6482 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6483 {
6484 enum built_in_function lib;
6485 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6486 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6487 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6488 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6489 ignore, lib);
6490 if (target)
6491 return target;
6492 break;
6493 }
6494 case BUILT_IN_ATOMIC_AND_FETCH_1:
6495 case BUILT_IN_ATOMIC_AND_FETCH_2:
6496 case BUILT_IN_ATOMIC_AND_FETCH_4:
6497 case BUILT_IN_ATOMIC_AND_FETCH_8:
6498 case BUILT_IN_ATOMIC_AND_FETCH_16:
6499 {
6500 enum built_in_function lib;
6501 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6502 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6503 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6504 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6505 ignore, lib);
6506 if (target)
6507 return target;
6508 break;
6509 }
6510 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6511 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6512 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6513 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6514 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6515 {
6516 enum built_in_function lib;
6517 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6518 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6519 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6520 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6521 ignore, lib);
6522 if (target)
6523 return target;
6524 break;
6525 }
6526 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6527 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6528 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6529 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6530 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6531 {
6532 enum built_in_function lib;
6533 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6534 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6535 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6536 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6537 ignore, lib);
6538 if (target)
6539 return target;
6540 break;
6541 }
6542 case BUILT_IN_ATOMIC_OR_FETCH_1:
6543 case BUILT_IN_ATOMIC_OR_FETCH_2:
6544 case BUILT_IN_ATOMIC_OR_FETCH_4:
6545 case BUILT_IN_ATOMIC_OR_FETCH_8:
6546 case BUILT_IN_ATOMIC_OR_FETCH_16:
6547 {
6548 enum built_in_function lib;
6549 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6550 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6551 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6552 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6553 ignore, lib);
6554 if (target)
6555 return target;
6556 break;
6557 }
6558 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6559 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6560 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6561 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6562 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6563 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6564 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6565 ignore, BUILT_IN_NONE);
6566 if (target)
6567 return target;
6568 break;
6569
6570 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6571 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6572 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6573 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6574 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6575 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6576 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6577 ignore, BUILT_IN_NONE);
6578 if (target)
6579 return target;
6580 break;
6581
6582 case BUILT_IN_ATOMIC_FETCH_AND_1:
6583 case BUILT_IN_ATOMIC_FETCH_AND_2:
6584 case BUILT_IN_ATOMIC_FETCH_AND_4:
6585 case BUILT_IN_ATOMIC_FETCH_AND_8:
6586 case BUILT_IN_ATOMIC_FETCH_AND_16:
6587 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6588 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6589 ignore, BUILT_IN_NONE);
6590 if (target)
6591 return target;
6592 break;
6593
6594 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6595 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6596 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6597 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6598 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6599 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6600 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6601 ignore, BUILT_IN_NONE);
6602 if (target)
6603 return target;
6604 break;
6605
6606 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6607 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6608 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6609 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6610 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6611 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6612 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6613 ignore, BUILT_IN_NONE);
6614 if (target)
6615 return target;
6616 break;
6617
6618 case BUILT_IN_ATOMIC_FETCH_OR_1:
6619 case BUILT_IN_ATOMIC_FETCH_OR_2:
6620 case BUILT_IN_ATOMIC_FETCH_OR_4:
6621 case BUILT_IN_ATOMIC_FETCH_OR_8:
6622 case BUILT_IN_ATOMIC_FETCH_OR_16:
6623 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6624 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6625 ignore, BUILT_IN_NONE);
6626 if (target)
6627 return target;
6628 break;
6629
6630 case BUILT_IN_ATOMIC_TEST_AND_SET:
6631 return expand_builtin_atomic_test_and_set (exp, target);
6632
6633 case BUILT_IN_ATOMIC_CLEAR:
6634 return expand_builtin_atomic_clear (exp);
6635
6636 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6637 return expand_builtin_atomic_always_lock_free (exp);
6638
6639 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6640 target = expand_builtin_atomic_is_lock_free (exp);
6641 if (target)
6642 return target;
6643 break;
6644
6645 case BUILT_IN_ATOMIC_THREAD_FENCE:
6646 expand_builtin_atomic_thread_fence (exp);
6647 return const0_rtx;
6648
6649 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6650 expand_builtin_atomic_signal_fence (exp);
6651 return const0_rtx;
6652
6653 case BUILT_IN_OBJECT_SIZE:
6654 return expand_builtin_object_size (exp);
6655
6656 case BUILT_IN_MEMCPY_CHK:
6657 case BUILT_IN_MEMPCPY_CHK:
6658 case BUILT_IN_MEMMOVE_CHK:
6659 case BUILT_IN_MEMSET_CHK:
6660 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6661 if (target)
6662 return target;
6663 break;
6664
6665 case BUILT_IN_STRCPY_CHK:
6666 case BUILT_IN_STPCPY_CHK:
6667 case BUILT_IN_STRNCPY_CHK:
6668 case BUILT_IN_STPNCPY_CHK:
6669 case BUILT_IN_STRCAT_CHK:
6670 case BUILT_IN_STRNCAT_CHK:
6671 case BUILT_IN_SNPRINTF_CHK:
6672 case BUILT_IN_VSNPRINTF_CHK:
6673 maybe_emit_chk_warning (exp, fcode);
6674 break;
6675
6676 case BUILT_IN_SPRINTF_CHK:
6677 case BUILT_IN_VSPRINTF_CHK:
6678 maybe_emit_sprintf_chk_warning (exp, fcode);
6679 break;
6680
6681 case BUILT_IN_FREE:
6682 if (warn_free_nonheap_object)
6683 maybe_emit_free_warning (exp);
6684 break;
6685
6686 case BUILT_IN_THREAD_POINTER:
6687 return expand_builtin_thread_pointer (exp, target);
6688
6689 case BUILT_IN_SET_THREAD_POINTER:
6690 expand_builtin_set_thread_pointer (exp);
6691 return const0_rtx;
6692
6693 case BUILT_IN_CILK_DETACH:
6694 expand_builtin_cilk_detach (exp);
6695 return const0_rtx;
6696
6697 case BUILT_IN_CILK_POP_FRAME:
6698 expand_builtin_cilk_pop_frame (exp);
6699 return const0_rtx;
6700
6701 default: /* Just do a library call if this is an unknown builtin. */
6702 break;
6703 }
6704
6705 /* The switch statement above can drop through to cause the function
6706 to be called normally. */
6707 return expand_call (exp, target, ignore);
6708 }
6709
6710 /* Determine whether a tree node represents a call to a built-in
6711 function. If the tree T is a call to a built-in function with
6712 the right number of arguments of the appropriate types, return
6713 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6714 Otherwise the return value is END_BUILTINS. */
6715
6716 enum built_in_function
6717 builtin_mathfn_code (const_tree t)
6718 {
6719 const_tree fndecl, arg, parmlist;
6720 const_tree argtype, parmtype;
6721 const_call_expr_arg_iterator iter;
6722
6723 if (TREE_CODE (t) != CALL_EXPR
6724 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6725 return END_BUILTINS;
6726
6727 fndecl = get_callee_fndecl (t);
6728 if (fndecl == NULL_TREE
6729 || TREE_CODE (fndecl) != FUNCTION_DECL
6730 || ! DECL_BUILT_IN (fndecl)
6731 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6732 return END_BUILTINS;
6733
6734 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6735 init_const_call_expr_arg_iterator (t, &iter);
6736 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6737 {
6738 /* If a function doesn't take a variable number of arguments,
6739 the last element in the list will have type `void'. */
6740 parmtype = TREE_VALUE (parmlist);
6741 if (VOID_TYPE_P (parmtype))
6742 {
6743 if (more_const_call_expr_args_p (&iter))
6744 return END_BUILTINS;
6745 return DECL_FUNCTION_CODE (fndecl);
6746 }
6747
6748 if (! more_const_call_expr_args_p (&iter))
6749 return END_BUILTINS;
6750
6751 arg = next_const_call_expr_arg (&iter);
6752 argtype = TREE_TYPE (arg);
6753
6754 if (SCALAR_FLOAT_TYPE_P (parmtype))
6755 {
6756 if (! SCALAR_FLOAT_TYPE_P (argtype))
6757 return END_BUILTINS;
6758 }
6759 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6760 {
6761 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6762 return END_BUILTINS;
6763 }
6764 else if (POINTER_TYPE_P (parmtype))
6765 {
6766 if (! POINTER_TYPE_P (argtype))
6767 return END_BUILTINS;
6768 }
6769 else if (INTEGRAL_TYPE_P (parmtype))
6770 {
6771 if (! INTEGRAL_TYPE_P (argtype))
6772 return END_BUILTINS;
6773 }
6774 else
6775 return END_BUILTINS;
6776 }
6777
6778 /* Variable-length argument list. */
6779 return DECL_FUNCTION_CODE (fndecl);
6780 }
6781
6782 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6783 evaluate to a constant. */
6784
6785 static tree
6786 fold_builtin_constant_p (tree arg)
6787 {
6788 /* We return 1 for a numeric type that's known to be a constant
6789 value at compile-time or for an aggregate type that's a
6790 literal constant. */
6791 STRIP_NOPS (arg);
6792
6793 /* If we know this is a constant, return integer_one_node. */
6794 if (CONSTANT_CLASS_P (arg)
6795 || (TREE_CODE (arg) == CONSTRUCTOR
6796 && TREE_CONSTANT (arg)))
6797 return integer_one_node;
6798 if (TREE_CODE (arg) == ADDR_EXPR)
6799 {
6800 tree op = TREE_OPERAND (arg, 0);
6801 if (TREE_CODE (op) == STRING_CST
6802 || (TREE_CODE (op) == ARRAY_REF
6803 && integer_zerop (TREE_OPERAND (op, 1))
6804 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6805 return integer_one_node;
6806 }
6807
6808 /* If this expression has side effects, show we don't know it to be a
6809 constant.  Likewise if it's a pointer or aggregate type, since in
6810 those cases we only want literals; those are only optimized
6811 when generating RTL, not later.
6812 And finally, if we are compiling an initializer, not code, we
6813 need to return a definite result now; there's not going to be any
6814 more optimization done. */
6815 if (TREE_SIDE_EFFECTS (arg)
6816 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6817 || POINTER_TYPE_P (TREE_TYPE (arg))
6818 || cfun == 0
6819 || folding_initializer
6820 || force_folding_builtin_constant_p)
6821 return integer_zero_node;
6822
6823 return NULL_TREE;
6824 }
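
/* A rough sketch of how the fold above behaves at the C level,
   assuming a typical optimizing compilation; the rewrites are
   schematic rather than literal trees:

     __builtin_constant_p (42)      -> 1
     __builtin_constant_p ("abc")   -> 1  (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (x++)     -> 0  (side effects)
     __builtin_constant_p (maybe)   -> NULL_TREE, i.e. the answer is
                                       deferred until later passes.  */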
6825
6826 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6827 return it as a truthvalue. */
6828
6829 static tree
6830 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6831 {
6832 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6833
6834 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6835 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6836 ret_type = TREE_TYPE (TREE_TYPE (fn));
6837 pred_type = TREE_VALUE (arg_types);
6838 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6839
6840 pred = fold_convert_loc (loc, pred_type, pred);
6841 expected = fold_convert_loc (loc, expected_type, expected);
6842 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6843
6844 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6845 build_int_cst (ret_type, 0));
6846 }
6847
6848 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6849 NULL_TREE if no simplification is possible. */
6850
6851 static tree
6852 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6853 {
6854 tree inner, fndecl, inner_arg0;
6855 enum tree_code code;
6856
6857 /* Distribute the expected value over short-circuiting operators.
6858 See through the cast from truthvalue_type_node to long. */
6859 inner_arg0 = arg0;
6860 while (TREE_CODE (inner_arg0) == NOP_EXPR
6861 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6862 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6863 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6864
6865 /* If this is a builtin_expect within a builtin_expect keep the
6866 inner one. See through a comparison against a constant. It
6867 might have been added to create a truthvalue.  */
6868 inner = inner_arg0;
6869
6870 if (COMPARISON_CLASS_P (inner)
6871 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6872 inner = TREE_OPERAND (inner, 0);
6873
6874 if (TREE_CODE (inner) == CALL_EXPR
6875 && (fndecl = get_callee_fndecl (inner))
6876 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6877 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6878 return arg0;
6879
6880 inner = inner_arg0;
6881 code = TREE_CODE (inner);
6882 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6883 {
6884 tree op0 = TREE_OPERAND (inner, 0);
6885 tree op1 = TREE_OPERAND (inner, 1);
6886
6887 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6888 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6889 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6890
6891 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6892 }
6893
6894 /* If the argument isn't invariant then there's nothing else we can do. */
6895 if (!TREE_CONSTANT (inner_arg0))
6896 return NULL_TREE;
6897
6898 /* If we expect that a comparison against the argument will fold to
6899 a constant, return the constant.  In practice, this means a true
6900 constant or the address of a non-weak symbol. */
6901 inner = inner_arg0;
6902 STRIP_NOPS (inner);
6903 if (TREE_CODE (inner) == ADDR_EXPR)
6904 {
6905 do
6906 {
6907 inner = TREE_OPERAND (inner, 0);
6908 }
6909 while (TREE_CODE (inner) == COMPONENT_REF
6910 || TREE_CODE (inner) == ARRAY_REF);
6911 if ((TREE_CODE (inner) == VAR_DECL
6912 || TREE_CODE (inner) == FUNCTION_DECL)
6913 && DECL_WEAK (inner))
6914 return NULL_TREE;
6915 }
6916
6917 /* Otherwise, ARG0 already has the proper type for the return value. */
6918 return arg0;
6919 }
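
/* A rough sketch of the distribution over short-circuit operators
   performed above; the rewrite is schematic C rather than literal
   trees:

     __builtin_expect (a && b, 1)
       -> (__builtin_expect ((long) a, 1) != 0)
          && (__builtin_expect ((long) b, 1) != 0)

   so each sub-predicate carries the expectation.  When the argument
   is already constant, e.g. __builtin_expect (1, 1), the call simply
   folds to its first argument.  */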
6920
6921 /* Fold a call to __builtin_classify_type with argument ARG. */
6922
6923 static tree
6924 fold_builtin_classify_type (tree arg)
6925 {
6926 if (arg == 0)
6927 return build_int_cst (integer_type_node, no_type_class);
6928
6929 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
6930 }
6931
6932 /* Fold a call to __builtin_strlen with argument ARG. */
6933
6934 static tree
6935 fold_builtin_strlen (location_t loc, tree type, tree arg)
6936 {
6937 if (!validate_arg (arg, POINTER_TYPE))
6938 return NULL_TREE;
6939 else
6940 {
6941 tree len = c_strlen (arg, 0);
6942
6943 if (len)
6944 return fold_convert_loc (loc, type, len);
6945
6946 return NULL_TREE;
6947 }
6948 }
6949
6950 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6951
6952 static tree
6953 fold_builtin_inf (location_t loc, tree type, int warn)
6954 {
6955 REAL_VALUE_TYPE real;
6956
6957 /* __builtin_inff is intended to be usable to define INFINITY on all
6958 targets. If an infinity is not available, INFINITY expands "to a
6959 positive constant of type float that overflows at translation
6960 time", footnote "In this case, using INFINITY will violate the
6961 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6962 Thus we pedwarn to ensure this constraint violation is
6963 diagnosed. */
6964 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6965 pedwarn (loc, 0, "target format does not support infinity");
6966
6967 real_inf (&real);
6968 return build_real (type, real);
6969 }
6970
6971 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6972
6973 static tree
6974 fold_builtin_nan (tree arg, tree type, int quiet)
6975 {
6976 REAL_VALUE_TYPE real;
6977 const char *str;
6978
6979 if (!validate_arg (arg, POINTER_TYPE))
6980 return NULL_TREE;
6981 str = c_getstr (arg);
6982 if (!str)
6983 return NULL_TREE;
6984
6985 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6986 return NULL_TREE;
6987
6988 return build_real (type, real);
6989 }
6990
6991 /* Return true if the floating point expression T has an integer value.
6992 We also allow +Inf, -Inf and NaN to be considered integer values. */
6993
6994 static bool
6995 integer_valued_real_p (tree t)
6996 {
6997 switch (TREE_CODE (t))
6998 {
6999 case FLOAT_EXPR:
7000 return true;
7001
7002 case ABS_EXPR:
7003 case SAVE_EXPR:
7004 return integer_valued_real_p (TREE_OPERAND (t, 0));
7005
7006 case COMPOUND_EXPR:
7007 case MODIFY_EXPR:
7008 case BIND_EXPR:
7009 return integer_valued_real_p (TREE_OPERAND (t, 1));
7010
7011 case PLUS_EXPR:
7012 case MINUS_EXPR:
7013 case MULT_EXPR:
7014 case MIN_EXPR:
7015 case MAX_EXPR:
7016 return integer_valued_real_p (TREE_OPERAND (t, 0))
7017 && integer_valued_real_p (TREE_OPERAND (t, 1));
7018
7019 case COND_EXPR:
7020 return integer_valued_real_p (TREE_OPERAND (t, 1))
7021 && integer_valued_real_p (TREE_OPERAND (t, 2));
7022
7023 case REAL_CST:
7024 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7025
7026 case NOP_EXPR:
7027 {
7028 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7029 if (TREE_CODE (type) == INTEGER_TYPE)
7030 return true;
7031 if (TREE_CODE (type) == REAL_TYPE)
7032 return integer_valued_real_p (TREE_OPERAND (t, 0));
7033 break;
7034 }
7035
7036 case CALL_EXPR:
7037 switch (builtin_mathfn_code (t))
7038 {
7039 CASE_FLT_FN (BUILT_IN_CEIL):
7040 CASE_FLT_FN (BUILT_IN_FLOOR):
7041 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7042 CASE_FLT_FN (BUILT_IN_RINT):
7043 CASE_FLT_FN (BUILT_IN_ROUND):
7044 CASE_FLT_FN (BUILT_IN_TRUNC):
7045 return true;
7046
7047 CASE_FLT_FN (BUILT_IN_FMIN):
7048 CASE_FLT_FN (BUILT_IN_FMAX):
7049 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7050 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7051
7052 default:
7053 break;
7054 }
7055 break;
7056
7057 default:
7058 break;
7059 }
7060 return false;
7061 }
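
/* Schematic examples of trees the predicate above accepts, assuming
   int i and double x, y:

     (double) i             integer valued (FLOAT_EXPR)
     floor (x) + ceil (y)   integer valued (PLUS_EXPR of two
                            integer-valued rounding calls)
     fabs (trunc (x))       integer valued (ABS_EXPR of trunc)
     x * 0.5                not provably integer valued -> false.  */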
7062
7063 /* FNDECL is assumed to be a builtin where truncation can be propagated
7064 across (for instance floor((double)f) == (double)floorf (f)).
7065 Do the transformation for a call with argument ARG. */
7066
7067 static tree
7068 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7069 {
7070 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7071
7072 if (!validate_arg (arg, REAL_TYPE))
7073 return NULL_TREE;
7074
7075 /* Integer rounding functions are idempotent. */
7076 if (fcode == builtin_mathfn_code (arg))
7077 return arg;
7078
7079 /* If argument is already integer valued, and we don't need to worry
7080 about setting errno, there's no need to perform rounding. */
7081 if (! flag_errno_math && integer_valued_real_p (arg))
7082 return arg;
7083
7084 if (optimize)
7085 {
7086 tree arg0 = strip_float_extensions (arg);
7087 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7088 tree newtype = TREE_TYPE (arg0);
7089 tree decl;
7090
7091 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7092 && (decl = mathfn_built_in (newtype, fcode)))
7093 return fold_convert_loc (loc, ftype,
7094 build_call_expr_loc (loc, decl, 1,
7095 fold_convert_loc (loc,
7096 newtype,
7097 arg0)));
7098 }
7099 return NULL_TREE;
7100 }
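
/* A rough sketch of the truncation-transparent folds above, for a
   float f and double x; the rewrites are schematic:

     floor (floor (x))  -> floor (x)             (idempotence)
     floor ((double) f) -> (double) floorf (f)   (narrowing, when
                                                 optimizing and floorf
                                                 is available).  */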
7101
7102 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7103 the argument, for instance lround((double)f) -> lroundf (f).
7104 Do the transformation for a call with argument ARG. */
7105
7106 static tree
7107 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7108 {
7109 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7110
7111 if (!validate_arg (arg, REAL_TYPE))
7112 return NULL_TREE;
7113
7114 /* If argument is already integer valued, and we don't need to worry
7115 about setting errno, there's no need to perform rounding. */
7116 if (! flag_errno_math && integer_valued_real_p (arg))
7117 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7118 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7119
7120 if (optimize)
7121 {
7122 tree ftype = TREE_TYPE (arg);
7123 tree arg0 = strip_float_extensions (arg);
7124 tree newtype = TREE_TYPE (arg0);
7125 tree decl;
7126
7127 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7128 && (decl = mathfn_built_in (newtype, fcode)))
7129 return build_call_expr_loc (loc, decl, 1,
7130 fold_convert_loc (loc, newtype, arg0));
7131 }
7132
7133 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7134 sizeof (int) == sizeof (long). */
7135 if (TYPE_PRECISION (integer_type_node)
7136 == TYPE_PRECISION (long_integer_type_node))
7137 {
7138 tree newfn = NULL_TREE;
7139 switch (fcode)
7140 {
7141 CASE_FLT_FN (BUILT_IN_ICEIL):
7142 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7143 break;
7144
7145 CASE_FLT_FN (BUILT_IN_IFLOOR):
7146 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7147 break;
7148
7149 CASE_FLT_FN (BUILT_IN_IROUND):
7150 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7151 break;
7152
7153 CASE_FLT_FN (BUILT_IN_IRINT):
7154 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7155 break;
7156
7157 default:
7158 break;
7159 }
7160
7161 if (newfn)
7162 {
7163 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7164 return fold_convert_loc (loc,
7165 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7166 }
7167 }
7168
7169 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7170 sizeof (long long) == sizeof (long). */
7171 if (TYPE_PRECISION (long_long_integer_type_node)
7172 == TYPE_PRECISION (long_integer_type_node))
7173 {
7174 tree newfn = NULL_TREE;
7175 switch (fcode)
7176 {
7177 CASE_FLT_FN (BUILT_IN_LLCEIL):
7178 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7179 break;
7180
7181 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7182 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7183 break;
7184
7185 CASE_FLT_FN (BUILT_IN_LLROUND):
7186 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7187 break;
7188
7189 CASE_FLT_FN (BUILT_IN_LLRINT):
7190 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7191 break;
7192
7193 default:
7194 break;
7195 }
7196
7197 if (newfn)
7198 {
7199 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7200 return fold_convert_loc (loc,
7201 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7202 }
7203 }
7204
7205 return NULL_TREE;
7206 }
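
/* A rough sketch of the narrowing and canonicalization above, for a
   float f; the rewrites are schematic:

     lround ((double) f) -> lroundf (f)
     iround (x)          -> (int) lround (x)        when sizeof (int)
                                                    == sizeof (long)
     llround (x)         -> (long long) lround (x)  on LP64, where long
                                                    and long long have
                                                    the same width.  */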
7207
7208 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7209 return type. Return NULL_TREE if no simplification can be made. */
7210
7211 static tree
7212 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7213 {
7214 tree res;
7215
7216 if (!validate_arg (arg, COMPLEX_TYPE)
7217 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7218 return NULL_TREE;
7219
7220 /* Calculate the result when the argument is a constant. */
7221 if (TREE_CODE (arg) == COMPLEX_CST
7222 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7223 type, mpfr_hypot)))
7224 return res;
7225
7226 if (TREE_CODE (arg) == COMPLEX_EXPR)
7227 {
7228 tree real = TREE_OPERAND (arg, 0);
7229 tree imag = TREE_OPERAND (arg, 1);
7230
7231 /* If either part is zero, cabs is fabs of the other. */
7232 if (real_zerop (real))
7233 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7234 if (real_zerop (imag))
7235 return fold_build1_loc (loc, ABS_EXPR, type, real);
7236
7237 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7238 if (flag_unsafe_math_optimizations
7239 && operand_equal_p (real, imag, OEP_PURE_SAME))
7240 {
7241 const REAL_VALUE_TYPE sqrt2_trunc
7242 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7243 STRIP_NOPS (real);
7244 return fold_build2_loc (loc, MULT_EXPR, type,
7245 fold_build1_loc (loc, ABS_EXPR, type, real),
7246 build_real (type, sqrt2_trunc));
7247 }
7248 }
7249
7250 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7251 if (TREE_CODE (arg) == NEGATE_EXPR
7252 || TREE_CODE (arg) == CONJ_EXPR)
7253 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7254
7255 /* Don't do this when optimizing for size. */
7256 if (flag_unsafe_math_optimizations
7257 && optimize && optimize_function_for_speed_p (cfun))
7258 {
7259 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7260
7261 if (sqrtfn != NULL_TREE)
7262 {
7263 tree rpart, ipart, result;
7264
7265 arg = builtin_save_expr (arg);
7266
7267 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7268 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7269
7270 rpart = builtin_save_expr (rpart);
7271 ipart = builtin_save_expr (ipart);
7272
7273 result = fold_build2_loc (loc, PLUS_EXPR, type,
7274 fold_build2_loc (loc, MULT_EXPR, type,
7275 rpart, rpart),
7276 fold_build2_loc (loc, MULT_EXPR, type,
7277 ipart, ipart));
7278
7279 return build_call_expr_loc (loc, sqrtfn, 1, result);
7280 }
7281 }
7282
7283 return NULL_TREE;
7284 }
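
/* Schematic examples of the cabs folds above:

     cabs (3.0 + 4.0i) -> 5.0           (constant folded via MPFR)
     cabs (x + 0.0i)   -> fabs (x)
     cabs (-z)         -> cabs (z)
     cabs (x + x*1.0i) -> fabs (x) * sqrt (2)  under
                                               -funsafe-math-optimizations
     cabs (z)          -> sqrt (r*r + i*i) with r = creal (z) and
                          i = cimag (z), under unsafe math, when
                          optimizing for speed and sqrt is available.  */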
7285
7286 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7287 complex tree type of the result. If NEG is true, the imaginary
7288 zero is negative. */
7289
7290 static tree
7291 build_complex_cproj (tree type, bool neg)
7292 {
7293 REAL_VALUE_TYPE rinf, rzero = dconst0;
7294
7295 real_inf (&rinf);
7296 rzero.sign = neg;
7297 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7298 build_real (TREE_TYPE (type), rzero));
7299 }
7300
7301 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7302 return type. Return NULL_TREE if no simplification can be made. */
7303
7304 static tree
7305 fold_builtin_cproj (location_t loc, tree arg, tree type)
7306 {
7307 if (!validate_arg (arg, COMPLEX_TYPE)
7308 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7309 return NULL_TREE;
7310
7311 /* If there are no infinities, return arg. */
7312 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7313 return non_lvalue_loc (loc, arg);
7314
7315 /* Calculate the result when the argument is a constant. */
7316 if (TREE_CODE (arg) == COMPLEX_CST)
7317 {
7318 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7319 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7320
7321 if (real_isinf (real) || real_isinf (imag))
7322 return build_complex_cproj (type, imag->sign);
7323 else
7324 return arg;
7325 }
7326 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7327 {
7328 tree real = TREE_OPERAND (arg, 0);
7329 tree imag = TREE_OPERAND (arg, 1);
7330
7331 STRIP_NOPS (real);
7332 STRIP_NOPS (imag);
7333
7334 /* If the real part is inf and the imag part is known to be
7335 nonnegative, return (inf + 0i). Remember side-effects are
7336 possible in the imag part. */
7337 if (TREE_CODE (real) == REAL_CST
7338 && real_isinf (TREE_REAL_CST_PTR (real))
7339 && tree_expr_nonnegative_p (imag))
7340 return omit_one_operand_loc (loc, type,
7341 build_complex_cproj (type, false),
7342 arg);
7343
7344 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7345 Remember side-effects are possible in the real part. */
7346 if (TREE_CODE (imag) == REAL_CST
7347 && real_isinf (TREE_REAL_CST_PTR (imag)))
7348 return
7349 omit_one_operand_loc (loc, type,
7350 build_complex_cproj (type, TREE_REAL_CST_PTR
7351 (imag)->sign), arg);
7352 }
7353
7354 return NULL_TREE;
7355 }
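
/* Schematic examples of the cproj folds above:

     cproj (z)          -> z            when the target has no
                                        infinities
     cproj (2.0 + 3.0i) -> 2.0 + 3.0i   (finite constant)
     cproj (INFINITY + y*1.0i)
                        -> INFINITY + copysign (0.0, y) * 1.0i.  */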
7356
7357 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7358 Return NULL_TREE if no simplification can be made. */
7359
7360 static tree
7361 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7362 {
7364 enum built_in_function fcode;
7365 tree res;
7366
7367 if (!validate_arg (arg, REAL_TYPE))
7368 return NULL_TREE;
7369
7370 /* Calculate the result when the argument is a constant. */
7371 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7372 return res;
7373
7374 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7375 fcode = builtin_mathfn_code (arg);
7376 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7377 {
7378 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7379 arg = fold_build2_loc (loc, MULT_EXPR, type,
7380 CALL_EXPR_ARG (arg, 0),
7381 build_real (type, dconsthalf));
7382 return build_call_expr_loc (loc, expfn, 1, arg);
7383 }
7384
7385 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7386 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7387 {
7388 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7389
7390 if (powfn)
7391 {
7392 tree arg0 = CALL_EXPR_ARG (arg, 0);
7393 tree tree_root;
7394 /* The inner root was either sqrt or cbrt. */
7395 /* This was a conditional expression but it triggered a bug
7396 in Sun C 5.5. */
7397 REAL_VALUE_TYPE dconstroot;
7398 if (BUILTIN_SQRT_P (fcode))
7399 dconstroot = dconsthalf;
7400 else
7401 dconstroot = dconst_third ();
7402
7403 /* Adjust for the outer root. */
7404 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7405 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7406 tree_root = build_real (type, dconstroot);
7407 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7408 }
7409 }
7410
7411 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7412 if (flag_unsafe_math_optimizations
7413 && (fcode == BUILT_IN_POW
7414 || fcode == BUILT_IN_POWF
7415 || fcode == BUILT_IN_POWL))
7416 {
7417 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7418 tree arg0 = CALL_EXPR_ARG (arg, 0);
7419 tree arg1 = CALL_EXPR_ARG (arg, 1);
7420 tree narg1;
7421 if (!tree_expr_nonnegative_p (arg0))
7422 arg0 = build1 (ABS_EXPR, type, arg0);
7423 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7424 build_real (type, dconsthalf));
7425 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7426 }
7427
7428 return NULL_TREE;
7429 }
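
/* A rough sketch of the sqrt folds above, all requiring
   -funsafe-math-optimizations; the rewrites are schematic:

     sqrt (exp (x))    -> exp (x * 0.5)
     sqrt (sqrt (x))   -> pow (x, 0.25)
     sqrt (cbrt (x))   -> pow (x, 1.0/6.0)
     sqrt (pow (x, y)) -> pow (fabs (x), y * 0.5)

   A constant argument is instead evaluated directly with MPFR.  */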
7430
7431 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7432 Return NULL_TREE if no simplification can be made. */
7433
7434 static tree
7435 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7436 {
7437 const enum built_in_function fcode = builtin_mathfn_code (arg);
7438 tree res;
7439
7440 if (!validate_arg (arg, REAL_TYPE))
7441 return NULL_TREE;
7442
7443 /* Calculate the result when the argument is a constant. */
7444 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7445 return res;
7446
7447 if (flag_unsafe_math_optimizations)
7448 {
7449 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7450 if (BUILTIN_EXPONENT_P (fcode))
7451 {
7452 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7453 const REAL_VALUE_TYPE third_trunc =
7454 real_value_truncate (TYPE_MODE (type), dconst_third ());
7455 arg = fold_build2_loc (loc, MULT_EXPR, type,
7456 CALL_EXPR_ARG (arg, 0),
7457 build_real (type, third_trunc));
7458 return build_call_expr_loc (loc, expfn, 1, arg);
7459 }
7460
7461 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7462 if (BUILTIN_SQRT_P (fcode))
7463 {
7464 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7465
7466 if (powfn)
7467 {
7468 tree arg0 = CALL_EXPR_ARG (arg, 0);
7469 tree tree_root;
7470 REAL_VALUE_TYPE dconstroot = dconst_third ();
7471
7472 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7473 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7474 tree_root = build_real (type, dconstroot);
7475 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7476 }
7477 }
7478
7479 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7480 if (BUILTIN_CBRT_P (fcode))
7481 {
7482 tree arg0 = CALL_EXPR_ARG (arg, 0);
7483 if (tree_expr_nonnegative_p (arg0))
7484 {
7485 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7486
7487 if (powfn)
7488 {
7489 tree tree_root;
7490 REAL_VALUE_TYPE dconstroot;
7491
7492 real_arithmetic (&dconstroot, MULT_EXPR,
7493 dconst_third_ptr (), dconst_third_ptr ());
7494 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7495 tree_root = build_real (type, dconstroot);
7496 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7497 }
7498 }
7499 }
7500
7501 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7502 if (fcode == BUILT_IN_POW
7503 || fcode == BUILT_IN_POWF
7504 || fcode == BUILT_IN_POWL)
7505 {
7506 tree arg00 = CALL_EXPR_ARG (arg, 0);
7507 tree arg01 = CALL_EXPR_ARG (arg, 1);
7508 if (tree_expr_nonnegative_p (arg00))
7509 {
7510 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7511 const REAL_VALUE_TYPE dconstroot
7512 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7513 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7514 build_real (type, dconstroot));
7515 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7516 }
7517 }
7518 }
7519 return NULL_TREE;
7520 }
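
/* A rough sketch of the cbrt folds above, all requiring
   -funsafe-math-optimizations; the rewrites are schematic:

     cbrt (exp (x))    -> exp (x / 3)
     cbrt (sqrt (x))   -> pow (x, 1.0/6.0)
     cbrt (cbrt (x))   -> pow (x, 1.0/9.0)  iff x is known nonnegative
     cbrt (pow (x, y)) -> pow (x, y / 3)    iff x is known
                                            nonnegative.  */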
7521
7522 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7523 TYPE is the type of the return value. Return NULL_TREE if no
7524 simplification can be made. */
7525
7526 static tree
7527 fold_builtin_cos (location_t loc,
7528 tree arg, tree type, tree fndecl)
7529 {
7530 tree res, narg;
7531
7532 if (!validate_arg (arg, REAL_TYPE))
7533 return NULL_TREE;
7534
7535 /* Calculate the result when the argument is a constant. */
7536 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7537 return res;
7538
7539 /* Optimize cos(-x) into cos (x). */
7540 if ((narg = fold_strip_sign_ops (arg)))
7541 return build_call_expr_loc (loc, fndecl, 1, narg);
7542
7543 return NULL_TREE;
7544 }
7545
7546 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7547 Return NULL_TREE if no simplification can be made. */
7548
7549 static tree
7550 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7551 {
7552 if (validate_arg (arg, REAL_TYPE))
7553 {
7554 tree res, narg;
7555
7556 /* Calculate the result when the argument is a constant. */
7557 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7558 return res;
7559
7560 /* Optimize cosh(-x) into cosh (x). */
7561 if ((narg = fold_strip_sign_ops (arg)))
7562 return build_call_expr_loc (loc, fndecl, 1, narg);
7563 }
7564
7565 return NULL_TREE;
7566 }
7567
7568 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7569 argument ARG. TYPE is the type of the return value. Return
7570 NULL_TREE if no simplification can be made. */
7571
7572 static tree
7573 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7574 bool hyper)
7575 {
7576 if (validate_arg (arg, COMPLEX_TYPE)
7577 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7578 {
7579 tree tmp;
7580
7581 /* Calculate the result when the argument is a constant. */
7582 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7583 return tmp;
7584
7585 /* Optimize fn(-x) into fn(x). */
7586 if ((tmp = fold_strip_sign_ops (arg)))
7587 return build_call_expr_loc (loc, fndecl, 1, tmp);
7588 }
7589
7590 return NULL_TREE;
7591 }
7592
7593 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7594 Return NULL_TREE if no simplification can be made. */
7595
7596 static tree
7597 fold_builtin_tan (tree arg, tree type)
7598 {
7599 enum built_in_function fcode;
7600 tree res;
7601
7602 if (!validate_arg (arg, REAL_TYPE))
7603 return NULL_TREE;
7604
7605 /* Calculate the result when the argument is a constant. */
7606 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7607 return res;
7608
7609 /* Optimize tan(atan(x)) = x. */
7610 fcode = builtin_mathfn_code (arg);
7611 if (flag_unsafe_math_optimizations
7612 && (fcode == BUILT_IN_ATAN
7613 || fcode == BUILT_IN_ATANF
7614 || fcode == BUILT_IN_ATANL))
7615 return CALL_EXPR_ARG (arg, 0);
7616
7617 return NULL_TREE;
7618 }
7619
7620 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7621 NULL_TREE if no simplification can be made. */
7622
7623 static tree
7624 fold_builtin_sincos (location_t loc,
7625 tree arg0, tree arg1, tree arg2)
7626 {
7627 tree type;
7628 tree res, fn, call;
7629
7630 if (!validate_arg (arg0, REAL_TYPE)
7631 || !validate_arg (arg1, POINTER_TYPE)
7632 || !validate_arg (arg2, POINTER_TYPE))
7633 return NULL_TREE;
7634
7635 type = TREE_TYPE (arg0);
7636
7637 /* Calculate the result when the argument is a constant. */
7638 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7639 return res;
7640
7641 /* Canonicalize sincos to cexpi. */
7642 if (!targetm.libc_has_function (function_c99_math_complex))
7643 return NULL_TREE;
7644 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7645 if (!fn)
7646 return NULL_TREE;
7647
7648 call = build_call_expr_loc (loc, fn, 1, arg0);
7649 call = builtin_save_expr (call);
7650
7651 return build2 (COMPOUND_EXPR, void_type_node,
7652 build2 (MODIFY_EXPR, void_type_node,
7653 build_fold_indirect_ref_loc (loc, arg1),
7654 build1 (IMAGPART_EXPR, type, call)),
7655 build2 (MODIFY_EXPR, void_type_node,
7656 build_fold_indirect_ref_loc (loc, arg2),
7657 build1 (REALPART_EXPR, type, call)));
7658 }
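
/* A rough sketch of the canonicalization above, assuming the C
   library provides C99 complex math and cexpi (x) == cos (x)
   + I * sin (x); the rewrite is schematic:

     sincos (x, &sinx, &cosx);
       ->
     tmp = cexpi (x);
     sinx = __imag__ tmp;
     cosx = __real__ tmp;

   which lets later passes share a single evaluation between the sine
   and the cosine.  */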
7659
7660 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7661 NULL_TREE if no simplification can be made. */
7662
7663 static tree
7664 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7665 {
7666 tree rtype;
7667 tree realp, imagp, ifn;
7668 tree res;
7669
7670 if (!validate_arg (arg0, COMPLEX_TYPE)
7671 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7672 return NULL_TREE;
7673
7674 /* Calculate the result when the argument is a constant. */
7675 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7676 return res;
7677
7678 rtype = TREE_TYPE (TREE_TYPE (arg0));
7679
7680 /* In case we can figure out the real part of arg0 and it is constant zero
7681 fold to cexpi. */
7682 if (!targetm.libc_has_function (function_c99_math_complex))
7683 return NULL_TREE;
7684 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7685 if (!ifn)
7686 return NULL_TREE;
7687
7688 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7689 && real_zerop (realp))
7690 {
7691 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7692 return build_call_expr_loc (loc, ifn, 1, narg);
7693 }
7694
7695 /* In case we can easily decompose real and imaginary parts split cexp
7696 to exp (r) * cexpi (i). */
7697 if (flag_unsafe_math_optimizations
7698 && realp)
7699 {
7700 tree rfn, rcall, icall;
7701
7702 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7703 if (!rfn)
7704 return NULL_TREE;
7705
7706 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7707 if (!imagp)
7708 return NULL_TREE;
7709
7710 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7711 icall = builtin_save_expr (icall);
7712 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7713 rcall = builtin_save_expr (rcall);
7714 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7715 fold_build2_loc (loc, MULT_EXPR, rtype,
7716 rcall,
7717 fold_build1_loc (loc, REALPART_EXPR,
7718 rtype, icall)),
7719 fold_build2_loc (loc, MULT_EXPR, rtype,
7720 rcall,
7721 fold_build1_loc (loc, IMAGPART_EXPR,
7722 rtype, icall)));
7723 }
7724
7725 return NULL_TREE;
7726 }
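
/* A rough sketch of the cexp folds above, assuming C99 complex math
   in the C library; the rewrites are schematic:

     cexp (0.0 + y*1.0i) -> cexpi (y)
     cexp (x + y*1.0i)   -> exp (x) * cexpi (y)  under
                            -funsafe-math-optimizations, when both
                            parts can be decomposed.  */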
7727
7728 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7729 Return NULL_TREE if no simplification can be made. */
7730
7731 static tree
7732 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7733 {
7734 if (!validate_arg (arg, REAL_TYPE))
7735 return NULL_TREE;
7736
7737 /* Optimize trunc of constant value. */
7738 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7739 {
7740 REAL_VALUE_TYPE r, x;
7741 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7742
7743 x = TREE_REAL_CST (arg);
7744 real_trunc (&r, TYPE_MODE (type), &x);
7745 return build_real (type, r);
7746 }
7747
7748 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7749 }
7750
7751 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7752 Return NULL_TREE if no simplification can be made. */
7753
7754 static tree
7755 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7756 {
7757 if (!validate_arg (arg, REAL_TYPE))
7758 return NULL_TREE;
7759
7760 /* Optimize floor of constant value. */
7761 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7762 {
7763 REAL_VALUE_TYPE x;
7764
7765 x = TREE_REAL_CST (arg);
7766 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7767 {
7768 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7769 REAL_VALUE_TYPE r;
7770
7771 real_floor (&r, TYPE_MODE (type), &x);
7772 return build_real (type, r);
7773 }
7774 }
7775
7776 /* Fold floor (x) where x is nonnegative to trunc (x). */
7777 if (tree_expr_nonnegative_p (arg))
7778 {
7779 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7780 if (truncfn)
7781 return build_call_expr_loc (loc, truncfn, 1, arg);
7782 }
7783
7784 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7785 }
7786
7787 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7788 Return NULL_TREE if no simplification can be made. */
7789
7790 static tree
7791 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7792 {
7793 if (!validate_arg (arg, REAL_TYPE))
7794 return NULL_TREE;
7795
7796 /* Optimize ceil of constant value. */
7797 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7798 {
7799 REAL_VALUE_TYPE x;
7800
7801 x = TREE_REAL_CST (arg);
7802 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7803 {
7804 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7805 REAL_VALUE_TYPE r;
7806
7807 real_ceil (&r, TYPE_MODE (type), &x);
7808 return build_real (type, r);
7809 }
7810 }
7811
7812 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7813 }
7814
7815 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7816 Return NULL_TREE if no simplification can be made. */
7817
7818 static tree
7819 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7820 {
7821 if (!validate_arg (arg, REAL_TYPE))
7822 return NULL_TREE;
7823
7824 /* Optimize round of constant value. */
7825 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7826 {
7827 REAL_VALUE_TYPE x;
7828
7829 x = TREE_REAL_CST (arg);
7830 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7831 {
7832 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7833 REAL_VALUE_TYPE r;
7834
7835 real_round (&r, TYPE_MODE (type), &x);
7836 return build_real (type, r);
7837 }
7838 }
7839
7840 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7841 }
7842
7843 /* Fold function call to builtin lround, lroundf or lroundl (or the
7844 corresponding long long versions) and other rounding functions. ARG
7845 is the argument to the call. Return NULL_TREE if no simplification
7846 can be made. */
7847
7848 static tree
7849 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7850 {
7851 if (!validate_arg (arg, REAL_TYPE))
7852 return NULL_TREE;
7853
7854 /* Optimize lround of constant value. */
7855 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7856 {
7857 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7858
7859 if (real_isfinite (&x))
7860 {
7861 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7862 tree ftype = TREE_TYPE (arg);
7863 double_int val;
7864 REAL_VALUE_TYPE r;
7865
7866 switch (DECL_FUNCTION_CODE (fndecl))
7867 {
7868 CASE_FLT_FN (BUILT_IN_IFLOOR):
7869 CASE_FLT_FN (BUILT_IN_LFLOOR):
7870 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7871 real_floor (&r, TYPE_MODE (ftype), &x);
7872 break;
7873
7874 CASE_FLT_FN (BUILT_IN_ICEIL):
7875 CASE_FLT_FN (BUILT_IN_LCEIL):
7876 CASE_FLT_FN (BUILT_IN_LLCEIL):
7877 real_ceil (&r, TYPE_MODE (ftype), &x);
7878 break;
7879
7880 CASE_FLT_FN (BUILT_IN_IROUND):
7881 CASE_FLT_FN (BUILT_IN_LROUND):
7882 CASE_FLT_FN (BUILT_IN_LLROUND):
7883 real_round (&r, TYPE_MODE (ftype), &x);
7884 break;
7885
7886 default:
7887 gcc_unreachable ();
7888 }
7889
7890 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7891 if (double_int_fits_to_tree_p (itype, val))
7892 return double_int_to_tree (itype, val);
7893 }
7894 }
7895
7896 switch (DECL_FUNCTION_CODE (fndecl))
7897 {
7898 CASE_FLT_FN (BUILT_IN_LFLOOR):
7899 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7900 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7901 if (tree_expr_nonnegative_p (arg))
7902 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7903 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7904 break;
7905 default:;
7906 }
7907
7908 return fold_fixed_mathfn (loc, fndecl, arg);
7909 }
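
/* Schematic examples of the integer rounding folds above:

     lround (2.5)  -> 3         (real_round: ties away from zero)
     lround (-2.5) -> -3
     lceil (2.1)   -> 3
     lfloor (x)    -> (long) x  when x is known nonnegative, since
                                floor and truncation then agree.

   A constant result is produced only if it fits the integer return
   type.  */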
7910
7911 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7912 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7913 the argument to the call. Return NULL_TREE if no simplification can
7914 be made. */
7915
7916 static tree
7917 fold_builtin_bitop (tree fndecl, tree arg)
7918 {
7919 if (!validate_arg (arg, INTEGER_TYPE))
7920 return NULL_TREE;
7921
7922 /* Optimize for constant argument. */
7923 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7924 {
7925 HOST_WIDE_INT hi, width, result;
7926 unsigned HOST_WIDE_INT lo;
7927 tree type;
7928
7929 type = TREE_TYPE (arg);
7930 width = TYPE_PRECISION (type);
7931 lo = TREE_INT_CST_LOW (arg);
7932
7933 /* Clear all the bits that are beyond the type's precision. */
7934 if (width > HOST_BITS_PER_WIDE_INT)
7935 {
7936 hi = TREE_INT_CST_HIGH (arg);
7937 if (width < HOST_BITS_PER_DOUBLE_INT)
7938 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
7939 }
7940 else
7941 {
7942 hi = 0;
7943 if (width < HOST_BITS_PER_WIDE_INT)
7944 lo &= ~(HOST_WIDE_INT_M1U << width);
7945 }
7946
7947 switch (DECL_FUNCTION_CODE (fndecl))
7948 {
7949 CASE_INT_FN (BUILT_IN_FFS):
7950 if (lo != 0)
7951 result = ffs_hwi (lo);
7952 else if (hi != 0)
7953 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7954 else
7955 result = 0;
7956 break;
7957
7958 CASE_INT_FN (BUILT_IN_CLZ):
7959 if (hi != 0)
7960 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7961 else if (lo != 0)
7962 result = width - floor_log2 (lo) - 1;
7963 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7964 result = width;
7965 break;
7966
7967 CASE_INT_FN (BUILT_IN_CTZ):
7968 if (lo != 0)
7969 result = ctz_hwi (lo);
7970 else if (hi != 0)
7971 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7972 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7973 result = width;
7974 break;
7975
7976 CASE_INT_FN (BUILT_IN_CLRSB):
7977 if (width > 2 * HOST_BITS_PER_WIDE_INT)
7978 return NULL_TREE;
7979 if (width > HOST_BITS_PER_WIDE_INT
7980 && (hi & ((unsigned HOST_WIDE_INT) 1
7981 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
7982 {
7983 hi = ~hi & ~(HOST_WIDE_INT_M1U
7984 << (width - HOST_BITS_PER_WIDE_INT - 1));
7985 lo = ~lo;
7986 }
7987 else if (width <= HOST_BITS_PER_WIDE_INT
7988 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
7989 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
7990 if (hi != 0)
7991 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
7992 else if (lo != 0)
7993 result = width - floor_log2 (lo) - 2;
7994 else
7995 result = width - 1;
7996 break;
7997
7998 CASE_INT_FN (BUILT_IN_POPCOUNT):
7999 result = 0;
8000 while (lo)
8001 result++, lo &= lo - 1;
8002 while (hi)
8003 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8004 break;
8005
8006 CASE_INT_FN (BUILT_IN_PARITY):
8007 result = 0;
8008 while (lo)
8009 result++, lo &= lo - 1;
8010 while (hi)
8011 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8012 result &= 1;
8013 break;
8014
8015 default:
8016 gcc_unreachable ();
8017 }
8018
8019 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8020 }
8021
8022 return NULL_TREE;
8023 }
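
/* Schematic examples of the bit-operation constant folds above, for
   a 32-bit int argument:

     __builtin_ffs (8)         -> 4   (1-based index of least set bit)
     __builtin_clz (1)         -> 31
     __builtin_ctz (8)         -> 3
     __builtin_popcount (0xff) -> 8
     __builtin_parity (7)      -> 1
     __builtin_clrsb (0)       -> 31  (redundant sign bits).  */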
8024
8025 /* Fold function call to builtin_bswap and the short, long and long long
8026 variants. Return NULL_TREE if no simplification can be made. */
8027 static tree
8028 fold_builtin_bswap (tree fndecl, tree arg)
8029 {
8030 if (! validate_arg (arg, INTEGER_TYPE))
8031 return NULL_TREE;
8032
8033 /* Optimize constant value. */
8034 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8035 {
8036 HOST_WIDE_INT hi, width, r_hi = 0;
8037 unsigned HOST_WIDE_INT lo, r_lo = 0;
8038 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8039
8040 width = TYPE_PRECISION (type);
8041 lo = TREE_INT_CST_LOW (arg);
8042 hi = TREE_INT_CST_HIGH (arg);
8043
8044 switch (DECL_FUNCTION_CODE (fndecl))
8045 {
8046 case BUILT_IN_BSWAP16:
8047 case BUILT_IN_BSWAP32:
8048 case BUILT_IN_BSWAP64:
8049 {
8050 int s;
8051
8052 for (s = 0; s < width; s += 8)
8053 {
8054 int d = width - s - 8;
8055 unsigned HOST_WIDE_INT byte;
8056
8057 if (s < HOST_BITS_PER_WIDE_INT)
8058 byte = (lo >> s) & 0xff;
8059 else
8060 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8061
8062 if (d < HOST_BITS_PER_WIDE_INT)
8063 r_lo |= byte << d;
8064 else
8065 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8066 }
8067 }
8068
8069 break;
8070
8071 default:
8072 gcc_unreachable ();
8073 }
8074
8075 if (width < HOST_BITS_PER_WIDE_INT)
8076 return build_int_cst (type, r_lo);
8077 else
8078 return build_int_cst_wide (type, r_lo, r_hi);
8079 }
8080
8081 return NULL_TREE;
8082 }
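
/* Schematic examples of the byte-swap constant folds above:

     __builtin_bswap16 (0x1234)     -> 0x3412
     __builtin_bswap32 (0x12345678) -> 0x78563412

   i.e. the loop moves the byte at bit offset S to the mirrored
   offset WIDTH - S - 8.  */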
8083
8084 /* A subroutine of fold_builtin to fold the various logarithmic
8085 functions.  Return NULL_TREE if no simplification can be made.
8086 FUNC is the corresponding MPFR logarithm function. */
8087
8088 static tree
8089 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8090 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8091 {
8092 if (validate_arg (arg, REAL_TYPE))
8093 {
8094 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8095 tree res;
8096 const enum built_in_function fcode = builtin_mathfn_code (arg);
8097
8098 /* Calculate the result when the argument is a constant. */
8099 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8100 return res;
8101
8102 /* Special case, optimize logN(expN(x)) = x. */
8103 if (flag_unsafe_math_optimizations
8104 && ((func == mpfr_log
8105 && (fcode == BUILT_IN_EXP
8106 || fcode == BUILT_IN_EXPF
8107 || fcode == BUILT_IN_EXPL))
8108 || (func == mpfr_log2
8109 && (fcode == BUILT_IN_EXP2
8110 || fcode == BUILT_IN_EXP2F
8111 || fcode == BUILT_IN_EXP2L))
8112 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8113 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8114
8115 /* Optimize logN(func()) for various exponential functions. We
8116 want to determine the value "x" and the power "exponent" in
8117 order to transform logN(x**exponent) into exponent*logN(x). */
8118 if (flag_unsafe_math_optimizations)
8119 {
8120 tree exponent = 0, x = 0;
8121
8122 switch (fcode)
8123 {
8124 CASE_FLT_FN (BUILT_IN_EXP):
8125 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e).  */
8126 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8127 dconst_e ()));
8128 exponent = CALL_EXPR_ARG (arg, 0);
8129 break;
8130 CASE_FLT_FN (BUILT_IN_EXP2):
8131 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2).  */
8132 x = build_real (type, dconst2);
8133 exponent = CALL_EXPR_ARG (arg, 0);
8134 break;
8135 CASE_FLT_FN (BUILT_IN_EXP10):
8136 CASE_FLT_FN (BUILT_IN_POW10):
8137 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10).  */
8138 {
8139 REAL_VALUE_TYPE dconst10;
8140 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8141 x = build_real (type, dconst10);
8142 }
8143 exponent = CALL_EXPR_ARG (arg, 0);
8144 break;
8145 CASE_FLT_FN (BUILT_IN_SQRT):
8146 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x).  */
8147 x = CALL_EXPR_ARG (arg, 0);
8148 exponent = build_real (type, dconsthalf);
8149 break;
8150 CASE_FLT_FN (BUILT_IN_CBRT):
8151 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x).  */
8152 x = CALL_EXPR_ARG (arg, 0);
8153 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8154 dconst_third ()));
8155 break;
8156 CASE_FLT_FN (BUILT_IN_POW):
8157 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x).  */
8158 x = CALL_EXPR_ARG (arg, 0);
8159 exponent = CALL_EXPR_ARG (arg, 1);
8160 break;
8161 default:
8162 break;
8163 }
8164
8165 /* Now perform the optimization. */
8166 if (x && exponent)
8167 {
8168 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8169 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8170 }
8171 }
8172 }
8173
8174 return NULL_TREE;
8175 }
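
/* A rough sketch of the logarithm folds above, all requiring
   -funsafe-math-optimizations; the rewrites are schematic:

     log (exp (x))    -> x
     log2 (exp2 (x))  -> x
     log (sqrt (x))   -> 0.5 * log (x)
     log (cbrt (x))   -> (1.0/3.0) * log (x)
     log (pow (x, y)) -> y * log (x)
     log2 (exp (x))   -> x * log2 (e)   via the generic
                                        exponent * logN (base) rule.  */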
8176
8177 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8178 NULL_TREE if no simplification can be made. */
8179
8180 static tree
8181 fold_builtin_hypot (location_t loc, tree fndecl,
8182 tree arg0, tree arg1, tree type)
8183 {
8184 tree res, narg0, narg1;
8185
8186 if (!validate_arg (arg0, REAL_TYPE)
8187 || !validate_arg (arg1, REAL_TYPE))
8188 return NULL_TREE;
8189
8190 /* Calculate the result when the argument is a constant. */
8191 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8192 return res;
8193
8194 /* If either argument to hypot has a negate or abs, strip that off.
8195 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8196 narg0 = fold_strip_sign_ops (arg0);
8197 narg1 = fold_strip_sign_ops (arg1);
8198 if (narg0 || narg1)
8199 {
8200 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8201 narg1 ? narg1 : arg1);
8202 }
8203
8204 /* If either argument is zero, hypot is fabs of the other. */
8205 if (real_zerop (arg0))
8206 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8207 else if (real_zerop (arg1))
8208 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8209
8210 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8211 if (flag_unsafe_math_optimizations
8212 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8213 {
8214 const REAL_VALUE_TYPE sqrt2_trunc
8215 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8216 return fold_build2_loc (loc, MULT_EXPR, type,
8217 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8218 build_real (type, sqrt2_trunc));
8219 }
8220
8221 return NULL_TREE;
8222 }
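
/* Schematic examples of the hypot folds above:

     hypot (-x, fabs (y)) -> hypot (x, y)  (sign ops stripped)
     hypot (x, 0.0)       -> fabs (x)
     hypot (x, x)         -> fabs (x) * sqrt (2)  under
                                                  -funsafe-math-optimizations.  */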
8223
8224
8225 /* Fold a builtin function call to pow, powf, or powl. Return
8226 NULL_TREE if no simplification can be made. */
8227 static tree
8228 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8229 {
8230 tree res;
8231
8232 if (!validate_arg (arg0, REAL_TYPE)
8233 || !validate_arg (arg1, REAL_TYPE))
8234 return NULL_TREE;
8235
8236 /* Calculate the result when the argument is a constant. */
8237 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8238 return res;
8239
8240 /* Optimize pow(1.0,y) = 1.0. */
8241 if (real_onep (arg0))
8242 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8243
8244 if (TREE_CODE (arg1) == REAL_CST
8245 && !TREE_OVERFLOW (arg1))
8246 {
8247 REAL_VALUE_TYPE cint;
8248 REAL_VALUE_TYPE c;
8249 HOST_WIDE_INT n;
8250
8251 c = TREE_REAL_CST (arg1);
8252
8253 /* Optimize pow(x,0.0) = 1.0. */
8254 if (REAL_VALUES_EQUAL (c, dconst0))
8255 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8256 arg0);
8257
8258 /* Optimize pow(x,1.0) = x. */
8259 if (REAL_VALUES_EQUAL (c, dconst1))
8260 return arg0;
8261
8262 /* Optimize pow(x,-1.0) = 1.0/x. */
8263 if (REAL_VALUES_EQUAL (c, dconstm1))
8264 return fold_build2_loc (loc, RDIV_EXPR, type,
8265 build_real (type, dconst1), arg0);
8266
8267 /* Optimize pow(x,0.5) = sqrt(x). */
8268 if (flag_unsafe_math_optimizations
8269 && REAL_VALUES_EQUAL (c, dconsthalf))
8270 {
8271 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8272
8273 if (sqrtfn != NULL_TREE)
8274 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8275 }
8276
8277 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8278 if (flag_unsafe_math_optimizations)
8279 {
8280 const REAL_VALUE_TYPE dconstroot
8281 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8282
8283 if (REAL_VALUES_EQUAL (c, dconstroot))
8284 {
8285 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8286 if (cbrtfn != NULL_TREE)
8287 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8288 }
8289 }
8290
8291 /* Check for an integer exponent. */
8292 n = real_to_integer (&c);
8293 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8294 if (real_identical (&c, &cint))
8295 {
8296 /* Attempt to evaluate pow at compile-time, unless this should
8297 raise an exception. */
8298 if (TREE_CODE (arg0) == REAL_CST
8299 && !TREE_OVERFLOW (arg0)
8300 && (n > 0
8301 || (!flag_trapping_math && !flag_errno_math)
8302 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8303 {
8304 REAL_VALUE_TYPE x;
8305 bool inexact;
8306
8307 x = TREE_REAL_CST (arg0);
8308 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8309 if (flag_unsafe_math_optimizations || !inexact)
8310 return build_real (type, x);
8311 }
8312
8313 /* Strip sign ops from even integer powers. */
8314 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8315 {
8316 tree narg0 = fold_strip_sign_ops (arg0);
8317 if (narg0)
8318 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8319 }
8320 }
8321 }
8322
8323 if (flag_unsafe_math_optimizations)
8324 {
8325 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8326
8327 /* Optimize pow(expN(x),y) = expN(x*y). */
8328 if (BUILTIN_EXPONENT_P (fcode))
8329 {
8330 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8331 tree arg = CALL_EXPR_ARG (arg0, 0);
8332 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8333 return build_call_expr_loc (loc, expfn, 1, arg);
8334 }
8335
8336 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8337 if (BUILTIN_SQRT_P (fcode))
8338 {
8339 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8340 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8341 build_real (type, dconsthalf));
8342 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8343 }
8344
8345 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8346 if (BUILTIN_CBRT_P (fcode))
8347 {
8348 tree arg = CALL_EXPR_ARG (arg0, 0);
8349 if (tree_expr_nonnegative_p (arg))
8350 {
8351 const REAL_VALUE_TYPE dconstroot
8352 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8353 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8354 build_real (type, dconstroot));
8355 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8356 }
8357 }
8358
8359 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8360 if (fcode == BUILT_IN_POW
8361 || fcode == BUILT_IN_POWF
8362 || fcode == BUILT_IN_POWL)
8363 {
8364 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8365 if (tree_expr_nonnegative_p (arg00))
8366 {
8367 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8368 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8369 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8370 }
8371 }
8372 }
8373
8374 return NULL_TREE;
8375 }
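
/* A rough sketch of the pow folds above; rewrites marked "unsafe"
   require -funsafe-math-optimizations:

     pow (1.0, y)        -> 1.0
     pow (x, 0.0)        -> 1.0
     pow (x, 1.0)        -> x
     pow (x, -1.0)       -> 1.0 / x
     pow (x, 0.5)        -> sqrt (x)           (unsafe)
     pow (x, 1.0/3.0)    -> cbrt (x)           (unsafe)
     pow (exp (x), y)    -> exp (x * y)        (unsafe)
     pow (sqrt (x), y)   -> pow (x, y * 0.5)   (unsafe)
     pow (pow (x, y), z) -> pow (x, y * z)     (unsafe, iff x is known
                                               nonnegative).  */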
8376
8377 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8378 Return NULL_TREE if no simplification can be made. */
8379 static tree
8380 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8381 tree arg0, tree arg1, tree type)
8382 {
8383 if (!validate_arg (arg0, REAL_TYPE)
8384 || !validate_arg (arg1, INTEGER_TYPE))
8385 return NULL_TREE;
8386
8387 /* Optimize pow(1.0,y) = 1.0. */
8388 if (real_onep (arg0))
8389 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8390
8391 if (host_integerp (arg1, 0))
8392 {
8393 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8394
8395 /* Evaluate powi at compile-time. */
8396 if (TREE_CODE (arg0) == REAL_CST
8397 && !TREE_OVERFLOW (arg0))
8398 {
8399 REAL_VALUE_TYPE x;
8400 x = TREE_REAL_CST (arg0);
8401 real_powi (&x, TYPE_MODE (type), &x, c);
8402 return build_real (type, x);
8403 }
8404
8405 /* Optimize pow(x,0) = 1.0. */
8406 if (c == 0)
8407 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8408 arg0);
8409
8410 /* Optimize pow(x,1) = x. */
8411 if (c == 1)
8412 return arg0;
8413
8414 /* Optimize pow(x,-1) = 1.0/x. */
8415 if (c == -1)
8416 return fold_build2_loc (loc, RDIV_EXPR, type,
8417 build_real (type, dconst1), arg0);
8418 }
8419
8420 return NULL_TREE;
8421 }
8422
8423 /* A subroutine of fold_builtin to fold the various exponent
8424 functions. Return NULL_TREE if no simplification can be made.
8425 FUNC is the corresponding MPFR exponent function. */
8426
8427 static tree
8428 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8429 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8430 {
8431 if (validate_arg (arg, REAL_TYPE))
8432 {
8433 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8434 tree res;
8435
8436 /* Calculate the result when the argument is a constant. */
8437 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8438 return res;
8439
8440 /* Optimize expN(logN(x)) = x. */
8441 if (flag_unsafe_math_optimizations)
8442 {
8443 const enum built_in_function fcode = builtin_mathfn_code (arg);
8444
8445 if ((func == mpfr_exp
8446 && (fcode == BUILT_IN_LOG
8447 || fcode == BUILT_IN_LOGF
8448 || fcode == BUILT_IN_LOGL))
8449 || (func == mpfr_exp2
8450 && (fcode == BUILT_IN_LOG2
8451 || fcode == BUILT_IN_LOG2F
8452 || fcode == BUILT_IN_LOG2L))
8453 || (func == mpfr_exp10
8454 && (fcode == BUILT_IN_LOG10
8455 || fcode == BUILT_IN_LOG10F
8456 || fcode == BUILT_IN_LOG10L)))
8457 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8458 }
8459 }
8460
8461 return NULL_TREE;
8462 }
8463
8464 /* Return true if VAR is a VAR_DECL or a component thereof. */
8465
8466 static bool
8467 var_decl_component_p (tree var)
8468 {
8469 tree inner = var;
8470 while (handled_component_p (inner))
8471 inner = TREE_OPERAND (inner, 0);
8472 return SSA_VAR_P (inner);
8473 }
8474
8475 /* Fold function call to builtin memset. Return
8476 NULL_TREE if no simplification can be made. */
8477
8478 static tree
8479 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8480 tree type, bool ignore)
8481 {
8482 tree var, ret, etype;
8483 unsigned HOST_WIDE_INT length, cval;
8484
8485 if (! validate_arg (dest, POINTER_TYPE)
8486 || ! validate_arg (c, INTEGER_TYPE)
8487 || ! validate_arg (len, INTEGER_TYPE))
8488 return NULL_TREE;
8489
8490 if (! host_integerp (len, 1))
8491 return NULL_TREE;
8492
8493 /* If the LEN parameter is zero, return DEST. */
8494 if (integer_zerop (len))
8495 return omit_one_operand_loc (loc, type, dest, c);
8496
8497 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8498 return NULL_TREE;
8499
8500 var = dest;
8501 STRIP_NOPS (var);
8502 if (TREE_CODE (var) != ADDR_EXPR)
8503 return NULL_TREE;
8504
8505 var = TREE_OPERAND (var, 0);
8506 if (TREE_THIS_VOLATILE (var))
8507 return NULL_TREE;
8508
8509 etype = TREE_TYPE (var);
8510 if (TREE_CODE (etype) == ARRAY_TYPE)
8511 etype = TREE_TYPE (etype);
8512
8513 if (!INTEGRAL_TYPE_P (etype)
8514 && !POINTER_TYPE_P (etype))
8515 return NULL_TREE;
8516
8517 if (! var_decl_component_p (var))
8518 return NULL_TREE;
8519
8520 length = tree_low_cst (len, 1);
8521 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8522 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8523 return NULL_TREE;
8524
8525 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8526 return NULL_TREE;
8527
8528 if (integer_zerop (c))
8529 cval = 0;
8530 else
8531 {
8532 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8533 return NULL_TREE;
8534
8535 cval = TREE_INT_CST_LOW (c);
8536 cval &= 0xff;
8537 cval |= cval << 8;
8538 cval |= cval << 16;
8539 cval |= (cval << 31) << 1;
8540 }
8541
8542 ret = build_int_cst_type (etype, cval);
8543 var = build_fold_indirect_ref_loc (loc,
8544 fold_convert_loc (loc,
8545 build_pointer_type (etype),
8546 dest));
8547 ret = build2 (MODIFY_EXPR, etype, var, ret);
8548 if (ignore)
8549 return ret;
8550
8551 return omit_one_operand_loc (loc, type, dest, ret);
8552 }
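
/* A rough sketch of the single-store memset fold above, assuming
   8-bit chars, a 32-bit int and sufficient alignment; the rewrite is
   schematic:

     int i;
     memset (&i, 0xab, sizeof (int));
       ->
     i = 0xabababab;

   i.e. the byte value is replicated across the scalar's width and
   the call becomes a single MODIFY_EXPR.  */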
8553
8554 /* Fold function call to builtin bzero.  Return
8555 NULL_TREE if no simplification can be made. */
8556
8557 static tree
8558 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8559 {
8560 if (! validate_arg (dest, POINTER_TYPE)
8561 || ! validate_arg (size, INTEGER_TYPE))
8562 return NULL_TREE;
8563
8564 if (!ignore)
8565 return NULL_TREE;
8566
8567 /* New argument list transforming bzero(ptr x, int y) to
8568 memset(ptr x, int 0, size_t y). This is done this way
8569 so that if it isn't expanded inline, we fall back to
8570 calling bzero instead of memset. */
8571
8572 return fold_builtin_memset (loc, dest, integer_zero_node,
8573 fold_convert_loc (loc, size_type_node, size),
8574 void_type_node, ignore);
8575 }
8576
8577 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8578 NULL_TREE if no simplification can be made.
8579 If ENDP is 0, return DEST (like memcpy).
8580 If ENDP is 1, return DEST+LEN (like mempcpy).
8581 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8582 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8583 (memmove). */
8584
8585 static tree
8586 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8587 tree len, tree type, bool ignore, int endp)
8588 {
8589 tree destvar, srcvar, expr;
8590
8591 if (! validate_arg (dest, POINTER_TYPE)
8592 || ! validate_arg (src, POINTER_TYPE)
8593 || ! validate_arg (len, INTEGER_TYPE))
8594 return NULL_TREE;
8595
8596 /* If the LEN parameter is zero, return DEST. */
8597 if (integer_zerop (len))
8598 return omit_one_operand_loc (loc, type, dest, src);
8599
8600 /* If SRC and DEST are the same (and not volatile), return
8601 DEST{,+LEN,+LEN-1}. */
8602 if (operand_equal_p (src, dest, 0))
8603 expr = len;
8604 else
8605 {
8606 tree srctype, desttype;
8607 unsigned int src_align, dest_align;
8608 tree off0;
8609
8610 if (endp == 3)
8611 {
8612 src_align = get_pointer_alignment (src);
8613 dest_align = get_pointer_alignment (dest);
8614
8615 /* Both DEST and SRC must be pointer types.
8616 ??? This is what old code did. Is the testing for pointer types
8617 really mandatory?
8618
8619 If either SRC is readonly or length is 1, we can use memcpy. */
8620 if (!dest_align || !src_align)
8621 return NULL_TREE;
8622 if (readonly_data_expr (src)
8623 || (host_integerp (len, 1)
8624 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8625 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8626 {
8627 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8628 if (!fn)
8629 return NULL_TREE;
8630 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8631 }
8632
8633 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8634 if (TREE_CODE (src) == ADDR_EXPR
8635 && TREE_CODE (dest) == ADDR_EXPR)
8636 {
8637 tree src_base, dest_base, fn;
8638 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8639 HOST_WIDE_INT size = -1;
8640 HOST_WIDE_INT maxsize = -1;
8641
8642 srcvar = TREE_OPERAND (src, 0);
8643 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8644 &size, &maxsize);
8645 destvar = TREE_OPERAND (dest, 0);
8646 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8647 &size, &maxsize);
8648 if (host_integerp (len, 1))
8649 maxsize = tree_low_cst (len, 1);
8650 else
8651 maxsize = -1;
8652 src_offset /= BITS_PER_UNIT;
8653 dest_offset /= BITS_PER_UNIT;
8654 if (SSA_VAR_P (src_base)
8655 && SSA_VAR_P (dest_base))
8656 {
8657 if (operand_equal_p (src_base, dest_base, 0)
8658 && ranges_overlap_p (src_offset, maxsize,
8659 dest_offset, maxsize))
8660 return NULL_TREE;
8661 }
8662 else if (TREE_CODE (src_base) == MEM_REF
8663 && TREE_CODE (dest_base) == MEM_REF)
8664 {
8665 double_int off;
8666 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8667 TREE_OPERAND (dest_base, 0), 0))
8668 return NULL_TREE;
8669 off = mem_ref_offset (src_base) +
8670 double_int::from_shwi (src_offset);
8671 if (!off.fits_shwi ())
8672 return NULL_TREE;
8673 src_offset = off.low;
8674 off = mem_ref_offset (dest_base) +
8675 double_int::from_shwi (dest_offset);
8676 if (!off.fits_shwi ())
8677 return NULL_TREE;
8678 dest_offset = off.low;
8679 if (ranges_overlap_p (src_offset, maxsize,
8680 dest_offset, maxsize))
8681 return NULL_TREE;
8682 }
8683 else
8684 return NULL_TREE;
8685
8686 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8687 if (!fn)
8688 return NULL_TREE;
8689 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8690 }
8691
8692 /* If the destination and source do not alias optimize into
8693 memcpy as well. */
8694 if ((is_gimple_min_invariant (dest)
8695 || TREE_CODE (dest) == SSA_NAME)
8696 && (is_gimple_min_invariant (src)
8697 || TREE_CODE (src) == SSA_NAME))
8698 {
8699 ao_ref destr, srcr;
8700 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8701 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8702 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8703 {
8704 tree fn;
8705 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8706 if (!fn)
8707 return NULL_TREE;
8708 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8709 }
8710 }
8711
8712 return NULL_TREE;
8713 }
8714
8715 if (!host_integerp (len, 0))
8716 return NULL_TREE;
8717 /* FIXME:
8718 This logic loses for arguments like (type *)malloc (sizeof (type)),
8719 since we strip the casts down to the VOID return value from malloc.
8720 Perhaps we ought to inherit the type from the non-VOID argument here? */
8721 STRIP_NOPS (src);
8722 STRIP_NOPS (dest);
8723 if (!POINTER_TYPE_P (TREE_TYPE (src))
8724 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8725 return NULL_TREE;
8726 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
8727 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8728 {
8729 tree tem = TREE_OPERAND (src, 0);
8730 STRIP_NOPS (tem);
8731 if (tem != TREE_OPERAND (src, 0))
8732 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8733 }
8734 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8735 {
8736 tree tem = TREE_OPERAND (dest, 0);
8737 STRIP_NOPS (tem);
8738 if (tem != TREE_OPERAND (dest, 0))
8739 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8740 }
8741 srctype = TREE_TYPE (TREE_TYPE (src));
8742 if (TREE_CODE (srctype) == ARRAY_TYPE
8743 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8744 {
8745 srctype = TREE_TYPE (srctype);
8746 STRIP_NOPS (src);
8747 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8748 }
8749 desttype = TREE_TYPE (TREE_TYPE (dest));
8750 if (TREE_CODE (desttype) == ARRAY_TYPE
8751 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8752 {
8753 desttype = TREE_TYPE (desttype);
8754 STRIP_NOPS (dest);
8755 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8756 }
8757 if (TREE_ADDRESSABLE (srctype)
8758 || TREE_ADDRESSABLE (desttype))
8759 return NULL_TREE;
8760
8761 src_align = get_pointer_alignment (src);
8762 dest_align = get_pointer_alignment (dest);
8763 if (dest_align < TYPE_ALIGN (desttype)
8764 || src_align < TYPE_ALIGN (srctype))
8765 return NULL_TREE;
8766
8767 if (!ignore)
8768 dest = builtin_save_expr (dest);
8769
8770 /* Build accesses at offset zero with a ref-all character type. */
8771 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8772 ptr_mode, true), 0);
8773
8774 destvar = dest;
8775 STRIP_NOPS (destvar);
8776 if (TREE_CODE (destvar) == ADDR_EXPR
8777 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8778 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8779 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8780 else
8781 destvar = NULL_TREE;
8782
8783 srcvar = src;
8784 STRIP_NOPS (srcvar);
8785 if (TREE_CODE (srcvar) == ADDR_EXPR
8786 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8787 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8788 {
8789 if (!destvar
8790 || src_align >= TYPE_ALIGN (desttype))
8791 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8792 srcvar, off0);
8793 else if (!STRICT_ALIGNMENT)
8794 {
8795 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8796 src_align);
8797 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8798 }
8799 else
8800 srcvar = NULL_TREE;
8801 }
8802 else
8803 srcvar = NULL_TREE;
8804
8805 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8806 return NULL_TREE;
8807
8808 if (srcvar == NULL_TREE)
8809 {
8810 STRIP_NOPS (src);
8811 if (src_align >= TYPE_ALIGN (desttype))
8812 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8813 else
8814 {
8815 if (STRICT_ALIGNMENT)
8816 return NULL_TREE;
8817 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8818 src_align);
8819 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8820 }
8821 }
8822 else if (destvar == NULL_TREE)
8823 {
8824 STRIP_NOPS (dest);
8825 if (dest_align >= TYPE_ALIGN (srctype))
8826 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8827 else
8828 {
8829 if (STRICT_ALIGNMENT)
8830 return NULL_TREE;
8831 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8832 dest_align);
8833 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8834 }
8835 }
8836
8837 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8838 }
8839
8840 if (ignore)
8841 return expr;
8842
8843 if (endp == 0 || endp == 3)
8844 return omit_one_operand_loc (loc, type, dest, expr);
8845
8846 if (expr == len)
8847 expr = NULL_TREE;
8848
8849 if (endp == 2)
8850 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8851 ssize_int (1));
8852
8853 dest = fold_build_pointer_plus_loc (loc, dest, len);
8854 dest = fold_convert_loc (loc, type, dest);
8855 if (expr)
8856 dest = omit_one_operand_loc (loc, type, dest, expr);
8857 return dest;
8858 }
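
/* Illustrative sketch, not part of GCC proper: the ENDP encoding used
   above mirrors the return-value conventions of the library routines,
   assuming glibc semantics (mempcpy and stpcpy are GNU/POSIX
   extensions; the function name below is hypothetical).  */
#if 0
#include <string.h>

static void
example_endp_conventions (char *dst, const char *src, size_t len)
{
  void *r;
  r = memcpy (dst, src, len);	/* ENDP == 0: returns DST.  */
  r = mempcpy (dst, src, len);	/* ENDP == 1: returns DST + LEN.  */
  r = stpcpy (dst, src);	/* ENDP == 2: returns DST + LEN - 1,
				   where LEN counts the trailing NUL.  */
  r = memmove (dst, src, len);	/* ENDP == 3: returns DST; the ranges
				   may overlap.  */
  (void) r;
}
#endif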
8859
8860 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8861 If LEN is not NULL, it represents the length of the string to be
8862 copied. Return NULL_TREE if no simplification can be made. */
8863
8864 tree
8865 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8866 {
8867 tree fn;
8868
8869 if (!validate_arg (dest, POINTER_TYPE)
8870 || !validate_arg (src, POINTER_TYPE))
8871 return NULL_TREE;
8872
8873 /* If SRC and DEST are the same (and not volatile), return DEST. */
8874 if (operand_equal_p (src, dest, 0))
8875 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8876
8877 if (optimize_function_for_size_p (cfun))
8878 return NULL_TREE;
8879
8880 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8881 if (!fn)
8882 return NULL_TREE;
8883
8884 if (!len)
8885 {
8886 len = c_strlen (src, 1);
8887 if (! len || TREE_SIDE_EFFECTS (len))
8888 return NULL_TREE;
8889 }
8890
8891 len = fold_convert_loc (loc, size_type_node, len);
8892 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
8893 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8894 build_call_expr_loc (loc, fn, 3, dest, src, len));
8895 }
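
/* Illustrative sketch, not part of GCC proper: when the source length
   is known at compile time, the fold above turns strcpy into a
   fixed-size memcpy that also copies the terminating NUL.  */
#if 0
static void
example_strcpy_fold (char *dst)
{
  __builtin_strcpy (dst, "abc");	/* Becomes...  */
  __builtin_memcpy (dst, "abc", 4);	/* ...len + 1 == 4 bytes.  */
}
#endif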
8896
8897 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8898 Return NULL_TREE if no simplification can be made. */
8899
8900 static tree
8901 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8902 {
8903 tree fn, len, lenp1, call, type;
8904
8905 if (!validate_arg (dest, POINTER_TYPE)
8906 || !validate_arg (src, POINTER_TYPE))
8907 return NULL_TREE;
8908
8909 len = c_strlen (src, 1);
8910 if (!len
8911 || TREE_CODE (len) != INTEGER_CST)
8912 return NULL_TREE;
8913
8914 if (optimize_function_for_size_p (cfun)
8915 /* If length is zero it's small enough. */
8916 && !integer_zerop (len))
8917 return NULL_TREE;
8918
8919 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8920 if (!fn)
8921 return NULL_TREE;
8922
8923 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8924 fold_convert_loc (loc, size_type_node, len),
8925 build_int_cst (size_type_node, 1));
8926 /* We use dest twice in building our expression. Save it from
8927 multiple expansions. */
8928 dest = builtin_save_expr (dest);
8929 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8930
8931 type = TREE_TYPE (TREE_TYPE (fndecl));
8932 dest = fold_build_pointer_plus_loc (loc, dest, len);
8933 dest = fold_convert_loc (loc, type, dest);
8934 dest = omit_one_operand_loc (loc, type, dest, call);
8935 return dest;
8936 }
8937
8938 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8939 If SLEN is not NULL, it represents the length of the source string.
8940 Return NULL_TREE if no simplification can be made. */
8941
8942 tree
8943 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8944 tree src, tree len, tree slen)
8945 {
8946 tree fn;
8947
8948 if (!validate_arg (dest, POINTER_TYPE)
8949 || !validate_arg (src, POINTER_TYPE)
8950 || !validate_arg (len, INTEGER_TYPE))
8951 return NULL_TREE;
8952
8953 /* If the LEN parameter is zero, return DEST. */
8954 if (integer_zerop (len))
8955 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8956
8957 /* We can't compare slen with len as constants below if len is not a
8958 constant. */
8959 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8960 return NULL_TREE;
8961
8962 if (!slen)
8963 slen = c_strlen (src, 1);
8964
8965 /* Now, we must be passed a constant src ptr parameter. */
8966 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8967 return NULL_TREE;
8968
8969 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8970
8971 /* We do not support simplification of this case, though we do
8972 support it when expanding trees into RTL. */
8973 /* FIXME: generate a call to __builtin_memset. */
8974 if (tree_int_cst_lt (slen, len))
8975 return NULL_TREE;
8976
8977 /* OK, transform into builtin memcpy. */
8978 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8979 if (!fn)
8980 return NULL_TREE;
8981
8982 len = fold_convert_loc (loc, size_type_node, len);
8983 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8984 build_call_expr_loc (loc, fn, 3, dest, src, len));
8985 }
8986
8987 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8988 arguments to the call, and TYPE is its return type.
8989 Return NULL_TREE if no simplification can be made. */
8990
8991 static tree
8992 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8993 {
8994 if (!validate_arg (arg1, POINTER_TYPE)
8995 || !validate_arg (arg2, INTEGER_TYPE)
8996 || !validate_arg (len, INTEGER_TYPE))
8997 return NULL_TREE;
8998 else
8999 {
9000 const char *p1;
9001
9002 if (TREE_CODE (arg2) != INTEGER_CST
9003 || !host_integerp (len, 1))
9004 return NULL_TREE;
9005
9006 p1 = c_getstr (arg1);
9007 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9008 {
9009 char c;
9010 const char *r;
9011 tree tem;
9012
9013 if (target_char_cast (arg2, &c))
9014 return NULL_TREE;
9015
9016 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9017
9018 if (r == NULL)
9019 return build_int_cst (TREE_TYPE (arg1), 0);
9020
9021 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9022 return fold_convert_loc (loc, type, tem);
9023 }
9024 return NULL_TREE;
9025 }
9026 }
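
/* Illustrative sketch, not part of GCC proper: with both the string
   and the length constant, the call folds to a pointer constant (or a
   null pointer) at compile time.  */
#if 0
static const void *
example_memchr_fold (void)
{
  /* memchr ("hello", 'l', 6) finds the first 'l' at offset 2, so the
     fold returns "hello" + 2; a byte not present in range folds to 0.  */
  return __builtin_memchr ("hello", 'l', 6);
}
#endif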
9027
9028 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9029 Return NULL_TREE if no simplification can be made. */
9030
9031 static tree
9032 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9033 {
9034 const char *p1, *p2;
9035
9036 if (!validate_arg (arg1, POINTER_TYPE)
9037 || !validate_arg (arg2, POINTER_TYPE)
9038 || !validate_arg (len, INTEGER_TYPE))
9039 return NULL_TREE;
9040
9041 /* If the LEN parameter is zero, return zero. */
9042 if (integer_zerop (len))
9043 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9044 arg1, arg2);
9045
9046 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9047 if (operand_equal_p (arg1, arg2, 0))
9048 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9049
9050 p1 = c_getstr (arg1);
9051 p2 = c_getstr (arg2);
9052
9053 /* If all arguments are constant, and the value of len is not greater
9054 than the lengths of arg1 and arg2, evaluate at compile-time. */
9055 if (host_integerp (len, 1) && p1 && p2
9056 && compare_tree_int (len, strlen (p1) + 1) <= 0
9057 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9058 {
9059 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9060
9061 if (r > 0)
9062 return integer_one_node;
9063 else if (r < 0)
9064 return integer_minus_one_node;
9065 else
9066 return integer_zero_node;
9067 }
9068
9069 /* If the len parameter is one, return an expression corresponding to
9070 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9071 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9072 {
9073 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9074 tree cst_uchar_ptr_node
9075 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9076
9077 tree ind1
9078 = fold_convert_loc (loc, integer_type_node,
9079 build1 (INDIRECT_REF, cst_uchar_node,
9080 fold_convert_loc (loc,
9081 cst_uchar_ptr_node,
9082 arg1)));
9083 tree ind2
9084 = fold_convert_loc (loc, integer_type_node,
9085 build1 (INDIRECT_REF, cst_uchar_node,
9086 fold_convert_loc (loc,
9087 cst_uchar_ptr_node,
9088 arg2)));
9089 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9090 }
9091
9092 return NULL_TREE;
9093 }
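
/* Illustrative sketch, not part of GCC proper: the all-constant case is
   evaluated at compile time, and a length of one decays to a byte
   subtraction through unsigned char pointers.  */
#if 0
static int
example_memcmp_folds (const char *p, const char *q)
{
  int a = __builtin_memcmp ("abc", "abd", 3);	/* Folds to -1.  */
  /* Length 1 folds to the difference of the first bytes:  */
  int b = *(const unsigned char *) p - *(const unsigned char *) q;
  return a + b;
}
#endif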
9094
9095 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9096 Return NULL_TREE if no simplification can be made. */
9097
9098 static tree
9099 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9100 {
9101 const char *p1, *p2;
9102
9103 if (!validate_arg (arg1, POINTER_TYPE)
9104 || !validate_arg (arg2, POINTER_TYPE))
9105 return NULL_TREE;
9106
9107 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9108 if (operand_equal_p (arg1, arg2, 0))
9109 return integer_zero_node;
9110
9111 p1 = c_getstr (arg1);
9112 p2 = c_getstr (arg2);
9113
9114 if (p1 && p2)
9115 {
9116 const int i = strcmp (p1, p2);
9117 if (i < 0)
9118 return integer_minus_one_node;
9119 else if (i > 0)
9120 return integer_one_node;
9121 else
9122 return integer_zero_node;
9123 }
9124
9125 /* If the second arg is "", return *(const unsigned char*)arg1. */
9126 if (p2 && *p2 == '\0')
9127 {
9128 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9129 tree cst_uchar_ptr_node
9130 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9131
9132 return fold_convert_loc (loc, integer_type_node,
9133 build1 (INDIRECT_REF, cst_uchar_node,
9134 fold_convert_loc (loc,
9135 cst_uchar_ptr_node,
9136 arg1)));
9137 }
9138
9139 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9140 if (p1 && *p1 == '\0')
9141 {
9142 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9143 tree cst_uchar_ptr_node
9144 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9145
9146 tree temp
9147 = fold_convert_loc (loc, integer_type_node,
9148 build1 (INDIRECT_REF, cst_uchar_node,
9149 fold_convert_loc (loc,
9150 cst_uchar_ptr_node,
9151 arg2)));
9152 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9153 }
9154
9155 return NULL_TREE;
9156 }
9157
9158 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9159 Return NULL_TREE if no simplification can be made. */
9160
9161 static tree
9162 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9163 {
9164 const char *p1, *p2;
9165
9166 if (!validate_arg (arg1, POINTER_TYPE)
9167 || !validate_arg (arg2, POINTER_TYPE)
9168 || !validate_arg (len, INTEGER_TYPE))
9169 return NULL_TREE;
9170
9171 /* If the LEN parameter is zero, return zero. */
9172 if (integer_zerop (len))
9173 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9174 arg1, arg2);
9175
9176 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9177 if (operand_equal_p (arg1, arg2, 0))
9178 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9179
9180 p1 = c_getstr (arg1);
9181 p2 = c_getstr (arg2);
9182
9183 if (host_integerp (len, 1) && p1 && p2)
9184 {
9185 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9186 if (i > 0)
9187 return integer_one_node;
9188 else if (i < 0)
9189 return integer_minus_one_node;
9190 else
9191 return integer_zero_node;
9192 }
9193
9194 /* If the second arg is "", and the length is greater than zero,
9195 return *(const unsigned char*)arg1. */
9196 if (p2 && *p2 == '\0'
9197 && TREE_CODE (len) == INTEGER_CST
9198 && tree_int_cst_sgn (len) == 1)
9199 {
9200 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9201 tree cst_uchar_ptr_node
9202 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9203
9204 return fold_convert_loc (loc, integer_type_node,
9205 build1 (INDIRECT_REF, cst_uchar_node,
9206 fold_convert_loc (loc,
9207 cst_uchar_ptr_node,
9208 arg1)));
9209 }
9210
9211 /* If the first arg is "", and the length is greater than zero,
9212 return -*(const unsigned char*)arg2. */
9213 if (p1 && *p1 == '\0'
9214 && TREE_CODE (len) == INTEGER_CST
9215 && tree_int_cst_sgn (len) == 1)
9216 {
9217 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9218 tree cst_uchar_ptr_node
9219 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9220
9221 tree temp = fold_convert_loc (loc, integer_type_node,
9222 build1 (INDIRECT_REF, cst_uchar_node,
9223 fold_convert_loc (loc,
9224 cst_uchar_ptr_node,
9225 arg2)));
9226 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9227 }
9228
9229 /* If the len parameter is one, return an expression corresponding to
9230 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9231 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9232 {
9233 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9234 tree cst_uchar_ptr_node
9235 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9236
9237 tree ind1 = fold_convert_loc (loc, integer_type_node,
9238 build1 (INDIRECT_REF, cst_uchar_node,
9239 fold_convert_loc (loc,
9240 cst_uchar_ptr_node,
9241 arg1)));
9242 tree ind2 = fold_convert_loc (loc, integer_type_node,
9243 build1 (INDIRECT_REF, cst_uchar_node,
9244 fold_convert_loc (loc,
9245 cst_uchar_ptr_node,
9246 arg2)));
9247 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9248 }
9249
9250 return NULL_TREE;
9251 }
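
/* Illustrative sketch, not part of GCC proper: with one constant empty
   string and a known-positive length, the comparison folds to a single
   byte load, matching the cases handled above.  */
#if 0
static int
example_strncmp_fold (const char *s)
{
  /* strncmp (s, "", n) with constant n > 0 folds to
     *(const unsigned char *) s, and strncmp ("", s, n) to its
     negation.  */
  return __builtin_strncmp (s, "", 4);
}
#endif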
9252
9253 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9254 ARG. Return NULL_TREE if no simplification can be made. */
9255
9256 static tree
9257 fold_builtin_signbit (location_t loc, tree arg, tree type)
9258 {
9259 if (!validate_arg (arg, REAL_TYPE))
9260 return NULL_TREE;
9261
9262 /* If ARG is a compile-time constant, determine the result. */
9263 if (TREE_CODE (arg) == REAL_CST
9264 && !TREE_OVERFLOW (arg))
9265 {
9266 REAL_VALUE_TYPE c;
9267
9268 c = TREE_REAL_CST (arg);
9269 return (REAL_VALUE_NEGATIVE (c)
9270 ? build_one_cst (type)
9271 : build_zero_cst (type));
9272 }
9273
9274 /* If ARG is non-negative, the result is always zero. */
9275 if (tree_expr_nonnegative_p (arg))
9276 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9277
9278 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9279 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9280 return fold_convert (type,
9281 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9282 build_real (TREE_TYPE (arg), dconst0)));
9283
9284 return NULL_TREE;
9285 }
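
/* Illustrative sketch, not part of GCC proper: constants fold outright
   and, when the format has no signed zeros, the generic form is a
   plain comparison against zero.  */
#if 0
static int
example_signbit_folds (double x)
{
  int a = __builtin_signbit (-3.0);	/* Folds to 1.  */
  int b = __builtin_signbit (2.5);	/* Folds to 0.  */
  /* Without signed zeros, signbit (x) folds to x < 0.0.  */
  return a + b + (x < 0.0);
}
#endif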
9286
9287 /* Fold function call to builtin copysign, copysignf or copysignl with
9288 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9289 be made. */
9290
9291 static tree
9292 fold_builtin_copysign (location_t loc, tree fndecl,
9293 tree arg1, tree arg2, tree type)
9294 {
9295 tree tem;
9296
9297 if (!validate_arg (arg1, REAL_TYPE)
9298 || !validate_arg (arg2, REAL_TYPE))
9299 return NULL_TREE;
9300
9301 /* copysign(X,X) is X. */
9302 if (operand_equal_p (arg1, arg2, 0))
9303 return fold_convert_loc (loc, type, arg1);
9304
9305 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9306 if (TREE_CODE (arg1) == REAL_CST
9307 && TREE_CODE (arg2) == REAL_CST
9308 && !TREE_OVERFLOW (arg1)
9309 && !TREE_OVERFLOW (arg2))
9310 {
9311 REAL_VALUE_TYPE c1, c2;
9312
9313 c1 = TREE_REAL_CST (arg1);
9314 c2 = TREE_REAL_CST (arg2);
9315 /* c1.sign := c2.sign. */
9316 real_copysign (&c1, &c2);
9317 return build_real (type, c1);
9318 }
9319
9320 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9321 Remember to evaluate Y for side-effects. */
9322 if (tree_expr_nonnegative_p (arg2))
9323 return omit_one_operand_loc (loc, type,
9324 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9325 arg2);
9326
9327 /* Strip sign changing operations for the first argument. */
9328 tem = fold_strip_sign_ops (arg1);
9329 if (tem)
9330 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9331
9332 return NULL_TREE;
9333 }
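
/* Illustrative sketch, not part of GCC proper: the constant and
   known-sign cases above, in source terms.  */
#if 0
static double
example_copysign_folds (double x, double y)
{
  double a = __builtin_copysign (3.0, -0.5);	/* Folds to -3.0.  */
  /* When the second argument is known non-negative, copysign (x, y)
     folds to fabs (x), still evaluating y for side effects.  */
  double b = __builtin_copysign (x, __builtin_fabs (y));
  return a + b;
}
#endif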
9334
9335 /* Fold a call to builtin isascii with argument ARG. */
9336
9337 static tree
9338 fold_builtin_isascii (location_t loc, tree arg)
9339 {
9340 if (!validate_arg (arg, INTEGER_TYPE))
9341 return NULL_TREE;
9342 else
9343 {
9344 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9345 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9346 build_int_cst (integer_type_node,
9347 ~ (unsigned HOST_WIDE_INT) 0x7f));
9348 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9349 arg, integer_zero_node);
9350 }
9351 }
9352
9353 /* Fold a call to builtin toascii with argument ARG. */
9354
9355 static tree
9356 fold_builtin_toascii (location_t loc, tree arg)
9357 {
9358 if (!validate_arg (arg, INTEGER_TYPE))
9359 return NULL_TREE;
9360
9361 /* Transform toascii(c) -> (c & 0x7f). */
9362 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9363 build_int_cst (integer_type_node, 0x7f));
9364 }
9365
9366 /* Fold a call to builtin isdigit with argument ARG. */
9367
9368 static tree
9369 fold_builtin_isdigit (location_t loc, tree arg)
9370 {
9371 if (!validate_arg (arg, INTEGER_TYPE))
9372 return NULL_TREE;
9373 else
9374 {
9375 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9376 /* According to the C standard, isdigit is unaffected by locale.
9377 However, it definitely is affected by the target character set. */
9378 unsigned HOST_WIDE_INT target_digit0
9379 = lang_hooks.to_target_charset ('0');
9380
9381 if (target_digit0 == 0)
9382 return NULL_TREE;
9383
9384 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9385 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9386 build_int_cst (unsigned_type_node, target_digit0));
9387 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9388 build_int_cst (unsigned_type_node, 9));
9389 }
9390 }
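
/* Illustrative sketch, not part of GCC proper: the single unsigned
   comparison above works because wrap-around maps characters below '0'
   to huge unsigned values, so only '0'..'9' land in the range 0..9.  */
#if 0
static int
example_isdigit_fold (int c)
{
  return (unsigned) c - '0' <= 9;
}
#endif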
9391
9392 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9393
9394 static tree
9395 fold_builtin_fabs (location_t loc, tree arg, tree type)
9396 {
9397 if (!validate_arg (arg, REAL_TYPE))
9398 return NULL_TREE;
9399
9400 arg = fold_convert_loc (loc, type, arg);
9401 if (TREE_CODE (arg) == REAL_CST)
9402 return fold_abs_const (arg, type);
9403 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9404 }
9405
9406 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9407
9408 static tree
9409 fold_builtin_abs (location_t loc, tree arg, tree type)
9410 {
9411 if (!validate_arg (arg, INTEGER_TYPE))
9412 return NULL_TREE;
9413
9414 arg = fold_convert_loc (loc, type, arg);
9415 if (TREE_CODE (arg) == INTEGER_CST)
9416 return fold_abs_const (arg, type);
9417 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9418 }
9419
9420 /* Fold a fma operation with arguments ARG[012]. */
9421
9422 tree
9423 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9424 tree type, tree arg0, tree arg1, tree arg2)
9425 {
9426 if (TREE_CODE (arg0) == REAL_CST
9427 && TREE_CODE (arg1) == REAL_CST
9428 && TREE_CODE (arg2) == REAL_CST)
9429 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9430
9431 return NULL_TREE;
9432 }
9433
9434 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9435
9436 static tree
9437 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9438 {
9439 if (validate_arg (arg0, REAL_TYPE)
9440 && validate_arg (arg1, REAL_TYPE)
9441 && validate_arg (arg2, REAL_TYPE))
9442 {
9443 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9444 if (tem)
9445 return tem;
9446
9447 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9448 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9449 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9450 }
9451 return NULL_TREE;
9452 }
9453
9454 /* Fold a call to builtin fmin or fmax. */
9455
9456 static tree
9457 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9458 tree type, bool max)
9459 {
9460 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9461 {
9462 /* Calculate the result when the argument is a constant. */
9463 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9464
9465 if (res)
9466 return res;
9467
9468 /* If either argument is NaN, return the other one. Avoid the
9469 transformation if we get (and honor) a signalling NaN. Using
9470 omit_one_operand() ensures we create a non-lvalue. */
9471 if (TREE_CODE (arg0) == REAL_CST
9472 && real_isnan (&TREE_REAL_CST (arg0))
9473 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9474 || ! TREE_REAL_CST (arg0).signalling))
9475 return omit_one_operand_loc (loc, type, arg1, arg0);
9476 if (TREE_CODE (arg1) == REAL_CST
9477 && real_isnan (&TREE_REAL_CST (arg1))
9478 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9479 || ! TREE_REAL_CST (arg1).signalling))
9480 return omit_one_operand_loc (loc, type, arg0, arg1);
9481
9482 /* Transform fmin/fmax(x,x) -> x. */
9483 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9484 return omit_one_operand_loc (loc, type, arg0, arg1);
9485
9486 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9487 functions to return the numeric arg if the other one is NaN.
9488 These tree codes don't honor that, so only transform if
9489 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9490 handled, so we don't have to worry about it either. */
9491 if (flag_finite_math_only)
9492 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9493 fold_convert_loc (loc, type, arg0),
9494 fold_convert_loc (loc, type, arg1));
9495 }
9496 return NULL_TREE;
9497 }
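
/* Illustrative sketch, not part of GCC proper: the NaN rule above means
   a quiet-NaN argument simply selects the other operand, which is what
   C99 fmin/fmax require.  */
#if 0
static double
example_fmax_fold (double x)
{
  /* fmax (x, NaN) folds to x; fmax (x, x) folds to x; and with
     -ffinite-math-only the call becomes a MAX_EXPR.  */
  return __builtin_fmax (x, __builtin_nan (""));
}
#endif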
9498
9499 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9500
9501 static tree
9502 fold_builtin_carg (location_t loc, tree arg, tree type)
9503 {
9504 if (validate_arg (arg, COMPLEX_TYPE)
9505 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9506 {
9507 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9508
9509 if (atan2_fn)
9510 {
9511 tree new_arg = builtin_save_expr (arg);
9512 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9513 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9514 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9515 }
9516 }
9517
9518 return NULL_TREE;
9519 }
9520
9521 /* Fold a call to builtin logb/ilogb. */
9522
9523 static tree
9524 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9525 {
9526 if (! validate_arg (arg, REAL_TYPE))
9527 return NULL_TREE;
9528
9529 STRIP_NOPS (arg);
9530
9531 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9532 {
9533 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9534
9535 switch (value->cl)
9536 {
9537 case rvc_nan:
9538 case rvc_inf:
9539 /* If arg is Inf or NaN and we're logb, return it. */
9540 if (TREE_CODE (rettype) == REAL_TYPE)
9541 {
9542 /* For logb(-Inf) we have to return +Inf. */
9543 if (real_isinf (value) && real_isneg (value))
9544 {
9545 REAL_VALUE_TYPE tem;
9546 real_inf (&tem);
9547 return build_real (rettype, tem);
9548 }
9549 return fold_convert_loc (loc, rettype, arg);
9550 }
9551 /* Fall through... */
9552 case rvc_zero:
9553 /* Zero may set errno and/or raise an exception for logb; also,
9554 for ilogb we don't know FP_ILOGB0. */
9555 return NULL_TREE;
9556 case rvc_normal:
9557 /* For normal numbers, proceed iff radix == 2. In GCC,
9558 normalized significands are in the range [0.5, 1.0). We
9559 want the exponent as if they were [1.0, 2.0) so get the
9560 exponent and subtract 1. */
9561 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9562 return fold_convert_loc (loc, rettype,
9563 build_int_cst (integer_type_node,
9564 REAL_EXP (value)-1));
9565 break;
9566 }
9567 }
9568
9569 return NULL_TREE;
9570 }
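
/* Illustrative sketch, not part of GCC proper: a worked constant case.
   GCC stores 8.0 as 0.5 * 2**4, so REAL_EXP is 4 and the fold above
   returns 4 - 1 == 3, matching logb (8.0) == 3.0.  */
#if 0
static double
example_logb_fold (void)
{
  return __builtin_logb (8.0);	/* Folds to 3.0 for radix-2 formats.  */
}
#endif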
9571
9572 /* Fold a call to builtin significand, if radix == 2. */
9573
9574 static tree
9575 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9576 {
9577 if (! validate_arg (arg, REAL_TYPE))
9578 return NULL_TREE;
9579
9580 STRIP_NOPS (arg);
9581
9582 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9583 {
9584 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9585
9586 switch (value->cl)
9587 {
9588 case rvc_zero:
9589 case rvc_nan:
9590 case rvc_inf:
9591 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9592 return fold_convert_loc (loc, rettype, arg);
9593 case rvc_normal:
9594 /* For normal numbers, proceed iff radix == 2. */
9595 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9596 {
9597 REAL_VALUE_TYPE result = *value;
9598 /* In GCC, normalized significands are in the range [0.5,
9599 1.0). We want them to be [1.0, 2.0) so set the
9600 exponent to 1. */
9601 SET_REAL_EXP (&result, 1);
9602 return build_real (rettype, result);
9603 }
9604 break;
9605 }
9606 }
9607
9608 return NULL_TREE;
9609 }
9610
9611 /* Fold a call to builtin frexp, we can assume the base is 2. */
9612
9613 static tree
9614 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9615 {
9616 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9617 return NULL_TREE;
9618
9619 STRIP_NOPS (arg0);
9620
9621 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9622 return NULL_TREE;
9623
9624 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9625
9626 /* Proceed if a valid pointer type was passed in. */
9627 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9628 {
9629 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9630 tree frac, exp;
9631
9632 switch (value->cl)
9633 {
9634 case rvc_zero:
9635 /* For +-0, return (*exp = 0, +-0). */
9636 exp = integer_zero_node;
9637 frac = arg0;
9638 break;
9639 case rvc_nan:
9640 case rvc_inf:
9641 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9642 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9643 case rvc_normal:
9644 {
9645 /* Since the frexp function always expects base 2, and in
9646 GCC normalized significands are already in the range
9647 [0.5, 1.0), we have exactly what frexp wants. */
9648 REAL_VALUE_TYPE frac_rvt = *value;
9649 SET_REAL_EXP (&frac_rvt, 0);
9650 frac = build_real (rettype, frac_rvt);
9651 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9652 }
9653 break;
9654 default:
9655 gcc_unreachable ();
9656 }
9657
9658 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9659 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9660 TREE_SIDE_EFFECTS (arg1) = 1;
9661 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9662 }
9663
9664 return NULL_TREE;
9665 }
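
/* Illustrative sketch, not part of GCC proper: a worked constant case
   for the rvc_normal path.  Since 8.0 is stored as 0.5 * 2**4, the
   fold sets *EXP to 4 and returns the significand 0.5 unchanged.  */
#if 0
static double
example_frexp_fold (void)
{
  int e;
  double frac = __builtin_frexp (8.0, &e);   /* frac == 0.5, e == 4.  */
  return frac + e;
}
#endif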
9666
9667 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9668 then we can assume the base is two. If it's false, then we have to
9669 check the mode of the TYPE parameter in certain cases. */
9670
9671 static tree
9672 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9673 tree type, bool ldexp)
9674 {
9675 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9676 {
9677 STRIP_NOPS (arg0);
9678 STRIP_NOPS (arg1);
9679
9680 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9681 if (real_zerop (arg0) || integer_zerop (arg1)
9682 || (TREE_CODE (arg0) == REAL_CST
9683 && !real_isfinite (&TREE_REAL_CST (arg0))))
9684 return omit_one_operand_loc (loc, type, arg0, arg1);
9685
9686 /* If both arguments are constant, then try to evaluate it. */
9687 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9688 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9689 && host_integerp (arg1, 0))
9690 {
9691 /* Bound the maximum adjustment to twice the range of the
9692 mode's valid exponents. Use abs to ensure the range is
9693 positive as a sanity check. */
9694 const long max_exp_adj = 2 *
9695 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9696 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9697
9698 /* Get the user-requested adjustment. */
9699 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9700
9701 /* The requested adjustment must be inside this range. This
9702 is a preliminary cap to avoid things like overflow; we
9703 may still fail to compute the result for other reasons. */
9704 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9705 {
9706 REAL_VALUE_TYPE initial_result;
9707
9708 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9709
9710 /* Ensure we didn't overflow. */
9711 if (! real_isinf (&initial_result))
9712 {
9713 const REAL_VALUE_TYPE trunc_result
9714 = real_value_truncate (TYPE_MODE (type), initial_result);
9715
9716 /* Only proceed if the target mode can hold the
9717 resulting value. */
9718 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9719 return build_real (type, trunc_result);
9720 }
9721 }
9722 }
9723 }
9724
9725 return NULL_TREE;
9726 }
9727
9728 /* Fold a call to builtin modf. */
9729
9730 static tree
9731 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9732 {
9733 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9734 return NULL_TREE;
9735
9736 STRIP_NOPS (arg0);
9737
9738 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9739 return NULL_TREE;
9740
9741 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9742
9743 /* Proceed if a valid pointer type was passed in. */
9744 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9745 {
9746 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9747 REAL_VALUE_TYPE trunc, frac;
9748
9749 switch (value->cl)
9750 {
9751 case rvc_nan:
9752 case rvc_zero:
9753 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9754 trunc = frac = *value;
9755 break;
9756 case rvc_inf:
9757 /* For +-Inf, return (*arg1 = arg0, +-0). */
9758 frac = dconst0;
9759 frac.sign = value->sign;
9760 trunc = *value;
9761 break;
9762 case rvc_normal:
9763 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9764 real_trunc (&trunc, VOIDmode, value);
9765 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9766 /* If the original number was negative and already
9767 integral, then the fractional part is -0.0. */
9768 if (value->sign && frac.cl == rvc_zero)
9769 frac.sign = value->sign;
9770 break;
9771 }
9772
9773 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9774 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9775 build_real (rettype, trunc));
9776 TREE_SIDE_EFFECTS (arg1) = 1;
9777 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9778 build_real (rettype, frac));
9779 }
9780
9781 return NULL_TREE;
9782 }
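
/* Illustrative sketch, not part of GCC proper: the sign handling above
   in concrete terms.  */
#if 0
static double
example_modf_fold (void)
{
  double ipart;
  /* modf (-2.5, &ipart) folds to (ipart = -2.0, -0.5); for a negative
     integral input such as -3.0 the fractional part is -0.0.  */
  return __builtin_modf (-2.5, &ipart);
}
#endif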
9783
9784 /* Given a location LOC, an interclass builtin function decl FNDECL
9785 and its single argument ARG, return a folded expression computing
9786 the same, or NULL_TREE if we either couldn't or didn't want to fold
9787 (the latter happens if there's an RTL instruction available). */
9788
9789 static tree
9790 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9791 {
9792 enum machine_mode mode;
9793
9794 if (!validate_arg (arg, REAL_TYPE))
9795 return NULL_TREE;
9796
9797 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9798 return NULL_TREE;
9799
9800 mode = TYPE_MODE (TREE_TYPE (arg));
9801
9802 /* If there is no optab, try generic code. */
9803 switch (DECL_FUNCTION_CODE (fndecl))
9804 {
9805 tree result;
9806
9807 CASE_FLT_FN (BUILT_IN_ISINF):
9808 {
9809 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9810 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9811 tree const type = TREE_TYPE (arg);
9812 REAL_VALUE_TYPE r;
9813 char buf[128];
9814
9815 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9816 real_from_string (&r, buf);
9817 result = build_call_expr (isgr_fn, 2,
9818 fold_build1_loc (loc, ABS_EXPR, type, arg),
9819 build_real (type, r));
9820 return result;
9821 }
9822 CASE_FLT_FN (BUILT_IN_FINITE):
9823 case BUILT_IN_ISFINITE:
9824 {
9825 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9826 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9827 tree const type = TREE_TYPE (arg);
9828 REAL_VALUE_TYPE r;
9829 char buf[128];
9830
9831 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9832 real_from_string (&r, buf);
9833 result = build_call_expr (isle_fn, 2,
9834 fold_build1_loc (loc, ABS_EXPR, type, arg),
9835 build_real (type, r));
9836 /*result = fold_build2_loc (loc, UNGT_EXPR,
9837 TREE_TYPE (TREE_TYPE (fndecl)),
9838 fold_build1_loc (loc, ABS_EXPR, type, arg),
9839 build_real (type, r));
9840 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9841 TREE_TYPE (TREE_TYPE (fndecl)),
9842 result);*/
9843 return result;
9844 }
9845 case BUILT_IN_ISNORMAL:
9846 {
9847 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9848 islessequal(fabs(x),DBL_MAX). */
9849 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9850 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9851 tree const type = TREE_TYPE (arg);
9852 REAL_VALUE_TYPE rmax, rmin;
9853 char buf[128];
9854
9855 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9856 real_from_string (&rmax, buf);
9857 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9858 real_from_string (&rmin, buf);
9859 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9860 result = build_call_expr (isle_fn, 2, arg,
9861 build_real (type, rmax));
9862 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9863 build_call_expr (isge_fn, 2, arg,
9864 build_real (type, rmin)));
9865 return result;
9866 }
9867 default:
9868 break;
9869 }
9870
9871 return NULL_TREE;
9872 }
9873
9874 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9875 ARG is the argument for the call. */
9876
9877 static tree
9878 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9879 {
9880 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9881 REAL_VALUE_TYPE r;
9882
9883 if (!validate_arg (arg, REAL_TYPE))
9884 return NULL_TREE;
9885
9886 switch (builtin_index)
9887 {
9888 case BUILT_IN_ISINF:
9889 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9890 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9891
9892 if (TREE_CODE (arg) == REAL_CST)
9893 {
9894 r = TREE_REAL_CST (arg);
9895 if (real_isinf (&r))
9896 return real_compare (GT_EXPR, &r, &dconst0)
9897 ? integer_one_node : integer_minus_one_node;
9898 else
9899 return integer_zero_node;
9900 }
9901
9902 return NULL_TREE;
9903
9904 case BUILT_IN_ISINF_SIGN:
9905 {
9906 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9907 /* In a boolean context, GCC will fold the inner COND_EXPR to
9908 1. So e.g. "if (isinf_sign(x))" would be folded to just
9909 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9910 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9911 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9912 tree tmp = NULL_TREE;
9913
9914 arg = builtin_save_expr (arg);
9915
9916 if (signbit_fn && isinf_fn)
9917 {
9918 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9919 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9920
9921 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9922 signbit_call, integer_zero_node);
9923 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9924 isinf_call, integer_zero_node);
9925
9926 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9927 integer_minus_one_node, integer_one_node);
9928 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9929 isinf_call, tmp,
9930 integer_zero_node);
9931 }
9932
9933 return tmp;
9934 }
9935
9936 case BUILT_IN_ISFINITE:
9937 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9938 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9939 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9940
9941 if (TREE_CODE (arg) == REAL_CST)
9942 {
9943 r = TREE_REAL_CST (arg);
9944 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9945 }
9946
9947 return NULL_TREE;
9948
9949 case BUILT_IN_ISNAN:
9950 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9951 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9952
9953 if (TREE_CODE (arg) == REAL_CST)
9954 {
9955 r = TREE_REAL_CST (arg);
9956 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9957 }
9958
9959 arg = builtin_save_expr (arg);
9960 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9961
9962 default:
9963 gcc_unreachable ();
9964 }
9965 }
9966
9967 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9968 This builtin will generate code to return the appropriate floating
9969 point classification depending on the value of the floating point
9970 number passed in. The possible return values must be supplied as
9971 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9972 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9973 one floating point argument which is "type generic". */
9974
9975 static tree
9976 fold_builtin_fpclassify (location_t loc, tree exp)
9977 {
9978 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9979 arg, type, res, tmp;
9980 enum machine_mode mode;
9981 REAL_VALUE_TYPE r;
9982 char buf[128];
9983
9984 /* Verify the required arguments in the original call. */
9985 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9986 INTEGER_TYPE, INTEGER_TYPE,
9987 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9988 return NULL_TREE;
9989
9990 fp_nan = CALL_EXPR_ARG (exp, 0);
9991 fp_infinite = CALL_EXPR_ARG (exp, 1);
9992 fp_normal = CALL_EXPR_ARG (exp, 2);
9993 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9994 fp_zero = CALL_EXPR_ARG (exp, 4);
9995 arg = CALL_EXPR_ARG (exp, 5);
9996 type = TREE_TYPE (arg);
9997 mode = TYPE_MODE (type);
9998 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9999
10000 /* fpclassify(x) ->
10001 isnan(x) ? FP_NAN :
10002 (fabs(x) == Inf ? FP_INFINITE :
10003 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10004 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10005
10006 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10007 build_real (type, dconst0));
10008 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10009 tmp, fp_zero, fp_subnormal);
10010
10011 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10012 real_from_string (&r, buf);
10013 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10014 arg, build_real (type, r));
10015 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10016
10017 if (HONOR_INFINITIES (mode))
10018 {
10019 real_inf (&r);
10020 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10021 build_real (type, r));
10022 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10023 fp_infinite, res);
10024 }
10025
10026 if (HONOR_NANS (mode))
10027 {
10028 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10029 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10030 }
10031
10032 return res;
10033 }
10034
10035 /* Fold a call to an unordered comparison function such as
10036 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10037 being called and ARG0 and ARG1 are the arguments for the call.
10038 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10039 the opposite of the desired result. UNORDERED_CODE is used
10040 for modes that can hold NaNs and ORDERED_CODE is used for
10041 the rest. */
10042
10043 static tree
10044 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10045 enum tree_code unordered_code,
10046 enum tree_code ordered_code)
10047 {
10048 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10049 enum tree_code code;
10050 tree type0, type1;
10051 enum tree_code code0, code1;
10052 tree cmp_type = NULL_TREE;
10053
10054 type0 = TREE_TYPE (arg0);
10055 type1 = TREE_TYPE (arg1);
10056
10057 code0 = TREE_CODE (type0);
10058 code1 = TREE_CODE (type1);
10059
10060 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10061 /* Choose the wider of two real types. */
10062 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10063 ? type0 : type1;
10064 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10065 cmp_type = type0;
10066 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10067 cmp_type = type1;
10068
10069 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10070 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10071
10072 if (unordered_code == UNORDERED_EXPR)
10073 {
10074 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10075 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10076 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10077 }
10078
10079 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10080 : ordered_code;
10081 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10082 fold_build2_loc (loc, code, type, arg0, arg1));
10083 }
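
/* Illustrative sketch, not part of GCC proper: the inversion trick
   above expressed in source form.  */
#if 0
static int
example_isgreater_fold (double x, double y)
{
  /* When NaNs are honored, isgreater (x, y) folds to !(x UNLE y): the
     negated unordered-or-less-equal comparison is true exactly when
     x > y, and unlike a raw x > y it raises no "invalid" exception on
     quiet NaNs.  */
  return __builtin_isgreater (x, y);
}
#endif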
10084
10085 /* Fold a call to built-in function FNDECL with 0 arguments.
10086 IGNORE is true if the result of the function call is ignored. This
10087 function returns NULL_TREE if no simplification was possible. */
10088
10089 static tree
10090 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10091 {
10092 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10093 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10094 switch (fcode)
10095 {
10096 CASE_FLT_FN (BUILT_IN_INF):
10097 case BUILT_IN_INFD32:
10098 case BUILT_IN_INFD64:
10099 case BUILT_IN_INFD128:
10100 return fold_builtin_inf (loc, type, true);
10101
10102 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10103 return fold_builtin_inf (loc, type, false);
10104
10105 case BUILT_IN_CLASSIFY_TYPE:
10106 return fold_builtin_classify_type (NULL_TREE);
10107
10108 case BUILT_IN_UNREACHABLE:
10109 if (flag_sanitize & SANITIZE_UNREACHABLE
10110 && (current_function_decl == NULL
10111 || !lookup_attribute ("no_sanitize_undefined",
10112 DECL_ATTRIBUTES (current_function_decl))))
10113 return ubsan_instrument_unreachable (loc);
10114 break;
10115
10116 default:
10117 break;
10118 }
10119 return NULL_TREE;
10120 }
10121
10122 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10123 IGNORE is true if the result of the function call is ignored. This
10124 function returns NULL_TREE if no simplification was possible. */
10125
10126 static tree
10127 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10128 {
10129 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10130 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10131 switch (fcode)
10132 {
10133 case BUILT_IN_CONSTANT_P:
10134 {
10135 tree val = fold_builtin_constant_p (arg0);
10136
10137 /* Gimplification will pull the CALL_EXPR for the builtin out of
10138 an if condition. When not optimizing, we'll not CSE it back.
10139 To avoid regressions in the form of link errors, return false now. */
10140 if (!val && !optimize)
10141 val = integer_zero_node;
10142
10143 return val;
10144 }
10145
10146 case BUILT_IN_CLASSIFY_TYPE:
10147 return fold_builtin_classify_type (arg0);
10148
10149 case BUILT_IN_STRLEN:
10150 return fold_builtin_strlen (loc, type, arg0);
10151
10152 CASE_FLT_FN (BUILT_IN_FABS):
10153 case BUILT_IN_FABSD32:
10154 case BUILT_IN_FABSD64:
10155 case BUILT_IN_FABSD128:
10156 return fold_builtin_fabs (loc, arg0, type);
10157
10158 case BUILT_IN_ABS:
10159 case BUILT_IN_LABS:
10160 case BUILT_IN_LLABS:
10161 case BUILT_IN_IMAXABS:
10162 return fold_builtin_abs (loc, arg0, type);
10163
10164 CASE_FLT_FN (BUILT_IN_CONJ):
10165 if (validate_arg (arg0, COMPLEX_TYPE)
10166 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10167 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10168 break;
10169
10170 CASE_FLT_FN (BUILT_IN_CREAL):
10171 if (validate_arg (arg0, COMPLEX_TYPE)
10172 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10173 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10174 break;
10175
10176 CASE_FLT_FN (BUILT_IN_CIMAG):
10177 if (validate_arg (arg0, COMPLEX_TYPE)
10178 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10179 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10180 break;
10181
10182 CASE_FLT_FN (BUILT_IN_CCOS):
10183 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10184
10185 CASE_FLT_FN (BUILT_IN_CCOSH):
10186 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10187
10188 CASE_FLT_FN (BUILT_IN_CPROJ):
10189 return fold_builtin_cproj (loc, arg0, type);
10190
10191 CASE_FLT_FN (BUILT_IN_CSIN):
10192 if (validate_arg (arg0, COMPLEX_TYPE)
10193 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10194 return do_mpc_arg1 (arg0, type, mpc_sin);
10195 break;
10196
10197 CASE_FLT_FN (BUILT_IN_CSINH):
10198 if (validate_arg (arg0, COMPLEX_TYPE)
10199 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10200 return do_mpc_arg1 (arg0, type, mpc_sinh);
10201 break;
10202
10203 CASE_FLT_FN (BUILT_IN_CTAN):
10204 if (validate_arg (arg0, COMPLEX_TYPE)
10205 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10206 return do_mpc_arg1 (arg0, type, mpc_tan);
10207 break;
10208
10209 CASE_FLT_FN (BUILT_IN_CTANH):
10210 if (validate_arg (arg0, COMPLEX_TYPE)
10211 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10212 return do_mpc_arg1 (arg0, type, mpc_tanh);
10213 break;
10214
10215 CASE_FLT_FN (BUILT_IN_CLOG):
10216 if (validate_arg (arg0, COMPLEX_TYPE)
10217 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10218 return do_mpc_arg1 (arg0, type, mpc_log);
10219 break;
10220
10221 CASE_FLT_FN (BUILT_IN_CSQRT):
10222 if (validate_arg (arg0, COMPLEX_TYPE)
10223 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10224 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10225 break;
10226
10227 CASE_FLT_FN (BUILT_IN_CASIN):
10228 if (validate_arg (arg0, COMPLEX_TYPE)
10229 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10230 return do_mpc_arg1 (arg0, type, mpc_asin);
10231 break;
10232
10233 CASE_FLT_FN (BUILT_IN_CACOS):
10234 if (validate_arg (arg0, COMPLEX_TYPE)
10235 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10236 return do_mpc_arg1 (arg0, type, mpc_acos);
10237 break;
10238
10239 CASE_FLT_FN (BUILT_IN_CATAN):
10240 if (validate_arg (arg0, COMPLEX_TYPE)
10241 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10242 return do_mpc_arg1 (arg0, type, mpc_atan);
10243 break;
10244
10245 CASE_FLT_FN (BUILT_IN_CASINH):
10246 if (validate_arg (arg0, COMPLEX_TYPE)
10247 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10248 return do_mpc_arg1 (arg0, type, mpc_asinh);
10249 break;
10250
10251 CASE_FLT_FN (BUILT_IN_CACOSH):
10252 if (validate_arg (arg0, COMPLEX_TYPE)
10253 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10254 return do_mpc_arg1 (arg0, type, mpc_acosh);
10255 break;
10256
10257 CASE_FLT_FN (BUILT_IN_CATANH):
10258 if (validate_arg (arg0, COMPLEX_TYPE)
10259 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10260 return do_mpc_arg1 (arg0, type, mpc_atanh);
10261 break;
10262
10263 CASE_FLT_FN (BUILT_IN_CABS):
10264 return fold_builtin_cabs (loc, arg0, type, fndecl);
10265
10266 CASE_FLT_FN (BUILT_IN_CARG):
10267 return fold_builtin_carg (loc, arg0, type);
10268
10269 CASE_FLT_FN (BUILT_IN_SQRT):
10270 return fold_builtin_sqrt (loc, arg0, type);
10271
10272 CASE_FLT_FN (BUILT_IN_CBRT):
10273 return fold_builtin_cbrt (loc, arg0, type);
10274
10275 CASE_FLT_FN (BUILT_IN_ASIN):
10276 if (validate_arg (arg0, REAL_TYPE))
10277 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10278 &dconstm1, &dconst1, true);
10279 break;
10280
10281 CASE_FLT_FN (BUILT_IN_ACOS):
10282 if (validate_arg (arg0, REAL_TYPE))
10283 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10284 &dconstm1, &dconst1, true);
10285 break;
10286
10287 CASE_FLT_FN (BUILT_IN_ATAN):
10288 if (validate_arg (arg0, REAL_TYPE))
10289 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10290 break;
10291
10292 CASE_FLT_FN (BUILT_IN_ASINH):
10293 if (validate_arg (arg0, REAL_TYPE))
10294 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10295 break;
10296
10297 CASE_FLT_FN (BUILT_IN_ACOSH):
10298 if (validate_arg (arg0, REAL_TYPE))
10299 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10300 &dconst1, NULL, true);
10301 break;
10302
10303 CASE_FLT_FN (BUILT_IN_ATANH):
10304 if (validate_arg (arg0, REAL_TYPE))
10305 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10306 &dconstm1, &dconst1, false);
10307 break;
10308
10309 CASE_FLT_FN (BUILT_IN_SIN):
10310 if (validate_arg (arg0, REAL_TYPE))
10311 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10312 break;
10313
10314 CASE_FLT_FN (BUILT_IN_COS):
10315 return fold_builtin_cos (loc, arg0, type, fndecl);
10316
10317 CASE_FLT_FN (BUILT_IN_TAN):
10318 return fold_builtin_tan (arg0, type);
10319
10320 CASE_FLT_FN (BUILT_IN_CEXP):
10321 return fold_builtin_cexp (loc, arg0, type);
10322
10323 CASE_FLT_FN (BUILT_IN_CEXPI):
10324 if (validate_arg (arg0, REAL_TYPE))
10325 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10326 break;
10327
10328 CASE_FLT_FN (BUILT_IN_SINH):
10329 if (validate_arg (arg0, REAL_TYPE))
10330 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10331 break;
10332
10333 CASE_FLT_FN (BUILT_IN_COSH):
10334 return fold_builtin_cosh (loc, arg0, type, fndecl);
10335
10336 CASE_FLT_FN (BUILT_IN_TANH):
10337 if (validate_arg (arg0, REAL_TYPE))
10338 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10339 break;
10340
10341 CASE_FLT_FN (BUILT_IN_ERF):
10342 if (validate_arg (arg0, REAL_TYPE))
10343 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10344 break;
10345
10346 CASE_FLT_FN (BUILT_IN_ERFC):
10347 if (validate_arg (arg0, REAL_TYPE))
10348 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10349 break;
10350
10351 CASE_FLT_FN (BUILT_IN_TGAMMA):
10352 if (validate_arg (arg0, REAL_TYPE))
10353 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10354 break;
10355
10356 CASE_FLT_FN (BUILT_IN_EXP):
10357 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10358
10359 CASE_FLT_FN (BUILT_IN_EXP2):
10360 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10361
10362 CASE_FLT_FN (BUILT_IN_EXP10):
10363 CASE_FLT_FN (BUILT_IN_POW10):
10364 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10365
10366 CASE_FLT_FN (BUILT_IN_EXPM1):
10367 if (validate_arg (arg0, REAL_TYPE))
10368 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10369 break;
10370
10371 CASE_FLT_FN (BUILT_IN_LOG):
10372 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10373
10374 CASE_FLT_FN (BUILT_IN_LOG2):
10375 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10376
10377 CASE_FLT_FN (BUILT_IN_LOG10):
10378 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10379
10380 CASE_FLT_FN (BUILT_IN_LOG1P):
10381 if (validate_arg (arg0, REAL_TYPE))
10382 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10383 &dconstm1, NULL, false);
10384 break;
10385
10386 CASE_FLT_FN (BUILT_IN_J0):
10387 if (validate_arg (arg0, REAL_TYPE))
10388 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10389 NULL, NULL, 0);
10390 break;
10391
10392 CASE_FLT_FN (BUILT_IN_J1):
10393 if (validate_arg (arg0, REAL_TYPE))
10394 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10395 NULL, NULL, 0);
10396 break;
10397
10398 CASE_FLT_FN (BUILT_IN_Y0):
10399 if (validate_arg (arg0, REAL_TYPE))
10400 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10401 &dconst0, NULL, false);
10402 break;
10403
10404 CASE_FLT_FN (BUILT_IN_Y1):
10405 if (validate_arg (arg0, REAL_TYPE))
10406 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10407 &dconst0, NULL, false);
10408 break;
10409
10410 CASE_FLT_FN (BUILT_IN_NAN):
10411 case BUILT_IN_NAND32:
10412 case BUILT_IN_NAND64:
10413 case BUILT_IN_NAND128:
10414 return fold_builtin_nan (arg0, type, true);
10415
10416 CASE_FLT_FN (BUILT_IN_NANS):
10417 return fold_builtin_nan (arg0, type, false);
10418
10419 CASE_FLT_FN (BUILT_IN_FLOOR):
10420 return fold_builtin_floor (loc, fndecl, arg0);
10421
10422 CASE_FLT_FN (BUILT_IN_CEIL):
10423 return fold_builtin_ceil (loc, fndecl, arg0);
10424
10425 CASE_FLT_FN (BUILT_IN_TRUNC):
10426 return fold_builtin_trunc (loc, fndecl, arg0);
10427
10428 CASE_FLT_FN (BUILT_IN_ROUND):
10429 return fold_builtin_round (loc, fndecl, arg0);
10430
10431 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10432 CASE_FLT_FN (BUILT_IN_RINT):
10433 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10434
10435 CASE_FLT_FN (BUILT_IN_ICEIL):
10436 CASE_FLT_FN (BUILT_IN_LCEIL):
10437 CASE_FLT_FN (BUILT_IN_LLCEIL):
10438 CASE_FLT_FN (BUILT_IN_LFLOOR):
10439 CASE_FLT_FN (BUILT_IN_IFLOOR):
10440 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10441 CASE_FLT_FN (BUILT_IN_IROUND):
10442 CASE_FLT_FN (BUILT_IN_LROUND):
10443 CASE_FLT_FN (BUILT_IN_LLROUND):
10444 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10445
10446 CASE_FLT_FN (BUILT_IN_IRINT):
10447 CASE_FLT_FN (BUILT_IN_LRINT):
10448 CASE_FLT_FN (BUILT_IN_LLRINT):
10449 return fold_fixed_mathfn (loc, fndecl, arg0);
10450
10451 case BUILT_IN_BSWAP16:
10452 case BUILT_IN_BSWAP32:
10453 case BUILT_IN_BSWAP64:
10454 return fold_builtin_bswap (fndecl, arg0);
10455
10456 CASE_INT_FN (BUILT_IN_FFS):
10457 CASE_INT_FN (BUILT_IN_CLZ):
10458 CASE_INT_FN (BUILT_IN_CTZ):
10459 CASE_INT_FN (BUILT_IN_CLRSB):
10460 CASE_INT_FN (BUILT_IN_POPCOUNT):
10461 CASE_INT_FN (BUILT_IN_PARITY):
10462 return fold_builtin_bitop (fndecl, arg0);
10463
10464 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10465 return fold_builtin_signbit (loc, arg0, type);
10466
10467 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10468 return fold_builtin_significand (loc, arg0, type);
10469
10470 CASE_FLT_FN (BUILT_IN_ILOGB):
10471 CASE_FLT_FN (BUILT_IN_LOGB):
10472 return fold_builtin_logb (loc, arg0, type);
10473
10474 case BUILT_IN_ISASCII:
10475 return fold_builtin_isascii (loc, arg0);
10476
10477 case BUILT_IN_TOASCII:
10478 return fold_builtin_toascii (loc, arg0);
10479
10480 case BUILT_IN_ISDIGIT:
10481 return fold_builtin_isdigit (loc, arg0);
10482
10483 CASE_FLT_FN (BUILT_IN_FINITE):
10484 case BUILT_IN_FINITED32:
10485 case BUILT_IN_FINITED64:
10486 case BUILT_IN_FINITED128:
10487 case BUILT_IN_ISFINITE:
10488 {
10489 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10490 if (ret)
10491 return ret;
10492 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10493 }
10494
10495 CASE_FLT_FN (BUILT_IN_ISINF):
10496 case BUILT_IN_ISINFD32:
10497 case BUILT_IN_ISINFD64:
10498 case BUILT_IN_ISINFD128:
10499 {
10500 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10501 if (ret)
10502 return ret;
10503 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10504 }
10505
10506 case BUILT_IN_ISNORMAL:
10507 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10508
10509 case BUILT_IN_ISINF_SIGN:
10510 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10511
10512 CASE_FLT_FN (BUILT_IN_ISNAN):
10513 case BUILT_IN_ISNAND32:
10514 case BUILT_IN_ISNAND64:
10515 case BUILT_IN_ISNAND128:
10516 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10517
10518 case BUILT_IN_PRINTF:
10519 case BUILT_IN_PRINTF_UNLOCKED:
10520 case BUILT_IN_VPRINTF:
10521 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10522
10523 case BUILT_IN_FREE:
10524 if (integer_zerop (arg0))
10525 return build_empty_stmt (loc);
10526 break;
10527
10528 default:
10529 break;
10530 }
10531
10532 return NULL_TREE;
10533
10534 }
10535
10536 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10537 IGNORE is true if the result of the function call is ignored. This
10538 function returns NULL_TREE if no simplification was possible. */
10539
10540 static tree
10541 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10542 {
10543 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10544 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10545
10546 switch (fcode)
10547 {
10548 CASE_FLT_FN (BUILT_IN_JN):
10549 if (validate_arg (arg0, INTEGER_TYPE)
10550 && validate_arg (arg1, REAL_TYPE))
10551 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10552 break;
10553
10554 CASE_FLT_FN (BUILT_IN_YN):
10555 if (validate_arg (arg0, INTEGER_TYPE)
10556 && validate_arg (arg1, REAL_TYPE))
10557 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10558 &dconst0, false);
10559 break;
10560
10561 CASE_FLT_FN (BUILT_IN_DREM):
10562 CASE_FLT_FN (BUILT_IN_REMAINDER):
10563 if (validate_arg (arg0, REAL_TYPE)
10564 && validate_arg (arg1, REAL_TYPE))
10565 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10566 break;
10567
10568 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10569 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10570 if (validate_arg (arg0, REAL_TYPE)
10571 && validate_arg (arg1, POINTER_TYPE))
10572 return do_mpfr_lgamma_r (arg0, arg1, type);
10573 break;
10574
10575 CASE_FLT_FN (BUILT_IN_ATAN2):
10576 if (validate_arg (arg0, REAL_TYPE)
10577 && validate_arg (arg1, REAL_TYPE))
10578 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10579 break;
10580
10581 CASE_FLT_FN (BUILT_IN_FDIM):
10582 if (validate_arg (arg0, REAL_TYPE)
10583 && validate_arg (arg1, REAL_TYPE))
10584 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10585 break;
10586
10587 CASE_FLT_FN (BUILT_IN_HYPOT):
10588 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10589
10590 CASE_FLT_FN (BUILT_IN_CPOW):
10591 if (validate_arg (arg0, COMPLEX_TYPE)
10592 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10593 && validate_arg (arg1, COMPLEX_TYPE)
10594 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10595 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10596 break;
10597
10598 CASE_FLT_FN (BUILT_IN_LDEXP):
10599 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10600 CASE_FLT_FN (BUILT_IN_SCALBN):
10601 CASE_FLT_FN (BUILT_IN_SCALBLN):
10602 return fold_builtin_load_exponent (loc, arg0, arg1,
10603 type, /*ldexp=*/false);
10604
10605 CASE_FLT_FN (BUILT_IN_FREXP):
10606 return fold_builtin_frexp (loc, arg0, arg1, type);
10607
10608 CASE_FLT_FN (BUILT_IN_MODF):
10609 return fold_builtin_modf (loc, arg0, arg1, type);
10610
10611 case BUILT_IN_BZERO:
10612 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10613
10614 case BUILT_IN_FPUTS:
10615 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10616
10617 case BUILT_IN_FPUTS_UNLOCKED:
10618 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10619
10620 case BUILT_IN_STRSTR:
10621 return fold_builtin_strstr (loc, arg0, arg1, type);
10622
10623 case BUILT_IN_STRCAT:
10624 return fold_builtin_strcat (loc, arg0, arg1);
10625
10626 case BUILT_IN_STRSPN:
10627 return fold_builtin_strspn (loc, arg0, arg1);
10628
10629 case BUILT_IN_STRCSPN:
10630 return fold_builtin_strcspn (loc, arg0, arg1);
10631
10632 case BUILT_IN_STRCHR:
10633 case BUILT_IN_INDEX:
10634 return fold_builtin_strchr (loc, arg0, arg1, type);
10635
10636 case BUILT_IN_STRRCHR:
10637 case BUILT_IN_RINDEX:
10638 return fold_builtin_strrchr (loc, arg0, arg1, type);
10639
10640 case BUILT_IN_STRCPY:
10641 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10642
10643 case BUILT_IN_STPCPY:
10644 if (ignore)
10645 {
10646 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10647 if (!fn)
10648 break;
10649
10650 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10651 }
10652 else
10653 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10654 break;
10655
10656 case BUILT_IN_STRCMP:
10657 return fold_builtin_strcmp (loc, arg0, arg1);
10658
10659 case BUILT_IN_STRPBRK:
10660 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10661
10662 case BUILT_IN_EXPECT:
10663 return fold_builtin_expect (loc, arg0, arg1);
10664
10665 CASE_FLT_FN (BUILT_IN_POW):
10666 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10667
10668 CASE_FLT_FN (BUILT_IN_POWI):
10669 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10670
10671 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10672 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10673
10674 CASE_FLT_FN (BUILT_IN_FMIN):
10675 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10676
10677 CASE_FLT_FN (BUILT_IN_FMAX):
10678 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10679
10680 case BUILT_IN_ISGREATER:
10681 return fold_builtin_unordered_cmp (loc, fndecl,
10682 arg0, arg1, UNLE_EXPR, LE_EXPR);
10683 case BUILT_IN_ISGREATEREQUAL:
10684 return fold_builtin_unordered_cmp (loc, fndecl,
10685 arg0, arg1, UNLT_EXPR, LT_EXPR);
10686 case BUILT_IN_ISLESS:
10687 return fold_builtin_unordered_cmp (loc, fndecl,
10688 arg0, arg1, UNGE_EXPR, GE_EXPR);
10689 case BUILT_IN_ISLESSEQUAL:
10690 return fold_builtin_unordered_cmp (loc, fndecl,
10691 arg0, arg1, UNGT_EXPR, GT_EXPR);
10692 case BUILT_IN_ISLESSGREATER:
10693 return fold_builtin_unordered_cmp (loc, fndecl,
10694 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10695 case BUILT_IN_ISUNORDERED:
10696 return fold_builtin_unordered_cmp (loc, fndecl,
10697 arg0, arg1, UNORDERED_EXPR,
10698 NOP_EXPR);
10699
10700 /* We do the folding for va_start in the expander. */
10701 case BUILT_IN_VA_START:
10702 break;
10703
10704 case BUILT_IN_SPRINTF:
10705 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10706
10707 case BUILT_IN_OBJECT_SIZE:
10708 return fold_builtin_object_size (arg0, arg1);
10709
10710 case BUILT_IN_PRINTF:
10711 case BUILT_IN_PRINTF_UNLOCKED:
10712 case BUILT_IN_VPRINTF:
10713 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10714
10715 case BUILT_IN_PRINTF_CHK:
10716 case BUILT_IN_VPRINTF_CHK:
10717 if (!validate_arg (arg0, INTEGER_TYPE)
10718 || TREE_SIDE_EFFECTS (arg0))
10719 return NULL_TREE;
10720 else
10721 return fold_builtin_printf (loc, fndecl,
10722 arg1, NULL_TREE, ignore, fcode);
10723 break;
10724
10725 case BUILT_IN_FPRINTF:
10726 case BUILT_IN_FPRINTF_UNLOCKED:
10727 case BUILT_IN_VFPRINTF:
10728 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10729 ignore, fcode);
10730
10731 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10732 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10733
10734 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10735 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10736
10737 default:
10738 break;
10739 }
10740 return NULL_TREE;
10741 }
10742
10743 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10744 and ARG2. IGNORE is true if the result of the function call is ignored.
10745 This function returns NULL_TREE if no simplification was possible. */
10746
10747 static tree
10748 fold_builtin_3 (location_t loc, tree fndecl,
10749 tree arg0, tree arg1, tree arg2, bool ignore)
10750 {
10751 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10752 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10753 switch (fcode)
10754 {
10755
10756 CASE_FLT_FN (BUILT_IN_SINCOS):
10757 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10758
10759 CASE_FLT_FN (BUILT_IN_FMA):
10760 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10761 break;
10762
10763 CASE_FLT_FN (BUILT_IN_REMQUO):
10764 if (validate_arg (arg0, REAL_TYPE)
10765 && validate_arg (arg1, REAL_TYPE)
10766 && validate_arg (arg2, POINTER_TYPE))
10767 return do_mpfr_remquo (arg0, arg1, arg2);
10768 break;
10769
10770 case BUILT_IN_MEMSET:
10771 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10772
10773 case BUILT_IN_BCOPY:
10774 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10775 void_type_node, true, /*endp=*/3);
10776
10777 case BUILT_IN_MEMCPY:
10778 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10779 type, ignore, /*endp=*/0);
10780
10781 case BUILT_IN_MEMPCPY:
10782 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10783 type, ignore, /*endp=*/1);
10784
10785 case BUILT_IN_MEMMOVE:
10786 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10787 type, ignore, /*endp=*/3);
10788
10789 case BUILT_IN_STRNCAT:
10790 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10791
10792 case BUILT_IN_STRNCPY:
10793 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10794
10795 case BUILT_IN_STRNCMP:
10796 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10797
10798 case BUILT_IN_MEMCHR:
10799 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10800
10801 case BUILT_IN_BCMP:
10802 case BUILT_IN_MEMCMP:
10803 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10804
10805 case BUILT_IN_SPRINTF:
10806 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10807
10808 case BUILT_IN_SNPRINTF:
10809 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10810
10811 case BUILT_IN_STRCPY_CHK:
10812 case BUILT_IN_STPCPY_CHK:
10813 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10814 ignore, fcode);
10815
10816 case BUILT_IN_STRCAT_CHK:
10817 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10818
10819 case BUILT_IN_PRINTF_CHK:
10820 case BUILT_IN_VPRINTF_CHK:
10821 if (!validate_arg (arg0, INTEGER_TYPE)
10822 || TREE_SIDE_EFFECTS (arg0))
10823 return NULL_TREE;
10824 else
10825 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10826 break;
10827
10828 case BUILT_IN_FPRINTF:
10829 case BUILT_IN_FPRINTF_UNLOCKED:
10830 case BUILT_IN_VFPRINTF:
10831 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10832 ignore, fcode);
10833
10834 case BUILT_IN_FPRINTF_CHK:
10835 case BUILT_IN_VFPRINTF_CHK:
10836 if (!validate_arg (arg1, INTEGER_TYPE)
10837 || TREE_SIDE_EFFECTS (arg1))
10838 return NULL_TREE;
10839 else
10840 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10841 ignore, fcode);
10842
10843 default:
10844 break;
10845 }
10846 return NULL_TREE;
10847 }
10848
10849 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10850 ARG2, and ARG3. IGNORE is true if the result of the function call is
10851 ignored. This function returns NULL_TREE if no simplification was
10852 possible. */
10853
10854 static tree
10855 fold_builtin_4 (location_t loc, tree fndecl,
10856 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10857 {
10858 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10859
10860 switch (fcode)
10861 {
10862 case BUILT_IN_MEMCPY_CHK:
10863 case BUILT_IN_MEMPCPY_CHK:
10864 case BUILT_IN_MEMMOVE_CHK:
10865 case BUILT_IN_MEMSET_CHK:
10866 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10867 NULL_TREE, ignore,
10868 DECL_FUNCTION_CODE (fndecl));
10869
10870 case BUILT_IN_STRNCPY_CHK:
10871 case BUILT_IN_STPNCPY_CHK:
10872 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
10873 ignore, fcode);
10874
10875 case BUILT_IN_STRNCAT_CHK:
10876 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10877
10878 case BUILT_IN_SNPRINTF:
10879 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10880
10881 case BUILT_IN_FPRINTF_CHK:
10882 case BUILT_IN_VFPRINTF_CHK:
10883 if (!validate_arg (arg1, INTEGER_TYPE)
10884 || TREE_SIDE_EFFECTS (arg1))
10885 return NULL_TREE;
10886 else
10887 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10888 ignore, fcode);
10889 break;
10890
10891 default:
10892 break;
10893 }
10894 return NULL_TREE;
10895 }
10896
10897 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10898 arguments, where NARGS <= 4. IGNORE is true if the result of the
10899 function call is ignored. This function returns NULL_TREE if no
10900 simplification was possible. Note that this only folds builtins with
10901 fixed argument patterns. Foldings that do varargs-to-varargs
10902 transformations, or that match calls with more than 4 arguments,
10903 need to be handled with fold_builtin_varargs instead. */
10904
10905 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10906
10907 static tree
10908 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10909 {
10910 tree ret = NULL_TREE;
10911
10912 switch (nargs)
10913 {
10914 case 0:
10915 ret = fold_builtin_0 (loc, fndecl, ignore);
10916 break;
10917 case 1:
10918 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10919 break;
10920 case 2:
10921 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10922 break;
10923 case 3:
10924 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10925 break;
10926 case 4:
10927 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10928 ignore);
10929 break;
10930 default:
10931 break;
10932 }
10933 if (ret)
10934 {
10935 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10936 SET_EXPR_LOCATION (ret, loc);
10937 TREE_NO_WARNING (ret) = 1;
10938 return ret;
10939 }
10940 return NULL_TREE;
10941 }
10942
10943 /* Builtins with folding operations that operate on "..." arguments
10944 need special handling; we need to store the arguments in a convenient
10945 data structure before attempting any folding. Fortunately there are
10946 only a few builtins that fall into this category. FNDECL is the
10947 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10948 result of the function call is ignored. */
10949
10950 static tree
10951 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10952 bool ignore ATTRIBUTE_UNUSED)
10953 {
10954 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10955 tree ret = NULL_TREE;
10956
10957 switch (fcode)
10958 {
10959 case BUILT_IN_SPRINTF_CHK:
10960 case BUILT_IN_VSPRINTF_CHK:
10961 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10962 break;
10963
10964 case BUILT_IN_SNPRINTF_CHK:
10965 case BUILT_IN_VSNPRINTF_CHK:
10966 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10967 break;
10968
10969 case BUILT_IN_FPCLASSIFY:
10970 ret = fold_builtin_fpclassify (loc, exp);
10971 break;
10972
10973 default:
10974 break;
10975 }
10976 if (ret)
10977 {
10978 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10979 SET_EXPR_LOCATION (ret, loc);
10980 TREE_NO_WARNING (ret) = 1;
10981 return ret;
10982 }
10983 return NULL_TREE;
10984 }
10985
10986 /* Return true if FNDECL shouldn't be folded right now.
10987 If a built-in function has an inline attribute always_inline
10988 wrapper, defer folding it after always_inline functions have
10989 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10990 might not be performed. */
10991
10992 bool
10993 avoid_folding_inline_builtin (tree fndecl)
10994 {
10995 return (DECL_DECLARED_INLINE_P (fndecl)
10996 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10997 && cfun
10998 && !cfun->always_inline_functions_inlined
10999 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11000 }
11001
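/* As an illustration (a sketch, not verbatim from any particular C
   library), the kind of -D_FORTIFY_SOURCE always_inline wrapper this
   check protects looks roughly like:

       extern inline __attribute__ ((always_inline, gnu_inline)) char *
       strcpy (char *dest, const char *src)
       {
         return __builtin___strcpy_chk (dest, src,
                                        __builtin_object_size (dest, 1));
       }

   Folding the strcpy builtin before this wrapper has been inlined
   would bypass the object-size check. */
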
11002 /* A wrapper function for builtin folding that prevents warnings for
11003 "statement without effect" and the like, caused by removing the
11004 call node earlier than the warning is generated. */
11005
11006 tree
11007 fold_call_expr (location_t loc, tree exp, bool ignore)
11008 {
11009 tree ret = NULL_TREE;
11010 tree fndecl = get_callee_fndecl (exp);
11011 if (fndecl
11012 && TREE_CODE (fndecl) == FUNCTION_DECL
11013 && DECL_BUILT_IN (fndecl)
11014 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11015 yet. Defer folding until we see all the arguments
11016 (after inlining). */
11017 && !CALL_EXPR_VA_ARG_PACK (exp))
11018 {
11019 int nargs = call_expr_nargs (exp);
11020
11021 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11022 instead last argument is __builtin_va_arg_pack (). Defer folding
11023 even in that case, until arguments are finalized. */
11024 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11025 {
11026 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11027 if (fndecl2
11028 && TREE_CODE (fndecl2) == FUNCTION_DECL
11029 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11030 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11031 return NULL_TREE;
11032 }
11033
11034 if (avoid_folding_inline_builtin (fndecl))
11035 return NULL_TREE;
11036
11037 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11038 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11039 CALL_EXPR_ARGP (exp), ignore);
11040 else
11041 {
11042 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11043 {
11044 tree *args = CALL_EXPR_ARGP (exp);
11045 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11046 }
11047 if (!ret)
11048 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11049 if (ret)
11050 return ret;
11051 }
11052 }
11053 return NULL_TREE;
11054 }
11055
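/* For instance, in an always_inline wrapper such as (sketch)

       extern inline __attribute__ ((always_inline, gnu_inline)) int
       my_printf (const char *fmt, ...)
       {
         return printf (fmt, __builtin_va_arg_pack ());
       }

   the inner call's arguments are not final until the wrapper is
   inlined, so the deferral above leaves the call alone. */
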
11056 /* Conveniently construct a function call expression. FNDECL names the
11057 function to be called and N arguments are passed in the array
11058 ARGARRAY. */
11059
11060 tree
11061 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11062 {
11063 tree fntype = TREE_TYPE (fndecl);
11064 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11065
11066 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11067 }
11068
11069 /* Conveniently construct a function call expression. FNDECL names the
11070 function to be called and the arguments are passed in the vector
11071 VEC. */
11072
11073 tree
11074 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11075 {
11076 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11077 vec_safe_address (vec));
11078 }
11079
11080
11081 /* Conveniently construct a function call expression. FNDECL names the
11082 function to be called, N is the number of arguments, and the "..."
11083 parameters are the argument expressions. */
11084
11085 tree
11086 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11087 {
11088 va_list ap;
11089 tree *argarray = XALLOCAVEC (tree, n);
11090 int i;
11091
11092 va_start (ap, n);
11093 for (i = 0; i < n; i++)
11094 argarray[i] = va_arg (ap, tree);
11095 va_end (ap);
11096 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11097 }
11098
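/* Usage sketch (mirroring the folders later in this file): to build a
   call strchr (s1, 'a') at LOC, assuming the implicit decl exists:

       tree fn = builtin_decl_implicit (BUILT_IN_STRCHR);
       if (fn)
         return build_call_expr_loc (loc, fn, 2, s1,
                                     build_int_cst (integer_type_node,
                                                    'a'));              */
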
11099 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11100 varargs macros aren't supported by all bootstrap compilers. */
11101
11102 tree
11103 build_call_expr (tree fndecl, int n, ...)
11104 {
11105 va_list ap;
11106 tree *argarray = XALLOCAVEC (tree, n);
11107 int i;
11108
11109 va_start (ap, n);
11110 for (i = 0; i < n; i++)
11111 argarray[i] = va_arg (ap, tree);
11112 va_end (ap);
11113 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11114 }
11115
11116 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11117 N arguments are passed in the array ARGARRAY. */
11118
11119 tree
11120 fold_builtin_call_array (location_t loc, tree type,
11121 tree fn,
11122 int n,
11123 tree *argarray)
11124 {
11125 tree ret = NULL_TREE;
11126 tree exp;
11127
11128 if (TREE_CODE (fn) == ADDR_EXPR)
11129 {
11130 tree fndecl = TREE_OPERAND (fn, 0);
11131 if (TREE_CODE (fndecl) == FUNCTION_DECL
11132 && DECL_BUILT_IN (fndecl))
11133 {
11134 /* If last argument is __builtin_va_arg_pack (), arguments to this
11135 function are not finalized yet. Defer folding until they are. */
11136 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11137 {
11138 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11139 if (fndecl2
11140 && TREE_CODE (fndecl2) == FUNCTION_DECL
11141 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11142 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11143 return build_call_array_loc (loc, type, fn, n, argarray);
11144 }
11145 if (avoid_folding_inline_builtin (fndecl))
11146 return build_call_array_loc (loc, type, fn, n, argarray);
11147 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11148 {
11149 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11150 if (ret)
11151 return ret;
11152
11153 return build_call_array_loc (loc, type, fn, n, argarray);
11154 }
11155 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11156 {
11157 /* First try the transformations that don't require consing up
11158 an exp. */
11159 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11160 if (ret)
11161 return ret;
11162 }
11163
11164 /* If we got this far, we need to build an exp. */
11165 exp = build_call_array_loc (loc, type, fn, n, argarray);
11166 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11167 return ret ? ret : exp;
11168 }
11169 }
11170
11171 return build_call_array_loc (loc, type, fn, n, argarray);
11172 }
11173
11174 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11175 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11176 of arguments in ARGS to be omitted. OLDNARGS is the number of
11177 elements in ARGS. */
11178
11179 static tree
11180 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11181 int skip, tree fndecl, int n, va_list newargs)
11182 {
11183 int nargs = oldnargs - skip + n;
11184 tree *buffer;
11185
11186 if (n > 0)
11187 {
11188 int i, j;
11189
11190 buffer = XALLOCAVEC (tree, nargs);
11191 for (i = 0; i < n; i++)
11192 buffer[i] = va_arg (newargs, tree);
11193 for (j = skip; j < oldnargs; j++, i++)
11194 buffer[i] = args[j];
11195 }
11196 else
11197 buffer = args + skip;
11198
11199 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11200 }
11201
11202 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11203 list ARGS along with N new arguments specified as the "..."
11204 parameters. SKIP is the number of arguments in ARGS to be omitted.
11205 OLDNARGS is the number of elements in ARGS. */
11206
11207 static tree
11208 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11209 int skip, tree fndecl, int n, ...)
11210 {
11211 va_list ap;
11212 tree t;
11213
11214 va_start (ap, n);
11215 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11216 va_end (ap);
11217
11218 return t;
11219 }
11220
11221 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11222 along with N new arguments specified as the "..." parameters. SKIP
11223 is the number of arguments in EXP to be omitted. This function is used
11224 to do varargs-to-varargs transformations. */
11225
11226 static tree
11227 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11228 {
11229 va_list ap;
11230 tree t;
11231
11232 va_start (ap, n);
11233 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11234 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11235 va_end (ap);
11236
11237 return t;
11238 }
11239
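/* A hedged usage sketch (SPRINTF_DECL is hypothetical): rewriting
   __builtin___sprintf_chk (dest, flag, os, fmt, ...) into
   sprintf (dest, fmt, ...) skips the three leading arguments DEST,
   FLAG and OS and supplies DEST again as the one new argument:

       return rewrite_call_expr (loc, exp, 3, sprintf_decl, 1, dest);

   yielding the call sprintf (dest, fmt, <remaining varargs>). */
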
11240 /* Validate a single argument ARG against a tree code CODE representing
11241 a type. */
11242
11243 static bool
11244 validate_arg (const_tree arg, enum tree_code code)
11245 {
11246 if (!arg)
11247 return false;
11248 else if (code == POINTER_TYPE)
11249 return POINTER_TYPE_P (TREE_TYPE (arg));
11250 else if (code == INTEGER_TYPE)
11251 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11252 return code == TREE_CODE (TREE_TYPE (arg));
11253 }
11254
11255 /* This function validates the types of a function call argument list
11256 against a specified list of tree_codes. If the last specifier is a 0,
11257 that represents an ellipsis, otherwise the last specifier must be a
11258 VOID_TYPE.
11259
11260 This is the GIMPLE version of validate_arglist. Eventually we want to
11261 completely convert builtins.c to work from GIMPLEs and the tree based
11262 validate_arglist will then be removed. */
11263
11264 bool
11265 validate_gimple_arglist (const_gimple call, ...)
11266 {
11267 enum tree_code code;
11268 bool res = false;
11269 va_list ap;
11270 const_tree arg;
11271 size_t i;
11272
11273 va_start (ap, call);
11274 i = 0;
11275
11276 do
11277 {
11278 code = (enum tree_code) va_arg (ap, int);
11279 switch (code)
11280 {
11281 case 0:
11282 /* This signifies an ellipsis; any further arguments are all ok. */
11283 res = true;
11284 goto end;
11285 case VOID_TYPE:
11286 /* This signifies an endlink; if no arguments remain, return
11287 true, otherwise return false. */
11288 res = (i == gimple_call_num_args (call));
11289 goto end;
11290 default:
11291 /* If no parameters remain or the parameter's code does not
11292 match the specified code, return false. Otherwise continue
11293 checking any remaining arguments. */
11294 arg = gimple_call_arg (call, i++);
11295 if (!validate_arg (arg, code))
11296 goto end;
11297 break;
11298 }
11299 }
11300 while (1);
11301
11302 /* We need gotos here since we can only have one VA_CLOSE in a
11303 function. */
11304 end: ;
11305 va_end (ap);
11306
11307 return res;
11308 }
11309
11310 /* This function validates the types of a function call argument list
11311 against a specified list of tree_codes. If the last specifier is a 0,
11312 that represents an ellipsis, otherwise the last specifier must be a
11313 VOID_TYPE. */
11314
11315 bool
11316 validate_arglist (const_tree callexpr, ...)
11317 {
11318 enum tree_code code;
11319 bool res = false;
11320 va_list ap;
11321 const_call_expr_arg_iterator iter;
11322 const_tree arg;
11323
11324 va_start (ap, callexpr);
11325 init_const_call_expr_arg_iterator (callexpr, &iter);
11326
11327 do
11328 {
11329 code = (enum tree_code) va_arg (ap, int);
11330 switch (code)
11331 {
11332 case 0:
11333 /* This signifies an ellipsis; any further arguments are all ok. */
11334 res = true;
11335 goto end;
11336 case VOID_TYPE:
11337 /* This signifies an endlink; if no arguments remain, return
11338 true, otherwise return false. */
11339 res = !more_const_call_expr_args_p (&iter);
11340 goto end;
11341 default:
11342 /* If no parameters remain or the parameter's code does not
11343 match the specified code, return false. Otherwise continue
11344 checking any remaining arguments. */
11345 arg = next_const_call_expr_arg (&iter);
11346 if (!validate_arg (arg, code))
11347 goto end;
11348 break;
11349 }
11350 }
11351 while (1);
11352
11353 /* We need gotos here since we can only have one VA_CLOSE in a
11354 function. */
11355 end: ;
11356 va_end (ap);
11357
11358 return res;
11359 }
11360
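/* For example, a two-pointer builtin such as strstr is validated with

       validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   while a function taking trailing varargs ends the specifier list
   with 0 instead of VOID_TYPE:

       validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, 0)

   (illustrative calls in the style used throughout this file). */
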
11361 /* Default target-specific builtin expander that does nothing. */
11362
11363 rtx
11364 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11365 rtx target ATTRIBUTE_UNUSED,
11366 rtx subtarget ATTRIBUTE_UNUSED,
11367 enum machine_mode mode ATTRIBUTE_UNUSED,
11368 int ignore ATTRIBUTE_UNUSED)
11369 {
11370 return NULL_RTX;
11371 }
11372
11373 /* Returns true if EXP represents data that would potentially reside
11374 in a readonly section. */
11375
11376 static bool
11377 readonly_data_expr (tree exp)
11378 {
11379 STRIP_NOPS (exp);
11380
11381 if (TREE_CODE (exp) != ADDR_EXPR)
11382 return false;
11383
11384 exp = get_base_address (TREE_OPERAND (exp, 0));
11385 if (!exp)
11386 return false;
11387
11388 /* Make sure we call decl_readonly_section only for trees it
11389 can handle (since it returns true for everything it doesn't
11390 understand). */
11391 if (TREE_CODE (exp) == STRING_CST
11392 || TREE_CODE (exp) == CONSTRUCTOR
11393 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11394 return decl_readonly_section (exp, 0);
11395 else
11396 return false;
11397 }
11398
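/* For instance, the address of a string literal or of a TREE_STATIC
   variable placed in a read-only section answers true here; that is
   what lets a memmove from such a source be treated like a memcpy
   elsewhere in this file (illustrative, not an exhaustive list). */
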
11399 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11400 to the call, and TYPE is its return type.
11401
11402 Return NULL_TREE if no simplification was possible, otherwise return the
11403 simplified form of the call as a tree.
11404
11405 The simplified form may be a constant or other expression which
11406 computes the same value, but in a more efficient manner (including
11407 calls to other builtin functions).
11408
11409 The call may contain arguments which need to be evaluated, but
11410 which are not useful to determine the result of the call. In
11411 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11412 COMPOUND_EXPR will be an argument which must be evaluated.
11413 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11414 COMPOUND_EXPR in the chain will contain the tree for the simplified
11415 form of the builtin function call. */
11416
11417 static tree
11418 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11419 {
11420 if (!validate_arg (s1, POINTER_TYPE)
11421 || !validate_arg (s2, POINTER_TYPE))
11422 return NULL_TREE;
11423 else
11424 {
11425 tree fn;
11426 const char *p1, *p2;
11427
11428 p2 = c_getstr (s2);
11429 if (p2 == NULL)
11430 return NULL_TREE;
11431
11432 p1 = c_getstr (s1);
11433 if (p1 != NULL)
11434 {
11435 const char *r = strstr (p1, p2);
11436 tree tem;
11437
11438 if (r == NULL)
11439 return build_int_cst (TREE_TYPE (s1), 0);
11440
11441 /* Return an offset into the constant string argument. */
11442 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11443 return fold_convert_loc (loc, type, tem);
11444 }
11445
11446 /* The argument is const char *, and the result is char *, so we need
11447 a type conversion here to avoid a warning. */
11448 if (p2[0] == '\0')
11449 return fold_convert_loc (loc, type, s1);
11450
11451 if (p2[1] != '\0')
11452 return NULL_TREE;
11453
11454 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11455 if (!fn)
11456 return NULL_TREE;
11457
11458 /* New argument list transforming strstr(s1, s2) to
11459 strchr(s1, s2[0]). */
11460 return build_call_expr_loc (loc, fn, 2, s1,
11461 build_int_cst (integer_type_node, p2[0]));
11462 }
11463 }
11464
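/* Effect sketch of the strstr folding above:

       strstr (s, "")         -> (char *) s
       strstr ("hello", "ll") -> "hello" + 2    (constant fold)
       strstr (s, "l")        -> strchr (s, 'l')

   Calls whose second argument is longer than one character and whose
   first argument is not a constant string are left alone. */
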
11465 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11466 the call, and TYPE is its return type.
11467
11468 Return NULL_TREE if no simplification was possible, otherwise return the
11469 simplified form of the call as a tree.
11470
11471 The simplified form may be a constant or other expression which
11472 computes the same value, but in a more efficient manner (including
11473 calls to other builtin functions).
11474
11475 The call may contain arguments which need to be evaluated, but
11476 which are not useful to determine the result of the call. In
11477 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11478 COMPOUND_EXPR will be an argument which must be evaluated.
11479 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11480 COMPOUND_EXPR in the chain will contain the tree for the simplified
11481 form of the builtin function call. */
11482
11483 static tree
11484 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11485 {
11486 if (!validate_arg (s1, POINTER_TYPE)
11487 || !validate_arg (s2, INTEGER_TYPE))
11488 return NULL_TREE;
11489 else
11490 {
11491 const char *p1;
11492
11493 if (TREE_CODE (s2) != INTEGER_CST)
11494 return NULL_TREE;
11495
11496 p1 = c_getstr (s1);
11497 if (p1 != NULL)
11498 {
11499 char c;
11500 const char *r;
11501 tree tem;
11502
11503 if (target_char_cast (s2, &c))
11504 return NULL_TREE;
11505
11506 r = strchr (p1, c);
11507
11508 if (r == NULL)
11509 return build_int_cst (TREE_TYPE (s1), 0);
11510
11511 /* Return an offset into the constant string argument. */
11512 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11513 return fold_convert_loc (loc, type, tem);
11514 }
11515 return NULL_TREE;
11516 }
11517 }
11518
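/* Effect sketch: with a constant string the strchr call above folds
   to a pointer constant, e.g. strchr ("hello", 'e') -> "hello" + 1
   and strchr ("hello", 'q') -> (char *) 0. */
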
11519 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11520 the call, and TYPE is its return type.
11521
11522 Return NULL_TREE if no simplification was possible, otherwise return the
11523 simplified form of the call as a tree.
11524
11525 The simplified form may be a constant or other expression which
11526 computes the same value, but in a more efficient manner (including
11527 calls to other builtin functions).
11528
11529 The call may contain arguments which need to be evaluated, but
11530 which are not useful to determine the result of the call. In
11531 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11532 COMPOUND_EXPR will be an argument which must be evaluated.
11533 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11534 COMPOUND_EXPR in the chain will contain the tree for the simplified
11535 form of the builtin function call. */
11536
11537 static tree
11538 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11539 {
11540 if (!validate_arg (s1, POINTER_TYPE)
11541 || !validate_arg (s2, INTEGER_TYPE))
11542 return NULL_TREE;
11543 else
11544 {
11545 tree fn;
11546 const char *p1;
11547
11548 if (TREE_CODE (s2) != INTEGER_CST)
11549 return NULL_TREE;
11550
11551 p1 = c_getstr (s1);
11552 if (p1 != NULL)
11553 {
11554 char c;
11555 const char *r;
11556 tree tem;
11557
11558 if (target_char_cast (s2, &c))
11559 return NULL_TREE;
11560
11561 r = strrchr (p1, c);
11562
11563 if (r == NULL)
11564 return build_int_cst (TREE_TYPE (s1), 0);
11565
11566 /* Return an offset into the constant string argument. */
11567 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11568 return fold_convert_loc (loc, type, tem);
11569 }
11570
11571 if (! integer_zerop (s2))
11572 return NULL_TREE;
11573
11574 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11575 if (!fn)
11576 return NULL_TREE;
11577
11578 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11579 return build_call_expr_loc (loc, fn, 2, s1, s2);
11580 }
11581 }
11582
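/* Effect sketch of the strrchr folding above:

       strrchr ("banana", 'a') -> "banana" + 5    (constant fold)
       strrchr (s, '\0')       -> strchr (s, '\0')

   the latter because both functions return a pointer to the
   terminating NUL when searching for '\0'. */
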
11583 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11584 to the call, and TYPE is its return type.
11585
11586 Return NULL_TREE if no simplification was possible, otherwise return the
11587 simplified form of the call as a tree.
11588
11589 The simplified form may be a constant or other expression which
11590 computes the same value, but in a more efficient manner (including
11591 calls to other builtin functions).
11592
11593 The call may contain arguments which need to be evaluated, but
11594 which are not useful to determine the result of the call. In
11595 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11596 COMPOUND_EXPR will be an argument which must be evaluated.
11597 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11598 COMPOUND_EXPR in the chain will contain the tree for the simplified
11599 form of the builtin function call. */
11600
11601 static tree
11602 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11603 {
11604 if (!validate_arg (s1, POINTER_TYPE)
11605 || !validate_arg (s2, POINTER_TYPE))
11606 return NULL_TREE;
11607 else
11608 {
11609 tree fn;
11610 const char *p1, *p2;
11611
11612 p2 = c_getstr (s2);
11613 if (p2 == NULL)
11614 return NULL_TREE;
11615
11616 p1 = c_getstr (s1);
11617 if (p1 != NULL)
11618 {
11619 const char *r = strpbrk (p1, p2);
11620 tree tem;
11621
11622 if (r == NULL)
11623 return build_int_cst (TREE_TYPE (s1), 0);
11624
11625 /* Return an offset into the constant string argument. */
11626 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11627 return fold_convert_loc (loc, type, tem);
11628 }
11629
11630 if (p2[0] == '\0')
11631 /* strpbrk(x, "") == NULL.
11632 Evaluate and ignore s1 in case it had side-effects. */
11633 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11634
11635 if (p2[1] != '\0')
11636 return NULL_TREE; /* Really call strpbrk. */
11637
11638 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11639 if (!fn)
11640 return NULL_TREE;
11641
11642 /* New argument list transforming strpbrk(s1, s2) to
11643 strchr(s1, s2[0]). */
11644 return build_call_expr_loc (loc, fn, 2, s1,
11645 build_int_cst (integer_type_node, p2[0]));
11646 }
11647 }
11648
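/* Effect sketch of the strpbrk folding above:

       strpbrk ("seed", "ed") -> "seed" + 1    (constant fold)
       strpbrk (x, "")        -> (char *) 0    (X still evaluated)
       strpbrk (x, "e")       -> strchr (x, 'e')                  */
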
11649 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11650 to the call.
11651
11652 Return NULL_TREE if no simplification was possible, otherwise return the
11653 simplified form of the call as a tree.
11654
11655 The simplified form may be a constant or other expression which
11656 computes the same value, but in a more efficient manner (including
11657 calls to other builtin functions).
11658
11659 The call may contain arguments which need to be evaluated, but
11660 which are not useful to determine the result of the call. In
11661 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11662 COMPOUND_EXPR will be an argument which must be evaluated.
11663 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11664 COMPOUND_EXPR in the chain will contain the tree for the simplified
11665 form of the builtin function call. */
11666
11667 static tree
11668 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11669 {
11670 if (!validate_arg (dst, POINTER_TYPE)
11671 || !validate_arg (src, POINTER_TYPE))
11672 return NULL_TREE;
11673 else
11674 {
11675 const char *p = c_getstr (src);
11676
11677 /* If the string length is zero, return the dst parameter. */
11678 if (p && *p == '\0')
11679 return dst;
11680
11681 if (optimize_insn_for_speed_p ())
11682 {
11683 /* See if we can store by pieces into (dst + strlen(dst)). */
11684 tree newdst, call;
11685 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11686 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11687
11688 if (!strlen_fn || !strcpy_fn)
11689 return NULL_TREE;
11690
11691 /* If we don't have a movstr pattern we don't want to emit a strcpy
11692 call unless the length of the source string is computable and free
11693 of side effects (in that case the copy can be expanded as a memcpy,
11694 probably later becoming a sequence of mov instructions). If we
11695 have movstr instructions we can emit strcpy calls unconditionally. */
11696 if (!HAVE_movstr)
11697 {
11698 tree len = c_strlen (src, 1);
11699 if (! len || TREE_SIDE_EFFECTS (len))
11700 return NULL_TREE;
11701 }
11702
11703 /* Stabilize the argument list. */
11704 dst = builtin_save_expr (dst);
11705
11706 /* Create strlen (dst). */
11707 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11708 /* Create (dst p+ strlen (dst)). */
11709
11710 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11711 newdst = builtin_save_expr (newdst);
11712
11713 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11714 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11715 }
11716 return NULL_TREE;
11717 }
11718 }
11719
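/* Roughly, when optimizing for speed the transformation above turns

       strcat (dst, src);

   into the equivalent of

       (strcpy (dst + strlen (dst), src), dst)

   with DST wrapped in a SAVE_EXPR so it is evaluated only once
   (a sketch of the emitted COMPOUND_EXPR). */
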
11720 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11721 arguments to the call.
11722
11723 Return NULL_TREE if no simplification was possible, otherwise return the
11724 simplified form of the call as a tree.
11725
11726 The simplified form may be a constant or other expression which
11727 computes the same value, but in a more efficient manner (including
11728 calls to other builtin functions).
11729
11730 The call may contain arguments which need to be evaluated, but
11731 which are not useful to determine the result of the call. In
11732 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11733 COMPOUND_EXPR will be an argument which must be evaluated.
11734 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11735 COMPOUND_EXPR in the chain will contain the tree for the simplified
11736 form of the builtin function call. */
11737
11738 static tree
11739 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11740 {
11741 if (!validate_arg (dst, POINTER_TYPE)
11742 || !validate_arg (src, POINTER_TYPE)
11743 || !validate_arg (len, INTEGER_TYPE))
11744 return NULL_TREE;
11745 else
11746 {
11747 const char *p = c_getstr (src);
11748
11749 /* If the requested length is zero, or the src parameter string
11750 length is zero, return the dst parameter. */
11751 if (integer_zerop (len) || (p && *p == '\0'))
11752 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11753
11754 /* If the requested len is greater than or equal to the string
11755 length, call strcat. */
11756 if (TREE_CODE (len) == INTEGER_CST && p
11757 && compare_tree_int (len, strlen (p)) >= 0)
11758 {
11759 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11760
11761 /* If the replacement _DECL isn't initialized, don't do the
11762 transformation. */
11763 if (!fn)
11764 return NULL_TREE;
11765
11766 return build_call_expr_loc (loc, fn, 2, dst, src);
11767 }
11768 return NULL_TREE;
11769 }
11770 }
11771
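/* For example (a sketch): strncat (dst, "ab", 5) becomes
   strcat (dst, "ab") since the bound 5 >= strlen ("ab"), and
   strncat (dst, src, 0) folds to DST, with SRC and the length still
   evaluated for side effects. */
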
11772 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11773 to the call.
11774
11775 Return NULL_TREE if no simplification was possible, otherwise return the
11776 simplified form of the call as a tree.
11777
11778 The simplified form may be a constant or other expression which
11779 computes the same value, but in a more efficient manner (including
11780 calls to other builtin functions).
11781
11782 The call may contain arguments which need to be evaluated, but
11783 which are not useful to determine the result of the call. In
11784 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11785 COMPOUND_EXPR will be an argument which must be evaluated.
11786 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11787 COMPOUND_EXPR in the chain will contain the tree for the simplified
11788 form of the builtin function call. */
11789
11790 static tree
11791 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11792 {
11793 if (!validate_arg (s1, POINTER_TYPE)
11794 || !validate_arg (s2, POINTER_TYPE))
11795 return NULL_TREE;
11796 else
11797 {
11798 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11799
11800 /* If both arguments are constants, evaluate at compile-time. */
11801 if (p1 && p2)
11802 {
11803 const size_t r = strspn (p1, p2);
11804 return build_int_cst (size_type_node, r);
11805 }
11806
11807 /* If either argument is "", the result is 0. */
11808 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11809 /* Evaluate and ignore both arguments in case either one has
11810 side-effects. */
11811 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11812 s1, s2);
11813 return NULL_TREE;
11814 }
11815 }
11816
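/* Effect sketch: strspn ("abcba", "ab") folds to the constant 2,
   while strspn (s, "") and strspn ("", s) fold to 0 with both
   arguments still evaluated for their side effects. */
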
11817 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11818 to the call.
11819
11820 Return NULL_TREE if no simplification was possible, otherwise return the
11821 simplified form of the call as a tree.
11822
11823 The simplified form may be a constant or other expression which
11824 computes the same value, but in a more efficient manner (including
11825 calls to other builtin functions).
11826
11827 The call may contain arguments which need to be evaluated, but
11828 which are not useful to determine the result of the call. In
11829 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11830 COMPOUND_EXPR will be an argument which must be evaluated.
11831 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11832 COMPOUND_EXPR in the chain will contain the tree for the simplified
11833 form of the builtin function call. */
11834
11835 static tree
11836 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11837 {
11838 if (!validate_arg (s1, POINTER_TYPE)
11839 || !validate_arg (s2, POINTER_TYPE))
11840 return NULL_TREE;
11841 else
11842 {
11843 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11844
11845 /* If both arguments are constants, evaluate at compile-time. */
11846 if (p1 && p2)
11847 {
11848 const size_t r = strcspn (p1, p2);
11849 return build_int_cst (size_type_node, r);
11850 }
11851
11852 /* If the first argument is "", the result is 0. */
11853 if (p1 && *p1 == '\0')
11854 {
11855 /* Evaluate and ignore argument s2 in case it has
11856 side-effects. */
11857 return omit_one_operand_loc (loc, size_type_node,
11858 size_zero_node, s2);
11859 }
11860
11861 /* If the second argument is "", return __builtin_strlen(s1). */
11862 if (p2 && *p2 == '\0')
11863 {
11864 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11865
11866 /* If the replacement _DECL isn't initialized, don't do the
11867 transformation. */
11868 if (!fn)
11869 return NULL_TREE;
11870
11871 return build_call_expr_loc (loc, fn, 1, s1);
11872 }
11873 return NULL_TREE;
11874 }
11875 }
11876
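/* Effect sketch: strcspn ("abcba", "c") folds to the constant 2,
   strcspn ("", s) folds to 0, and strcspn (s, "") becomes
   __builtin_strlen (s), since no character of S occurs in "". */
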
11877 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11878 to the call. IGNORE is true if the value returned
11879 by the builtin will be ignored. UNLOCKED is true if this is
11880 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11881 the known length of the string. Return NULL_TREE if no simplification
11882 was possible. */
11883
11884 tree
11885 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11886 bool ignore, bool unlocked, tree len)
11887 {
11888 /* If we're using an unlocked function, assume the other unlocked
11889 functions exist explicitly. */
11890 tree const fn_fputc = (unlocked
11891 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11892 : builtin_decl_implicit (BUILT_IN_FPUTC));
11893 tree const fn_fwrite = (unlocked
11894 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11895 : builtin_decl_implicit (BUILT_IN_FWRITE));
11896
11897 /* If the return value is used, don't do the transformation. */
11898 if (!ignore)
11899 return NULL_TREE;
11900
11901 /* Verify the arguments in the original call. */
11902 if (!validate_arg (arg0, POINTER_TYPE)
11903 || !validate_arg (arg1, POINTER_TYPE))
11904 return NULL_TREE;
11905
11906 if (! len)
11907 len = c_strlen (arg0, 0);
11908
11909 /* Get the length of the string passed to fputs. If the length
11910 can't be determined, punt. */
11911 if (!len
11912 || TREE_CODE (len) != INTEGER_CST)
11913 return NULL_TREE;
11914
11915 switch (compare_tree_int (len, 1))
11916 {
11917 case -1: /* length is 0, delete the call entirely. */
11918 return omit_one_operand_loc (loc, integer_type_node,
11919 integer_zero_node, arg1);
11920
11921 case 0: /* length is 1, call fputc. */
11922 {
11923 const char *p = c_getstr (arg0);
11924
11925 if (p != NULL)
11926 {
11927 if (fn_fputc)
11928 return build_call_expr_loc (loc, fn_fputc, 2,
11929 build_int_cst
11930 (integer_type_node, p[0]), arg1);
11931 else
11932 return NULL_TREE;
11933 }
11934 }
11935 /* FALLTHROUGH */
11936 case 1: /* length is greater than 1, call fwrite. */
11937 {
11938 /* If optimizing for size keep fputs. */
11939 if (optimize_function_for_size_p (cfun))
11940 return NULL_TREE;
11941 /* New argument list transforming fputs(string, stream) to
11942 fwrite(string, 1, len, stream). */
11943 if (fn_fwrite)
11944 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11945 size_one_node, len, arg1);
11946 else
11947 return NULL_TREE;
11948 }
11949 default:
11950 gcc_unreachable ();
11951 }
11952 return NULL_TREE;
11953 }
11954
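/* Effect sketch, assuming the return value is ignored:

       fputs ("", f)    -> call deleted (F still evaluated)
       fputs ("a", f)   -> fputc ('a', f)
       fputs ("abc", f) -> fwrite ("abc", 1, 3, f)    (unless
                           optimizing for size)                   */
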
11955 /* Fold the next_arg or va_start call EXP. Returns true if an error
11956 was produced, false otherwise. This is done so that we don't output
11957 the error or warning twice or three times. */
11958
11959 bool
11960 fold_builtin_next_arg (tree exp, bool va_start_p)
11961 {
11962 tree fntype = TREE_TYPE (current_function_decl);
11963 int nargs = call_expr_nargs (exp);
11964 tree arg;
11965 /* There is a good chance the current input_location points inside the
11966 definition of the va_start macro (perhaps on the token for the
11967 builtin) in a system header, so warnings will not be emitted.
11968 Use the location in real source code. */
11969 source_location current_location =
11970 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11971 NULL);
11972
11973 if (!stdarg_p (fntype))
11974 {
11975 error ("%<va_start%> used in function with fixed args");
11976 return true;
11977 }
11978
11979 if (va_start_p)
11980 {
11981 if (nargs != 2)
11982 {
11983 error ("wrong number of arguments to function %<va_start%>");
11984 return true;
11985 }
11986 arg = CALL_EXPR_ARG (exp, 1);
11987 }
11988 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11989 when we checked the arguments and if needed issued a warning. */
11990 else
11991 {
11992 if (nargs == 0)
11993 {
11994 /* Evidently an out of date version of <stdarg.h>; can't validate
11995 va_start's second argument, but can still work as intended. */
11996 warning_at (current_location,
11997 OPT_Wvarargs,
11998 "%<__builtin_next_arg%> called without an argument");
11999 return true;
12000 }
12001 else if (nargs > 1)
12002 {
12003 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12004 return true;
12005 }
12006 arg = CALL_EXPR_ARG (exp, 0);
12007 }
12008
12009 if (TREE_CODE (arg) == SSA_NAME)
12010 arg = SSA_NAME_VAR (arg);
12011
12012 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12013 or __builtin_next_arg (0) the first time we see it, after checking
12014 the arguments and if needed issuing a warning. */
12015 if (!integer_zerop (arg))
12016 {
12017 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12018
12019 /* Strip off all nops for the sake of the comparison. This
12020 is not quite the same as STRIP_NOPS. It does more.
12021 We must also strip off INDIRECT_EXPR for C++ reference
12022 parameters. */
12023 while (CONVERT_EXPR_P (arg)
12024 || TREE_CODE (arg) == INDIRECT_REF)
12025 arg = TREE_OPERAND (arg, 0);
12026 if (arg != last_parm)
12027 {
12028 /* FIXME: Sometimes with the tree optimizers we can end up with
12029 something other than the last argument even though the user
12030 used the last argument. We just warn and carry on, so the
12031 generated code may still be wrong because of
12032 it. */
12033 warning_at (current_location,
12034 OPT_Wvarargs,
12035 "second parameter of %<va_start%> not last named argument");
12036 }
12037
12038 /* Undefined by C99 7.15.1.4p4 (va_start):
12039 "If the parameter parmN is declared with the register storage
12040 class, with a function or array type, or with a type that is
12041 not compatible with the type that results after application of
12042 the default argument promotions, the behavior is undefined."
12043 */
12044 else if (DECL_REGISTER (arg))
12045 {
12046 warning_at (current_location,
12047 OPT_Wvarargs,
12048 "undefined behaviour when second parameter of "
12049 "%<va_start%> is declared with %<register%> storage");
12050 }
12051
12052 /* We want to verify the second parameter just once before the tree
12053 optimizers are run and then avoid keeping it in the tree,
12054 as otherwise we could warn even for correct code like:
12055 void foo (int i, ...)
12056 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12057 if (va_start_p)
12058 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12059 else
12060 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12061 }
12062 return false;
12063 }
12064
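/* For example, the check above diagnoses

       void f (int a, int b, ...)
       {
         va_list ap;
         va_start (ap, a);    <-- A is not the last named argument
       }

   under -Wvarargs, then rewrites the second argument to 0 so later
   folds of the same call do not warn again (illustrative sketch). */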
12065
12066 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12067 ORIG may be null if this is a 2-argument call. We don't attempt to
12068 simplify calls with more than 3 arguments.
12069
12070 Return NULL_TREE if no simplification was possible, otherwise return the
12071 simplified form of the call as a tree. If IGNORED is true, it means that
12072 the caller does not use the returned value of the function. */
12073
12074 static tree
12075 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12076 tree orig, int ignored)
12077 {
12078 tree call, retval;
12079 const char *fmt_str = NULL;
12080
12081 /* Verify the required arguments in the original call. We deal with two
12082 types of sprintf() calls: 'sprintf (str, fmt)' and
12083 'sprintf (dest, "%s", orig)'. */
12084 if (!validate_arg (dest, POINTER_TYPE)
12085 || !validate_arg (fmt, POINTER_TYPE))
12086 return NULL_TREE;
12087 if (orig && !validate_arg (orig, POINTER_TYPE))
12088 return NULL_TREE;
12089
12090 /* Check whether the format is a literal string constant. */
12091 fmt_str = c_getstr (fmt);
12092 if (fmt_str == NULL)
12093 return NULL_TREE;
12094
12095 call = NULL_TREE;
12096 retval = NULL_TREE;
12097
12098 if (!init_target_chars ())
12099 return NULL_TREE;
12100
12101 /* If the format doesn't contain % args or %%, use strcpy. */
12102 if (strchr (fmt_str, target_percent) == NULL)
12103 {
12104 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12105
12106 if (!fn)
12107 return NULL_TREE;
12108
12109 /* Don't optimize sprintf (buf, "abc", ptr++). */
12110 if (orig)
12111 return NULL_TREE;
12112
12113 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12114 'format' is known to contain no % formats. */
12115 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12116 if (!ignored)
12117 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12118 }
12119
12120 /* If the format is "%s", use strcpy if the result isn't used. */
12121 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12122 {
12123 tree fn;
12124 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12125
12126 if (!fn)
12127 return NULL_TREE;
12128
12129 /* Don't crash on sprintf (str1, "%s"). */
12130 if (!orig)
12131 return NULL_TREE;
12132
12133 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12134 if (!ignored)
12135 {
12136 retval = c_strlen (orig, 1);
12137 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12138 return NULL_TREE;
12139 }
12140 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12141 }
12142
12143 if (call && retval)
12144 {
12145 retval = fold_convert_loc
12146 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12147 retval);
12148 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12149 }
12150 else
12151 return call;
12152 }
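
/* Editor's illustration (hypothetical user code, not part of this file):
   the folding above performs

       char buf[16];
       sprintf (buf, "abc");       =>  strcpy (buf, "abc")   value 3 if used
       sprintf (buf, "%s", src);   =>  strcpy (buf, src)     only if the
                                       value is unused or strlen (src) is
                                       a compile-time constant.  */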
12153
12154 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12155 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12156 attempt to simplify calls with more than 4 arguments.
12157
12158 Return NULL_TREE if no simplification was possible, otherwise return the
12159 simplified form of the call as a tree. If IGNORED is true, it means that
12160 the caller does not use the returned value of the function. */
12161
12162 static tree
12163 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12164 tree orig, int ignored)
12165 {
12166 tree call, retval;
12167 const char *fmt_str = NULL;
12168 unsigned HOST_WIDE_INT destlen;
12169
12170 /* Verify the required arguments in the original call. We deal with two
12171 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12172 'snprintf (dest, cst, "%s", orig)'. */
12173 if (!validate_arg (dest, POINTER_TYPE)
12174 || !validate_arg (destsize, INTEGER_TYPE)
12175 || !validate_arg (fmt, POINTER_TYPE))
12176 return NULL_TREE;
12177 if (orig && !validate_arg (orig, POINTER_TYPE))
12178 return NULL_TREE;
12179
12180 if (!host_integerp (destsize, 1))
12181 return NULL_TREE;
12182
12183 /* Check whether the format is a literal string constant. */
12184 fmt_str = c_getstr (fmt);
12185 if (fmt_str == NULL)
12186 return NULL_TREE;
12187
12188 call = NULL_TREE;
12189 retval = NULL_TREE;
12190
12191 if (!init_target_chars ())
12192 return NULL_TREE;
12193
12194 destlen = tree_low_cst (destsize, 1);
12195
12196 /* If the format doesn't contain % args or %%, use strcpy. */
12197 if (strchr (fmt_str, target_percent) == NULL)
12198 {
12199 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12200 size_t len = strlen (fmt_str);
12201
12202 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12203 if (orig)
12204 return NULL_TREE;
12205
12206 /* We could expand this as
12207 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12208 or to
12209 memcpy (str, fmt_with_nul_at_cstm1, cst);
12210 but in the former case that might increase code size
12211 and in the latter case grow the .rodata section too much.
12212 So punt for now. */
12213 if (len >= destlen)
12214 return NULL_TREE;
12215
12216 if (!fn)
12217 return NULL_TREE;
12218
12219 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12220 'format' is known to contain no % formats and
12221 strlen (fmt) < cst. */
12222 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12223
12224 if (!ignored)
12225 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12226 }
12227
12228 /* If the format is "%s", use strcpy if the length of ORIG is a known constant smaller than DESTSIZE. */
12229 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12230 {
12231 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12232 unsigned HOST_WIDE_INT origlen;
12233
12234 /* Don't crash on snprintf (str1, cst, "%s"). */
12235 if (!orig)
12236 return NULL_TREE;
12237
12238 retval = c_strlen (orig, 1);
12239 if (!retval || !host_integerp (retval, 1))
12240 return NULL_TREE;
12241
12242 origlen = tree_low_cst (retval, 1);
12243 /* We could expand this as
12244 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12245 or to
12246 memcpy (str1, str2_with_nul_at_cstm1, cst);
12247 but in the former case that might increase code size
12248 and in the latter case grow the .rodata section too much.
12249 So punt for now. */
12250 if (origlen >= destlen)
12251 return NULL_TREE;
12252
12253 /* Convert snprintf (str1, cst, "%s", str2) into
12254 strcpy (str1, str2) if strlen (str2) < cst. */
12255 if (!fn)
12256 return NULL_TREE;
12257
12258 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12259
12260 if (ignored)
12261 retval = NULL_TREE;
12262 }
12263
12264 if (call && retval)
12265 {
12266 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12267 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12268 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12269 }
12270 else
12271 return call;
12272 }
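
/* Editor's illustration (hypothetical user code): the folding above also
   checks the constant destination size, so with char buf[8]:

       snprintf (buf, sizeof buf, "abc");        =>  strcpy (buf, "abc")
       snprintf (buf, sizeof buf, "%s", "xyz");  =>  strcpy (buf, "xyz")

   while snprintf (buf, 3, "abc") is left alone: strlen ("abc") is not
   smaller than the size argument, so truncation would be required.  */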
12273
12274 /* Expand a call EXP to __builtin_object_size. */
12275
12276 rtx
12277 expand_builtin_object_size (tree exp)
12278 {
12279 tree ost;
12280 int object_size_type;
12281 tree fndecl = get_callee_fndecl (exp);
12282
12283 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12284 {
12285 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12286 exp, fndecl);
12287 expand_builtin_trap ();
12288 return const0_rtx;
12289 }
12290
12291 ost = CALL_EXPR_ARG (exp, 1);
12292 STRIP_NOPS (ost);
12293
12294 if (TREE_CODE (ost) != INTEGER_CST
12295 || tree_int_cst_sgn (ost) < 0
12296 || compare_tree_int (ost, 3) > 0)
12297 {
12298 error ("%Klast argument of %D is not integer constant between 0 and 3",
12299 exp, fndecl);
12300 expand_builtin_trap ();
12301 return const0_rtx;
12302 }
12303
12304 object_size_type = tree_low_cst (ost, 0);
12305
12306 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12307 }
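
/* Editor's note: when the size is still unknown at expansion time, the
   code above yields the documented defaults, e.g. for some pointer p

       __builtin_object_size (p, 0)   =>  (size_t) -1   for types 0 and 1
       __builtin_object_size (p, 2)   =>  (size_t) 0    for types 2 and 3

   and a non-constant or out-of-range last argument is diagnosed and
   replaced by a trap.  */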
12308
12309 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12310 FCODE is the BUILT_IN_* to use.
12311 Return NULL_RTX if we failed; the caller should emit a normal call,
12312 otherwise try to get the result in TARGET, if convenient (and in
12313 mode MODE if that's convenient). */
12314
12315 static rtx
12316 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12317 enum built_in_function fcode)
12318 {
12319 tree dest, src, len, size;
12320
12321 if (!validate_arglist (exp,
12322 POINTER_TYPE,
12323 fcode == BUILT_IN_MEMSET_CHK
12324 ? INTEGER_TYPE : POINTER_TYPE,
12325 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12326 return NULL_RTX;
12327
12328 dest = CALL_EXPR_ARG (exp, 0);
12329 src = CALL_EXPR_ARG (exp, 1);
12330 len = CALL_EXPR_ARG (exp, 2);
12331 size = CALL_EXPR_ARG (exp, 3);
12332
12333 if (! host_integerp (size, 1))
12334 return NULL_RTX;
12335
12336 if (host_integerp (len, 1) || integer_all_onesp (size))
12337 {
12338 tree fn;
12339
12340 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12341 {
12342 warning_at (tree_nonartificial_location (exp),
12343 0, "%Kcall to %D will always overflow destination buffer",
12344 exp, get_callee_fndecl (exp));
12345 return NULL_RTX;
12346 }
12347
12348 fn = NULL_TREE;
12349 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12350 mem{cpy,pcpy,move,set} is available. */
12351 switch (fcode)
12352 {
12353 case BUILT_IN_MEMCPY_CHK:
12354 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12355 break;
12356 case BUILT_IN_MEMPCPY_CHK:
12357 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12358 break;
12359 case BUILT_IN_MEMMOVE_CHK:
12360 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12361 break;
12362 case BUILT_IN_MEMSET_CHK:
12363 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12364 break;
12365 default:
12366 break;
12367 }
12368
12369 if (! fn)
12370 return NULL_RTX;
12371
12372 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12373 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12374 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12375 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12376 }
12377 else if (fcode == BUILT_IN_MEMSET_CHK)
12378 return NULL_RTX;
12379 else
12380 {
12381 unsigned int dest_align = get_pointer_alignment (dest);
12382
12383 /* If DEST is not a pointer type, call the normal function. */
12384 if (dest_align == 0)
12385 return NULL_RTX;
12386
12387 /* If SRC and DEST are the same (and not volatile), do nothing. */
12388 if (operand_equal_p (src, dest, 0))
12389 {
12390 tree expr;
12391
12392 if (fcode != BUILT_IN_MEMPCPY_CHK)
12393 {
12394 /* Evaluate and ignore LEN in case it has side-effects. */
12395 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12396 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12397 }
12398
12399 expr = fold_build_pointer_plus (dest, len);
12400 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12401 }
12402
12403 /* __memmove_chk special case. */
12404 if (fcode == BUILT_IN_MEMMOVE_CHK)
12405 {
12406 unsigned int src_align = get_pointer_alignment (src);
12407
12408 if (src_align == 0)
12409 return NULL_RTX;
12410
12411 /* If src is categorized for a readonly section we can use
12412 normal __memcpy_chk. */
12413 if (readonly_data_expr (src))
12414 {
12415 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12416 if (!fn)
12417 return NULL_RTX;
12418 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12419 dest, src, len, size);
12420 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12421 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12422 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12423 }
12424 }
12425 return NULL_RTX;
12426 }
12427 }
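
/* Editor's illustration (hypothetical user code): given char buf[8], a
   checking call whose constant length fits the known object size, e.g.

       __builtin___memcpy_chk (buf, src, 4,
                               __builtin_object_size (buf, 0));

   expands here as plain memcpy (buf, src, 4), while a constant length
   larger than the known size draws the "will always overflow" warning
   and falls back to the library call.  */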
12428
12429 /* Emit warning if a buffer overflow is detected at compile time. */
12430
12431 static void
12432 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12433 {
12434 int is_strlen = 0;
12435 tree len, size;
12436 location_t loc = tree_nonartificial_location (exp);
12437
12438 switch (fcode)
12439 {
12440 case BUILT_IN_STRCPY_CHK:
12441 case BUILT_IN_STPCPY_CHK:
12442 /* For __strcat_chk the warning will be emitted only if overflowing
12443 by at least strlen (dest) + 1 bytes. */
12444 case BUILT_IN_STRCAT_CHK:
12445 len = CALL_EXPR_ARG (exp, 1);
12446 size = CALL_EXPR_ARG (exp, 2);
12447 is_strlen = 1;
12448 break;
12449 case BUILT_IN_STRNCAT_CHK:
12450 case BUILT_IN_STRNCPY_CHK:
12451 case BUILT_IN_STPNCPY_CHK:
12452 len = CALL_EXPR_ARG (exp, 2);
12453 size = CALL_EXPR_ARG (exp, 3);
12454 break;
12455 case BUILT_IN_SNPRINTF_CHK:
12456 case BUILT_IN_VSNPRINTF_CHK:
12457 len = CALL_EXPR_ARG (exp, 1);
12458 size = CALL_EXPR_ARG (exp, 3);
12459 break;
12460 default:
12461 gcc_unreachable ();
12462 }
12463
12464 if (!len || !size)
12465 return;
12466
12467 if (! host_integerp (size, 1) || integer_all_onesp (size))
12468 return;
12469
12470 if (is_strlen)
12471 {
12472 len = c_strlen (len, 1);
12473 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12474 return;
12475 }
12476 else if (fcode == BUILT_IN_STRNCAT_CHK)
12477 {
12478 tree src = CALL_EXPR_ARG (exp, 1);
12479 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12480 return;
12481 src = c_strlen (src, 1);
12482 if (! src || ! host_integerp (src, 1))
12483 {
12484 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12485 exp, get_callee_fndecl (exp));
12486 return;
12487 }
12488 else if (tree_int_cst_lt (src, size))
12489 return;
12490 }
12491 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12492 return;
12493
12494 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12495 exp, get_callee_fndecl (exp));
12496 }
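
/* Editor's illustration (hypothetical user code): for char buf[4],

       __builtin___strcpy_chk (buf, "abcdef",
                               __builtin_object_size (buf, 0));

   has a constant source length (6) that is not smaller than the object
   size (4), so the warning above fires at compile time.  */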
12497
12498 /* Emit warning if a buffer overflow is detected at compile time
12499 in __sprintf_chk/__vsprintf_chk calls. */
12500
12501 static void
12502 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12503 {
12504 tree size, len, fmt;
12505 const char *fmt_str;
12506 int nargs = call_expr_nargs (exp);
12507
12508 /* Verify the required arguments in the original call. */
12509
12510 if (nargs < 4)
12511 return;
12512 size = CALL_EXPR_ARG (exp, 2);
12513 fmt = CALL_EXPR_ARG (exp, 3);
12514
12515 if (! host_integerp (size, 1) || integer_all_onesp (size))
12516 return;
12517
12518 /* Check whether the format is a literal string constant. */
12519 fmt_str = c_getstr (fmt);
12520 if (fmt_str == NULL)
12521 return;
12522
12523 if (!init_target_chars ())
12524 return;
12525
12526 /* If the format doesn't contain % args or %%, we know its size. */
12527 if (strchr (fmt_str, target_percent) == 0)
12528 len = build_int_cstu (size_type_node, strlen (fmt_str));
12529 /* If the format is "%s" and the first ... argument is a string literal,
12530 we know it too. */
12531 else if (fcode == BUILT_IN_SPRINTF_CHK
12532 && strcmp (fmt_str, target_percent_s) == 0)
12533 {
12534 tree arg;
12535
12536 if (nargs < 5)
12537 return;
12538 arg = CALL_EXPR_ARG (exp, 4);
12539 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12540 return;
12541
12542 len = c_strlen (arg, 1);
12543 if (!len || ! host_integerp (len, 1))
12544 return;
12545 }
12546 else
12547 return;
12548
12549 if (! tree_int_cst_lt (len, size))
12550 warning_at (tree_nonartificial_location (exp),
12551 0, "%Kcall to %D will always overflow destination buffer",
12552 exp, get_callee_fndecl (exp));
12553 }
12554
12555 /* Emit warning if a free is called with address of a variable. */
12556
12557 static void
12558 maybe_emit_free_warning (tree exp)
12559 {
12560 tree arg = CALL_EXPR_ARG (exp, 0);
12561
12562 STRIP_NOPS (arg);
12563 if (TREE_CODE (arg) != ADDR_EXPR)
12564 return;
12565
12566 arg = get_base_address (TREE_OPERAND (arg, 0));
12567 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12568 return;
12569
12570 if (SSA_VAR_P (arg))
12571 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12572 "%Kattempt to free a non-heap object %qD", exp, arg);
12573 else
12574 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12575 "%Kattempt to free a non-heap object", exp);
12576 }
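
/* Editor's illustration (hypothetical user code):

       int x;
       free (&x);     triggers "attempt to free a non-heap object 'x'"

   Only addresses whose base is a declared variable are flagged;
   indirections such as &p->field, where p points into the heap, pass
   through silently.  */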
12577
12578 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12579 if possible. */
12580
12581 tree
12582 fold_builtin_object_size (tree ptr, tree ost)
12583 {
12584 unsigned HOST_WIDE_INT bytes;
12585 int object_size_type;
12586
12587 if (!validate_arg (ptr, POINTER_TYPE)
12588 || !validate_arg (ost, INTEGER_TYPE))
12589 return NULL_TREE;
12590
12591 STRIP_NOPS (ost);
12592
12593 if (TREE_CODE (ost) != INTEGER_CST
12594 || tree_int_cst_sgn (ost) < 0
12595 || compare_tree_int (ost, 3) > 0)
12596 return NULL_TREE;
12597
12598 object_size_type = tree_low_cst (ost, 0);
12599
12600 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12601 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12602 and (size_t) 0 for types 2 and 3. */
12603 if (TREE_SIDE_EFFECTS (ptr))
12604 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12605
12606 if (TREE_CODE (ptr) == ADDR_EXPR)
12607 {
12608 bytes = compute_builtin_object_size (ptr, object_size_type);
12609 if (double_int_fits_to_tree_p (size_type_node,
12610 double_int::from_uhwi (bytes)))
12611 return build_int_cstu (size_type_node, bytes);
12612 }
12613 else if (TREE_CODE (ptr) == SSA_NAME)
12614 {
12615 /* If object size is not known yet, delay folding until
12616 later. Maybe subsequent passes will help determine
12617 it. */
12618 bytes = compute_builtin_object_size (ptr, object_size_type);
12619 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12620 && double_int_fits_to_tree_p (size_type_node,
12621 double_int::from_uhwi (bytes)))
12622 return build_int_cstu (size_type_node, bytes);
12623 }
12624
12625 return NULL_TREE;
12626 }
12627
12628 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12629 DEST, SRC, LEN, and SIZE are the arguments to the call.
12630 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
12631 code of the builtin. If MAXLEN is not NULL, it is the maximum length
12632 passed as third argument. */
12633
12634 tree
12635 fold_builtin_memory_chk (location_t loc, tree fndecl,
12636 tree dest, tree src, tree len, tree size,
12637 tree maxlen, bool ignore,
12638 enum built_in_function fcode)
12639 {
12640 tree fn;
12641
12642 if (!validate_arg (dest, POINTER_TYPE)
12643 || !validate_arg (src,
12644 (fcode == BUILT_IN_MEMSET_CHK
12645 ? INTEGER_TYPE : POINTER_TYPE))
12646 || !validate_arg (len, INTEGER_TYPE)
12647 || !validate_arg (size, INTEGER_TYPE))
12648 return NULL_TREE;
12649
12650 /* If SRC and DEST are the same (and not volatile), return DEST
12651 (resp. DEST+LEN for __mempcpy_chk). */
12652 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12653 {
12654 if (fcode != BUILT_IN_MEMPCPY_CHK)
12655 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12656 dest, len);
12657 else
12658 {
12659 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12660 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12661 }
12662 }
12663
12664 if (! host_integerp (size, 1))
12665 return NULL_TREE;
12666
12667 if (! integer_all_onesp (size))
12668 {
12669 if (! host_integerp (len, 1))
12670 {
12671 /* If LEN is not constant, try MAXLEN too.
12672 For MAXLEN only allow optimizing into non-_ocs function
12673 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12674 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12675 {
12676 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12677 {
12678 /* (void) __mempcpy_chk () can be optimized into
12679 (void) __memcpy_chk (). */
12680 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12681 if (!fn)
12682 return NULL_TREE;
12683
12684 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12685 }
12686 return NULL_TREE;
12687 }
12688 }
12689 else
12690 maxlen = len;
12691
12692 if (tree_int_cst_lt (size, maxlen))
12693 return NULL_TREE;
12694 }
12695
12696 fn = NULL_TREE;
12697 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12698 mem{cpy,pcpy,move,set} is available. */
12699 switch (fcode)
12700 {
12701 case BUILT_IN_MEMCPY_CHK:
12702 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12703 break;
12704 case BUILT_IN_MEMPCPY_CHK:
12705 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12706 break;
12707 case BUILT_IN_MEMMOVE_CHK:
12708 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12709 break;
12710 case BUILT_IN_MEMSET_CHK:
12711 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12712 break;
12713 default:
12714 break;
12715 }
12716
12717 if (!fn)
12718 return NULL_TREE;
12719
12720 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12721 }
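
/* Editor's illustration (hypothetical user code): this tree-level
   folding mirrors the RTL expansion above, e.g.

       __builtin___memset_chk (p, 0, 16, 32)   =>  memset (p, 0, 16)

   since the constant length (16) does not exceed the known size (32);
   and (void) __builtin___mempcpy_chk (...) with a non-constant length
   degrades to __memcpy_chk when only the side effect matters.  */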
12722
12723 /* Fold a call to the __st[rp]cpy_chk builtin.
12724 DEST, SRC, and SIZE are the arguments to the call.
12725 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
12726 code of the builtin. If MAXLEN is not NULL, it is the maximum length of
12727 the strings passed as second argument. */
12728
12729 tree
12730 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12731 tree src, tree size,
12732 tree maxlen, bool ignore,
12733 enum built_in_function fcode)
12734 {
12735 tree len, fn;
12736
12737 if (!validate_arg (dest, POINTER_TYPE)
12738 || !validate_arg (src, POINTER_TYPE)
12739 || !validate_arg (size, INTEGER_TYPE))
12740 return NULL_TREE;
12741
12742 /* If SRC and DEST are the same (and not volatile), return DEST. */
12743 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12744 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12745
12746 if (! host_integerp (size, 1))
12747 return NULL_TREE;
12748
12749 if (! integer_all_onesp (size))
12750 {
12751 len = c_strlen (src, 1);
12752 if (! len || ! host_integerp (len, 1))
12753 {
12754 /* If LEN is not constant, try MAXLEN too.
12755 For MAXLEN only allow optimizing into non-_ocs function
12756 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12757 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12758 {
12759 if (fcode == BUILT_IN_STPCPY_CHK)
12760 {
12761 if (! ignore)
12762 return NULL_TREE;
12763
12764 /* If return value of __stpcpy_chk is ignored,
12765 optimize into __strcpy_chk. */
12766 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12767 if (!fn)
12768 return NULL_TREE;
12769
12770 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12771 }
12772
12773 if (! len || TREE_SIDE_EFFECTS (len))
12774 return NULL_TREE;
12775
12776 /* If c_strlen returned something, but not a constant,
12777 transform __strcpy_chk into __memcpy_chk. */
12778 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12779 if (!fn)
12780 return NULL_TREE;
12781
12782 len = fold_convert_loc (loc, size_type_node, len);
12783 len = size_binop_loc (loc, PLUS_EXPR, len,
12784 build_int_cst (size_type_node, 1));
12785 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12786 build_call_expr_loc (loc, fn, 4,
12787 dest, src, len, size));
12788 }
12789 }
12790 else
12791 maxlen = len;
12792
12793 if (! tree_int_cst_lt (maxlen, size))
12794 return NULL_TREE;
12795 }
12796
12797 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12798 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12799 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12800 if (!fn)
12801 return NULL_TREE;
12802
12803 return build_call_expr_loc (loc, fn, 2, dest, src);
12804 }
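
/* Editor's illustration (hypothetical user code):

       __builtin___strcpy_chk (d, "abc", 8)   =>  strcpy (d, "abc")

   because strlen ("abc") < 8. When c_strlen yields a non-constant but
   side-effect-free length LEN, the call becomes
   __memcpy_chk (d, src, LEN + 1, 8) instead, keeping the check.  */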
12805
12806 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12807 are the arguments to the call. If MAXLEN is not NULL, it is the maximum
12808 length passed as third argument. IGNORE is true if the return value can be
12809 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12810
12811 tree
12812 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12813 tree len, tree size, tree maxlen, bool ignore,
12814 enum built_in_function fcode)
12815 {
12816 tree fn;
12817
12818 if (!validate_arg (dest, POINTER_TYPE)
12819 || !validate_arg (src, POINTER_TYPE)
12820 || !validate_arg (len, INTEGER_TYPE)
12821 || !validate_arg (size, INTEGER_TYPE))
12822 return NULL_TREE;
12823
12824 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12825 {
12826 /* If return value of __stpncpy_chk is ignored,
12827 optimize into __strncpy_chk. */
12828 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12829 if (fn)
12830 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12831 }
12832
12833 if (! host_integerp (size, 1))
12834 return NULL_TREE;
12835
12836 if (! integer_all_onesp (size))
12837 {
12838 if (! host_integerp (len, 1))
12839 {
12840 /* If LEN is not constant, try MAXLEN too.
12841 For MAXLEN only allow optimizing into non-_ocs function
12842 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12843 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12844 return NULL_TREE;
12845 }
12846 else
12847 maxlen = len;
12848
12849 if (tree_int_cst_lt (size, maxlen))
12850 return NULL_TREE;
12851 }
12852
12853 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12854 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12855 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12856 if (!fn)
12857 return NULL_TREE;
12858
12859 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12860 }
12861
12862 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12863 are the arguments to the call. */
12864
12865 static tree
12866 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12867 tree src, tree size)
12868 {
12869 tree fn;
12870 const char *p;
12871
12872 if (!validate_arg (dest, POINTER_TYPE)
12873 || !validate_arg (src, POINTER_TYPE)
12874 || !validate_arg (size, INTEGER_TYPE))
12875 return NULL_TREE;
12876
12877 p = c_getstr (src);
12878 /* If the SRC parameter is "", return DEST. */
12879 if (p && *p == '\0')
12880 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12881
12882 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12883 return NULL_TREE;
12884
12885 /* If __builtin_strcat_chk is used, assume strcat is available. */
12886 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12887 if (!fn)
12888 return NULL_TREE;
12889
12890 return build_call_expr_loc (loc, fn, 2, dest, src);
12891 }
12892
12893 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12894 LEN, and SIZE. */
12895
12896 static tree
12897 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12898 tree dest, tree src, tree len, tree size)
12899 {
12900 tree fn;
12901 const char *p;
12902
12903 if (!validate_arg (dest, POINTER_TYPE)
12904 || !validate_arg (src, POINTER_TYPE)
12905 || !validate_arg (len, INTEGER_TYPE)
12906 || !validate_arg (size, INTEGER_TYPE))
12907 return NULL_TREE;
12908
12909 p = c_getstr (src);
12910 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12911 if (p && *p == '\0')
12912 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12913 else if (integer_zerop (len))
12914 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12915
12916 if (! host_integerp (size, 1))
12917 return NULL_TREE;
12918
12919 if (! integer_all_onesp (size))
12920 {
12921 tree src_len = c_strlen (src, 1);
12922 if (src_len
12923 && host_integerp (src_len, 1)
12924 && host_integerp (len, 1)
12925 && ! tree_int_cst_lt (len, src_len))
12926 {
12927 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12928 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
12929 if (!fn)
12930 return NULL_TREE;
12931
12932 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12933 }
12934 return NULL_TREE;
12935 }
12936
12937 /* If __builtin_strncat_chk is used, assume strncat is available. */
12938 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
12939 if (!fn)
12940 return NULL_TREE;
12941
12942 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12943 }
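
/* Editor's illustration (hypothetical user code):

       __builtin___strncat_chk (d, "ab", 5, 10)  =>  __strcat_chk (d, "ab", 10)

   because LEN (5) >= strlen ("ab"); when SIZE is (size_t) -1, i.e. no
   object size is known, the call folds to strncat (d, "ab", 5).  */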
12944
12945 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12946 Return NULL_TREE if a normal call should be emitted rather than
12947 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12948 or BUILT_IN_VSPRINTF_CHK. */
12949
12950 static tree
12951 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12952 enum built_in_function fcode)
12953 {
12954 tree dest, size, len, fn, fmt, flag;
12955 const char *fmt_str;
12956
12957 /* Verify the required arguments in the original call. */
12958 if (nargs < 4)
12959 return NULL_TREE;
12960 dest = args[0];
12961 if (!validate_arg (dest, POINTER_TYPE))
12962 return NULL_TREE;
12963 flag = args[1];
12964 if (!validate_arg (flag, INTEGER_TYPE))
12965 return NULL_TREE;
12966 size = args[2];
12967 if (!validate_arg (size, INTEGER_TYPE))
12968 return NULL_TREE;
12969 fmt = args[3];
12970 if (!validate_arg (fmt, POINTER_TYPE))
12971 return NULL_TREE;
12972
12973 if (! host_integerp (size, 1))
12974 return NULL_TREE;
12975
12976 len = NULL_TREE;
12977
12978 if (!init_target_chars ())
12979 return NULL_TREE;
12980
12981 /* Check whether the format is a literal string constant. */
12982 fmt_str = c_getstr (fmt);
12983 if (fmt_str != NULL)
12984 {
12985 /* If the format doesn't contain % args or %%, we know the size. */
12986 if (strchr (fmt_str, target_percent) == 0)
12987 {
12988 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12989 len = build_int_cstu (size_type_node, strlen (fmt_str));
12990 }
12991 /* If the format is "%s" and the first ... argument is a string literal,
12992 we know the size too. */
12993 else if (fcode == BUILT_IN_SPRINTF_CHK
12994 && strcmp (fmt_str, target_percent_s) == 0)
12995 {
12996 tree arg;
12997
12998 if (nargs == 5)
12999 {
13000 arg = args[4];
13001 if (validate_arg (arg, POINTER_TYPE))
13002 {
13003 len = c_strlen (arg, 1);
13004 if (! len || ! host_integerp (len, 1))
13005 len = NULL_TREE;
13006 }
13007 }
13008 }
13009 }
13010
13011 if (! integer_all_onesp (size))
13012 {
13013 if (! len || ! tree_int_cst_lt (len, size))
13014 return NULL_TREE;
13015 }
13016
13017 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13018 or if format doesn't contain % chars or is "%s". */
13019 if (! integer_zerop (flag))
13020 {
13021 if (fmt_str == NULL)
13022 return NULL_TREE;
13023 if (strchr (fmt_str, target_percent) != NULL
13024 && strcmp (fmt_str, target_percent_s))
13025 return NULL_TREE;
13026 }
13027
13028 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13029 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13030 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13031 if (!fn)
13032 return NULL_TREE;
13033
13034 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13035 }
13036
13037 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13038 a normal call should be emitted rather than expanding the function
13039 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13040
13041 static tree
13042 fold_builtin_sprintf_chk (location_t loc, tree exp,
13043 enum built_in_function fcode)
13044 {
13045 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13046 CALL_EXPR_ARGP (exp), fcode);
13047 }
13048
13049 /* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS. Return
13050 NULL_TREE if a normal call should be emitted rather than expanding
13051 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13052 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
13053 passed as second argument. */
13054
13055 static tree
13056 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13057 tree maxlen, enum built_in_function fcode)
13058 {
13059 tree dest, size, len, fn, fmt, flag;
13060 const char *fmt_str;
13061
13062 /* Verify the required arguments in the original call. */
13063 if (nargs < 5)
13064 return NULL_TREE;
13065 dest = args[0];
13066 if (!validate_arg (dest, POINTER_TYPE))
13067 return NULL_TREE;
13068 len = args[1];
13069 if (!validate_arg (len, INTEGER_TYPE))
13070 return NULL_TREE;
13071 flag = args[2];
13072 if (!validate_arg (flag, INTEGER_TYPE))
13073 return NULL_TREE;
13074 size = args[3];
13075 if (!validate_arg (size, INTEGER_TYPE))
13076 return NULL_TREE;
13077 fmt = args[4];
13078 if (!validate_arg (fmt, POINTER_TYPE))
13079 return NULL_TREE;
13080
13081 if (! host_integerp (size, 1))
13082 return NULL_TREE;
13083
13084 if (! integer_all_onesp (size))
13085 {
13086 if (! host_integerp (len, 1))
13087 {
13088 /* If LEN is not constant, try MAXLEN too.
13089 For MAXLEN only allow optimizing into non-_ocs function
13090 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13091 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13092 return NULL_TREE;
13093 }
13094 else
13095 maxlen = len;
13096
13097 if (tree_int_cst_lt (size, maxlen))
13098 return NULL_TREE;
13099 }
13100
13101 if (!init_target_chars ())
13102 return NULL_TREE;
13103
13104 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13105 or if format doesn't contain % chars or is "%s". */
13106 if (! integer_zerop (flag))
13107 {
13108 fmt_str = c_getstr (fmt);
13109 if (fmt_str == NULL)
13110 return NULL_TREE;
13111 if (strchr (fmt_str, target_percent) != NULL
13112 && strcmp (fmt_str, target_percent_s))
13113 return NULL_TREE;
13114 }
13115
13116 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13117 available. */
13118 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13119 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13120 if (!fn)
13121 return NULL_TREE;
13122
13123 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13124 }
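
/* Editor's illustration (hypothetical user code): with

       __builtin___snprintf_chk (buf, 16, 0, 32, "%d", i);

   the flag is zero and the length (16) does not exceed the object size
   (32), so the call above folds to snprintf (buf, 16, "%d", i).  */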
13125
13126 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
13127 a normal call should be emitted rather than expanding the function
13128 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13129 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
13130 passed as second argument. */
13131
13132 tree
13133 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13134 enum built_in_function fcode)
13135 {
13136 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13137 CALL_EXPR_ARGP (exp), maxlen, fcode);
13138 }
13139
13140 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13141 FMT and ARG are the arguments to the call; we don't fold cases with
13142 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13143
13144 Return NULL_TREE if no simplification was possible, otherwise return the
13145 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13146 code of the function to be simplified. */
13147
13148 static tree
13149 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13150 tree arg, bool ignore,
13151 enum built_in_function fcode)
13152 {
13153 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13154 const char *fmt_str = NULL;
13155
13156 /* If the return value is used, don't do the transformation. */
13157 if (! ignore)
13158 return NULL_TREE;
13159
13160 /* Verify the required arguments in the original call. */
13161 if (!validate_arg (fmt, POINTER_TYPE))
13162 return NULL_TREE;
13163
13164 /* Check whether the format is a literal string constant. */
13165 fmt_str = c_getstr (fmt);
13166 if (fmt_str == NULL)
13167 return NULL_TREE;
13168
13169 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13170 {
13171 /* If we're using an unlocked function, assume the other
13172 unlocked functions exist explicitly. */
13173 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13174 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13175 }
13176 else
13177 {
13178 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13179 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13180 }
13181
13182 if (!init_target_chars ())
13183 return NULL_TREE;
13184
13185 if (strcmp (fmt_str, target_percent_s) == 0
13186 || strchr (fmt_str, target_percent) == NULL)
13187 {
13188 const char *str;
13189
13190 if (strcmp (fmt_str, target_percent_s) == 0)
13191 {
13192 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13193 return NULL_TREE;
13194
13195 if (!arg || !validate_arg (arg, POINTER_TYPE))
13196 return NULL_TREE;
13197
13198 str = c_getstr (arg);
13199 if (str == NULL)
13200 return NULL_TREE;
13201 }
13202 else
13203 {
13204 /* The format specifier doesn't contain any '%' characters. */
13205 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13206 && arg)
13207 return NULL_TREE;
13208 str = fmt_str;
13209 }
13210
13211 /* If the string was "", printf does nothing. */
13212 if (str[0] == '\0')
13213 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13214
13215 /* If the string has length 1, call putchar. */
13216 if (str[1] == '\0')
13217 {
13218 /* Given printf ("c"), where c is any single character,
13219 convert "c"[0] to an int and pass that to the replacement
13220 function. */
13221 newarg = build_int_cst (integer_type_node, str[0]);
13222 if (fn_putchar)
13223 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13224 }
13225 else
13226 {
13227 /* If the string was "string\n", call puts("string"). */
13228 size_t len = strlen (str);
13229 if ((unsigned char)str[len - 1] == target_newline
13230 && (size_t) (int) len == len
13231 && (int) len > 0)
13232 {
13233 char *newstr;
13234 tree offset_node, string_cst;
13235
13236 /* Create a NUL-terminated string that's one char shorter
13237 than the original, stripping off the trailing '\n'. */
13238 newarg = build_string_literal (len, str);
13239 string_cst = string_constant (newarg, &offset_node);
13240 gcc_checking_assert (string_cst
13241 && (TREE_STRING_LENGTH (string_cst)
13242 == (int) len)
13243 && integer_zerop (offset_node)
13244 && (unsigned char)
13245 TREE_STRING_POINTER (string_cst)[len - 1]
13246 == target_newline);
13247 /* build_string_literal creates a new STRING_CST,
13248 modify it in place to avoid double copying. */
13249 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13250 newstr[len - 1] = '\0';
13251 if (fn_puts)
13252 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13253 }
13254 else
13255 /* We'd like to arrange to call fputs(string,stdout) here,
13256 but we need stdout and don't have a way to get it yet. */
13257 return NULL_TREE;
13258 }
13259 }
13260
13261 /* The other optimizations can be done only on the non-va_list variants. */
13262 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13263 return NULL_TREE;
13264
13265 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13266 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13267 {
13268 if (!arg || !validate_arg (arg, POINTER_TYPE))
13269 return NULL_TREE;
13270 if (fn_puts)
13271 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13272 }
13273
13274 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13275 else if (strcmp (fmt_str, target_percent_c) == 0)
13276 {
13277 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13278 return NULL_TREE;
13279 if (fn_putchar)
13280 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13281 }
13282
13283 if (!call)
13284 return NULL_TREE;
13285
13286 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13287 }
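
/* Editor's illustration (hypothetical user code), applicable only when
   the return value of printf is unused:

       printf ("");          =>  folds away to 0
       printf ("x");         =>  putchar ('x')
       printf ("hi\n");      =>  puts ("hi")
       printf ("%s\n", s);   =>  puts (s)
       printf ("%c", c);     =>  putchar (c)

   printf ("hi") without a trailing newline is left alone, since the
   fputs (..., stdout) replacement would need access to stdout here.  */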
13288
13289 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13290 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13291 more than 3 arguments, and ARG may be null in the 2-argument case.
13292
13293 Return NULL_TREE if no simplification was possible, otherwise return the
13294 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13295 code of the function to be simplified. */
13296
13297 static tree
13298 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13299 tree fmt, tree arg, bool ignore,
13300 enum built_in_function fcode)
13301 {
13302 tree fn_fputc, fn_fputs, call = NULL_TREE;
13303 const char *fmt_str = NULL;
13304
13305 /* If the return value is used, don't do the transformation. */
13306 if (! ignore)
13307 return NULL_TREE;
13308
13309 /* Verify the required arguments in the original call. */
13310 if (!validate_arg (fp, POINTER_TYPE))
13311 return NULL_TREE;
13312 if (!validate_arg (fmt, POINTER_TYPE))
13313 return NULL_TREE;
13314
13315 /* Check whether the format is a literal string constant. */
13316 fmt_str = c_getstr (fmt);
13317 if (fmt_str == NULL)
13318 return NULL_TREE;
13319
13320 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13321 {
13322 /* If we're using an unlocked function, assume the other
13323 unlocked functions exist explicitly. */
13324 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13325 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13326 }
13327 else
13328 {
13329 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13330 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13331 }
13332
13333 if (!init_target_chars ())
13334 return NULL_TREE;
13335
13336 /* If the format doesn't contain % args or %%, use fputs. */
13337 if (strchr (fmt_str, target_percent) == NULL)
13338 {
13339 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13340 && arg)
13341 return NULL_TREE;
13342
13343 /* If the format specifier was "", fprintf does nothing. */
13344 if (fmt_str[0] == '\0')
13345 {
13346 /* If FP has side-effects, just wait until gimplification is
13347 done. */
13348 if (TREE_SIDE_EFFECTS (fp))
13349 return NULL_TREE;
13350
13351 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13352 }
13353
13354 /* When "string" doesn't contain %, replace all cases of
13355 fprintf (fp, string) with fputs (string, fp). The fputs
13356 builtin will take care of special cases like length == 1. */
13357 if (fn_fputs)
13358 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13359 }
13360
13361 /* The other optimizations can be done only on the non-va_list variants. */
13362 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13363 return NULL_TREE;
13364
13365 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13366 else if (strcmp (fmt_str, target_percent_s) == 0)
13367 {
13368 if (!arg || !validate_arg (arg, POINTER_TYPE))
13369 return NULL_TREE;
13370 if (fn_fputs)
13371 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13372 }
13373
13374 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13375 else if (strcmp (fmt_str, target_percent_c) == 0)
13376 {
13377 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13378 return NULL_TREE;
13379 if (fn_fputc)
13380 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13381 }
13382
13383 if (!call)
13384 return NULL_TREE;
13385 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13386 }
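
/* Editor's illustration (hypothetical user code), again only when the
   return value is unused:

       fprintf (fp, "hi");       =>  fputs ("hi", fp)
       fprintf (fp, "%s", s);    =>  fputs (s, fp)
       fprintf (fp, "%c", c);    =>  fputc (c, fp)  */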
13387
13388 /* Initialize format string characters in the target charset. */
13389
13390 static bool
13391 init_target_chars (void)
13392 {
13393 static bool init;
13394 if (!init)
13395 {
13396 target_newline = lang_hooks.to_target_charset ('\n');
13397 target_percent = lang_hooks.to_target_charset ('%');
13398 target_c = lang_hooks.to_target_charset ('c');
13399 target_s = lang_hooks.to_target_charset ('s');
13400 if (target_newline == 0 || target_percent == 0 || target_c == 0
13401 || target_s == 0)
13402 return false;
13403
13404 target_percent_c[0] = target_percent;
13405 target_percent_c[1] = target_c;
13406 target_percent_c[2] = '\0';
13407
13408 target_percent_s[0] = target_percent;
13409 target_percent_s[1] = target_s;
13410 target_percent_s[2] = '\0';
13411
13412 target_percent_s_newline[0] = target_percent;
13413 target_percent_s_newline[1] = target_s;
13414 target_percent_s_newline[2] = target_newline;
13415 target_percent_s_newline[3] = '\0';
13416
13417 init = true;
13418 }
13419 return true;
13420 }
13421
13422 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13423 and no overflow/underflow occurred. INEXACT is true if M was not
13424 exactly calculated. TYPE is the tree type for the result. This
13425 function assumes that you cleared the MPFR flags before
13426 calculating M, so that any flag the calculation raises can be
13427 detected here. Return NULL_TREE if any checks fail. */
13428
13429 static tree
13430 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13431 {
13432 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13433 overflow/underflow occurred. If -frounding-math, proceed iff the
13434 result of calling FUNC was exact. */
13435 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13436 && (!flag_rounding_math || !inexact))
13437 {
13438 REAL_VALUE_TYPE rr;
13439
13440 real_from_mpfr (&rr, m, type, GMP_RNDN);
13441 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13442 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13443 but the mpfr_t is not, then we underflowed in the
13444 conversion. */
13445 if (real_isfinite (&rr)
13446 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13447 {
13448 REAL_VALUE_TYPE rmode;
13449
13450 real_convert (&rmode, TYPE_MODE (type), &rr);
13451 /* Proceed iff the specified mode can hold the value. */
13452 if (real_identical (&rmode, &rr))
13453 return build_real (type, rmode);
13454 }
13455 }
13456 return NULL_TREE;
13457 }
13458
13459 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13460 number and no overflow/underflow occurred. INEXACT is true if M
13461 was not exactly calculated. TYPE is the tree type for the result.
13462 This function assumes that you cleared the MPFR flags before
13463 calculating M, so that any flag the calculation raises can be
13464 detected here. Return NULL_TREE if any checks fail; if
13465 FORCE_CONVERT is true, bypass the checks. */
13466
13467 static tree
13468 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13469 {
13470 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13471 overflow/underflow occurred. If -frounding-math, proceed iff the
13472 result of calling FUNC was exact. */
13473 if (force_convert
13474 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13475 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13476 && (!flag_rounding_math || !inexact)))
13477 {
13478 REAL_VALUE_TYPE re, im;
13479
13480 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13481 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13482 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13483 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13484 but the mpfr_t is not, then we underflowed in the
13485 conversion. */
13486 if (force_convert
13487 || (real_isfinite (&re) && real_isfinite (&im)
13488 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13489 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13490 {
13491 REAL_VALUE_TYPE re_mode, im_mode;
13492
13493 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13494 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13495 /* Proceed iff the specified mode can hold the value. */
13496 if (force_convert
13497 || (real_identical (&re_mode, &re)
13498 && real_identical (&im_mode, &im)))
13499 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13500 build_real (TREE_TYPE (type), im_mode));
13501 }
13502 }
13503 return NULL_TREE;
13504 }
13505
13506 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13507 FUNC on it and return the resulting value as a tree with type TYPE.
13508 If MIN and/or MAX are not NULL, then the supplied ARG must be
13509 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13510 acceptable values, otherwise they are not. The mpfr precision is
13511 set to the precision of TYPE. We assume that function FUNC returns
13512 zero if the result could be calculated exactly within the requested
13513 precision. */
13514
13515 static tree
13516 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13517 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13518 bool inclusive)
13519 {
13520 tree result = NULL_TREE;
13521
13522 STRIP_NOPS (arg);
13523
13524 /* To proceed, MPFR must exactly represent the target floating point
13525 format, which only happens when the target base equals two. */
13526 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13527 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13528 {
13529 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13530
13531 if (real_isfinite (ra)
13532 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13533 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13534 {
13535 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13536 const int prec = fmt->p;
13537 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13538 int inexact;
13539 mpfr_t m;
13540
13541 mpfr_init2 (m, prec);
13542 mpfr_from_real (m, ra, GMP_RNDN);
13543 mpfr_clear_flags ();
13544 inexact = func (m, m, rnd);
13545 result = do_mpfr_ckconv (m, type, inexact);
13546 mpfr_clear (m);
13547 }
13548 }
13549
13550 return result;
13551 }
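
/* Editor's note (sketch of how the helper above is used): for user code
   such as

       double d = sin (1.0);

   the folder calls do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false);
   the constant is converted to an mpfr_t at the precision of double,
   mpfr_sin is applied, and do_mpfr_ckconv substitutes the resulting
   REAL_CST only if the value survives the round trip back into the
   target format.  */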
13552
13553 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13554 FUNC on it and return the resulting value as a tree with type TYPE.
13555 The mpfr precision is set to the precision of TYPE. We assume that
13556 function FUNC returns zero if the result could be calculated
13557 exactly within the requested precision. */
13558
13559 static tree
13560 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13561 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13562 {
13563 tree result = NULL_TREE;
13564
13565 STRIP_NOPS (arg1);
13566 STRIP_NOPS (arg2);
13567
13568 /* To proceed, MPFR must exactly represent the target floating point
13569 format, which only happens when the target base equals two. */
13570 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13571 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13572 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13573 {
13574 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13575 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13576
13577 if (real_isfinite (ra1) && real_isfinite (ra2))
13578 {
13579 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13580 const int prec = fmt->p;
13581 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13582 int inexact;
13583 mpfr_t m1, m2;
13584
13585 mpfr_inits2 (prec, m1, m2, NULL);
13586 mpfr_from_real (m1, ra1, GMP_RNDN);
13587 mpfr_from_real (m2, ra2, GMP_RNDN);
13588 mpfr_clear_flags ();
13589 inexact = func (m1, m1, m2, rnd);
13590 result = do_mpfr_ckconv (m1, type, inexact);
13591 mpfr_clears (m1, m2, NULL);
13592 }
13593 }
13594
13595 return result;
13596 }
13597
13598 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13599 FUNC on it and return the resulting value as a tree with type TYPE.
13600 The mpfr precision is set to the precision of TYPE. We assume that
13601 function FUNC returns zero if the result could be calculated
13602 exactly within the requested precision. */
13603
13604 static tree
13605 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13606 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13607 {
13608 tree result = NULL_TREE;
13609
13610 STRIP_NOPS (arg1);
13611 STRIP_NOPS (arg2);
13612 STRIP_NOPS (arg3);
13613
13614 /* To proceed, MPFR must exactly represent the target floating point
13615 format, which only happens when the target base equals two. */
13616 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13617 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13618 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13619 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13620 {
13621 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13622 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13623 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13624
13625 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13626 {
13627 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13628 const int prec = fmt->p;
13629 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13630 int inexact;
13631 mpfr_t m1, m2, m3;
13632
13633 mpfr_inits2 (prec, m1, m2, m3, NULL);
13634 mpfr_from_real (m1, ra1, GMP_RNDN);
13635 mpfr_from_real (m2, ra2, GMP_RNDN);
13636 mpfr_from_real (m3, ra3, GMP_RNDN);
13637 mpfr_clear_flags ();
13638 inexact = func (m1, m1, m2, m3, rnd);
13639 result = do_mpfr_ckconv (m1, type, inexact);
13640 mpfr_clears (m1, m2, m3, NULL);
13641 }
13642 }
13643
13644 return result;
13645 }
13646
13647 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13648 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13649 If ARG_SINP and ARG_COSP are NULL then the result is returned
13650 as a complex value.
13651 The type is taken from the type of ARG and is used for setting the
13652 precision of the calculation and results. */
13653
13654 static tree
13655 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13656 {
13657 tree const type = TREE_TYPE (arg);
13658 tree result = NULL_TREE;
13659
13660 STRIP_NOPS (arg);
13661
13662 /* To proceed, MPFR must exactly represent the target floating point
13663 format, which only happens when the target base equals two. */
13664 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13665 && TREE_CODE (arg) == REAL_CST
13666 && !TREE_OVERFLOW (arg))
13667 {
13668 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13669
13670 if (real_isfinite (ra))
13671 {
13672 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13673 const int prec = fmt->p;
13674 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13675 tree result_s, result_c;
13676 int inexact;
13677 mpfr_t m, ms, mc;
13678
13679 mpfr_inits2 (prec, m, ms, mc, NULL);
13680 mpfr_from_real (m, ra, GMP_RNDN);
13681 mpfr_clear_flags ();
13682 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13683 result_s = do_mpfr_ckconv (ms, type, inexact);
13684 result_c = do_mpfr_ckconv (mc, type, inexact);
13685 mpfr_clears (m, ms, mc, NULL);
13686 if (result_s && result_c)
13687 {
13688 /* If we are to return the result as a complex value, do so. */
13689 if (!arg_sinp && !arg_cosp)
13690 return build_complex (build_complex_type (type),
13691 result_c, result_s);
13692
13693 /* Dereference the sin/cos pointer arguments. */
13694 arg_sinp = build_fold_indirect_ref (arg_sinp);
13695 arg_cosp = build_fold_indirect_ref (arg_cosp);
13696 /* Proceed iff valid pointer types were passed in. */
13697 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13698 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13699 {
13700 /* Set the values. */
13701 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13702 result_s);
13703 TREE_SIDE_EFFECTS (result_s) = 1;
13704 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13705 result_c);
13706 TREE_SIDE_EFFECTS (result_c) = 1;
13707 /* Combine the assignments into a compound expr. */
13708 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13709 result_s, result_c));
13710 }
13711 }
13712 }
13713 }
13714 return result;
13715 }
13716
13717 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13718 two-argument mpfr order N Bessel function FUNC on them and return
13719 the resulting value as a tree with type TYPE. The mpfr precision
13720 is set to the precision of TYPE. We assume that function FUNC
13721 returns zero if the result could be calculated exactly within the
13722 requested precision. */
13723 static tree
13724 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13725 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13726 const REAL_VALUE_TYPE *min, bool inclusive)
13727 {
13728 tree result = NULL_TREE;
13729
13730 STRIP_NOPS (arg1);
13731 STRIP_NOPS (arg2);
13732
13733 /* To proceed, MPFR must exactly represent the target floating point
13734 format, which only happens when the target base equals two. */
13735 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13736 && host_integerp (arg1, 0)
13737 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13738 {
13739 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
13740 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13741
13742 if (n == (long)n
13743 && real_isfinite (ra)
13744 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13745 {
13746 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13747 const int prec = fmt->p;
13748 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13749 int inexact;
13750 mpfr_t m;
13751
13752 mpfr_init2 (m, prec);
13753 mpfr_from_real (m, ra, GMP_RNDN);
13754 mpfr_clear_flags ();
13755 inexact = func (m, n, m, rnd);
13756 result = do_mpfr_ckconv (m, type, inexact);
13757 mpfr_clear (m);
13758 }
13759 }
13760
13761 return result;
13762 }
13763
13764 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13765 the pointer *(ARG_QUO) and return the result. The type is taken
13766 from the type of ARG0 and is used for setting the precision of the
13767 calculation and results. */
13768
13769 static tree
13770 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13771 {
13772 tree const type = TREE_TYPE (arg0);
13773 tree result = NULL_TREE;
13774
13775 STRIP_NOPS (arg0);
13776 STRIP_NOPS (arg1);
13777
13778 /* To proceed, MPFR must exactly represent the target floating point
13779 format, which only happens when the target base equals two. */
13780 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13781 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13782 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13783 {
13784 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13785 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13786
13787 if (real_isfinite (ra0) && real_isfinite (ra1))
13788 {
13789 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13790 const int prec = fmt->p;
13791 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13792 tree result_rem;
13793 long integer_quo;
13794 mpfr_t m0, m1;
13795
13796 mpfr_inits2 (prec, m0, m1, NULL);
13797 mpfr_from_real (m0, ra0, GMP_RNDN);
13798 mpfr_from_real (m1, ra1, GMP_RNDN);
13799 mpfr_clear_flags ();
13800 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13801 /* Remquo is independent of the rounding mode, so pass
13802 inexact=0 to do_mpfr_ckconv(). */
13803 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13804 mpfr_clears (m0, m1, NULL);
13805 if (result_rem)
13806 {
13807 /* MPFR calculates quo in the host's long so it may
13808 return more bits in quo than the target int can hold
13809 if sizeof(host long) > sizeof(target int). This can
13810 happen even for native compilers in LP64 mode. In
13811 these cases, reduce the quo value modulo the largest
13812 number that the target int can hold while leaving one
13813 bit for the sign. */
13814 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13815 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13816
13817 /* Dereference the quo pointer argument. */
13818 arg_quo = build_fold_indirect_ref (arg_quo);
13819 /* Proceed iff a valid pointer type was passed in. */
13820 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13821 {
13822 /* Set the value. */
13823 tree result_quo
13824 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13825 build_int_cst (TREE_TYPE (arg_quo),
13826 integer_quo));
13827 TREE_SIDE_EFFECTS (result_quo) = 1;
13828 /* Combine the quo assignment with the rem. */
13829 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13830 result_quo, result_rem));
13831 }
13832 }
13833 }
13834 }
13835 return result;
13836 }
13837
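/* A worked example of the folding above, assuming the host long is
   wide enough: for remquo (7.0, 2.0, &q) the quotient 7/2 = 3.5 is
   rounded to the nearest even integer, 4, so the call folds to the
   COMPOUND_EXPR ((q = 4), -1.0), since 7.0 - 4*2.0 == -1.0.  */
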
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}

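/* For example, under the default -fno-rounding-math a constant call
   lgamma_r (-0.5, &sg) folds here to ((sg = -1), C), where C is the
   correctly rounded value of log (2 * sqrt (pi)): Gamma (-0.5) equals
   -2 * sqrt (pi), which is negative, hence the signgam of -1.  */
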
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}

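/* For illustration: with FUNC == mpc_cos this folds a constant call
   such as ccos (0.0 + 0.0i) to a COMPLEX_CST with real part 1.0 and a
   zero imaginary part, since both parts of the argument are finite
   and the result is exactly representable.  */
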
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}

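/* For illustration: with FUNC == mpc_pow this folds, for instance,
   cpow (1.0 + 1.0i, 2.0 + 0.0i) to the exact COMPLEX_CST 0.0 + 2.0i,
   since (1 + i) squared is 2i and both parts are representable.  */
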
/* Fold a call STMT to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
                                     (nargs > 0
                                      ? gimple_call_arg_ptr (stmt, 0)
                                      : &error_mark_node), fcode);
}

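/* As an example of what the underlying folder can do here: a call
   __builtin___sprintf_chk (buf, 0, bos, "hello"), whose format string
   contains no directives, may be folded into a straight copy of the
   constant string (including its terminating NUL), with the call's
   value replaced by the known length 5.  */
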
/* Fold a call STMT to __{,v}snprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is the maximum
   length passed as the second argument.  */

tree
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
                                  enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
                                      (nargs > 0
                                       ? gimple_call_arg_ptr (stmt, 0)
                                       : &error_mark_node), maxlen, fcode);
}

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, STMT is the GIMPLE call statement, and IGNORE is true if the
   result of the function call is ignored.  */

static tree
gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
                             bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (!ret)
            ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}

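/* A minimal usage sketch (hypothetical caller, not part of this file):
   a GIMPLE pass holding a call statement STMT can attempt folding with

     tree folded = fold_call_stmt (stmt, gimple_call_lhs (stmt) == NULL);
     if (folded)
       ... replace the call with FOLDED ...

   which is essentially how the generic statement folders use it.  */
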
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}

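/* For example, a translation unit that renames memcpy with

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   reaches this function with ASMSPEC == "my_memcpy", so both the
   block-move expander and the memcpy libfunc are redirected to that
   assembler name.  */
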
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one that
   will most probably be expanded inline into reasonably simple code.  This
   is a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
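
/* A usage sketch (hypothetical caller, for illustration only): cost
   models can treat calls to such builtins as nearly free, e.g.

     if (is_inexpensive_builtin (gimple_call_fndecl (stmt)))
       cost = 0;

   which is the spirit in which the inliner and the loop optimizers
   query this predicate.  */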