re PR c++/29234 (Call to operator() of temporary object wrongly parsed)
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "realmpfr.h"
28 #include "gimple.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "predict.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "langhooks.h"
45 #include "basic-block.h"
46 #include "tree-ssanames.h"
47 #include "tree-dfa.h"
48 #include "value-prof.h"
49 #include "diagnostic-core.h"
50 #include "builtins.h"
51 #include "ubsan.h"
52 #include "cilk.h"
53
54
55 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
56
57 struct target_builtins default_target_builtins;
58 #if SWITCHABLE_TARGET
59 struct target_builtins *this_target_builtins = &default_target_builtins;
60 #endif
61
62 /* Define the names of the builtin function types and codes. */
63 const char *const built_in_class_names[BUILT_IN_LAST]
64 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
65
66 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
67 const char * built_in_names[(int) END_BUILTINS] =
68 {
69 #include "builtins.def"
70 };
71 #undef DEF_BUILTIN
72
73 /* Setup an array of _DECL trees, make sure each element is
74 initialized to NULL_TREE. */
75 builtin_info_type builtin_info;
76
77 /* Non-zero if __builtin_constant_p should be folded right away. */
78 bool force_folding_builtin_constant_p;
79
80 static const char *c_getstr (tree);
81 static rtx c_readstr (const char *, enum machine_mode);
82 static int target_char_cast (tree, char *);
83 static rtx get_memory_rtx (tree, tree);
84 static int apply_args_size (void);
85 static int apply_result_size (void);
86 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
87 static rtx result_vector (int, rtx);
88 #endif
89 static void expand_builtin_update_setjmp_buf (rtx);
90 static void expand_builtin_prefetch (tree);
91 static rtx expand_builtin_apply_args (void);
92 static rtx expand_builtin_apply_args_1 (void);
93 static rtx expand_builtin_apply (rtx, rtx, rtx);
94 static void expand_builtin_return (rtx);
95 static enum type_class type_to_class (tree);
96 static rtx expand_builtin_classify_type (tree);
97 static void expand_errno_check (tree, rtx);
98 static rtx expand_builtin_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_next_arg (void);
108 static rtx expand_builtin_va_start (tree);
109 static rtx expand_builtin_va_end (tree);
110 static rtx expand_builtin_va_copy (tree);
111 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strcmp (tree, rtx);
113 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
114 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
115 static rtx expand_builtin_memcpy (tree, rtx);
116 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
118 enum machine_mode, int);
119 static rtx expand_builtin_strcpy (tree, rtx);
120 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
121 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strncpy (tree, rtx);
123 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
124 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
126 static rtx expand_builtin_bzero (tree);
127 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_alloca (tree, bool);
129 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
130 static rtx expand_builtin_frame_address (tree, tree);
131 static tree stabilize_va_list_loc (location_t, tree, int);
132 static rtx expand_builtin_expect (tree, rtx);
133 static tree fold_builtin_constant_p (tree);
134 static tree fold_builtin_expect (location_t, tree, tree);
135 static tree fold_builtin_classify_type (tree);
136 static tree fold_builtin_strlen (location_t, tree, tree);
137 static tree fold_builtin_inf (location_t, tree, int);
138 static tree fold_builtin_nan (tree, tree, int);
139 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
140 static bool validate_arg (const_tree, enum tree_code code);
141 static bool integer_valued_real_p (tree);
142 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
143 static bool readonly_data_expr (tree);
144 static rtx expand_builtin_fabs (tree, rtx, rtx);
145 static rtx expand_builtin_signbit (tree, rtx);
146 static tree fold_builtin_sqrt (location_t, tree, tree);
147 static tree fold_builtin_cbrt (location_t, tree, tree);
148 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
149 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
150 static tree fold_builtin_cos (location_t, tree, tree, tree);
151 static tree fold_builtin_cosh (location_t, tree, tree, tree);
152 static tree fold_builtin_tan (tree, tree);
153 static tree fold_builtin_trunc (location_t, tree, tree);
154 static tree fold_builtin_floor (location_t, tree, tree);
155 static tree fold_builtin_ceil (location_t, tree, tree);
156 static tree fold_builtin_round (location_t, tree, tree);
157 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
158 static tree fold_builtin_bitop (tree, tree);
159 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
160 static tree fold_builtin_strchr (location_t, tree, tree, tree);
161 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
162 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
163 static tree fold_builtin_strcmp (location_t, tree, tree);
164 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
165 static tree fold_builtin_signbit (location_t, tree, tree);
166 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
167 static tree fold_builtin_isascii (location_t, tree);
168 static tree fold_builtin_toascii (location_t, tree);
169 static tree fold_builtin_isdigit (location_t, tree);
170 static tree fold_builtin_fabs (location_t, tree, tree);
171 static tree fold_builtin_abs (location_t, tree, tree);
172 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
173 enum tree_code);
174 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
175 static tree fold_builtin_0 (location_t, tree, bool);
176 static tree fold_builtin_1 (location_t, tree, tree, bool);
177 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
178 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
179 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
180 static tree fold_builtin_varargs (location_t, tree, tree, bool);
181
182 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
183 static tree fold_builtin_strstr (location_t, tree, tree, tree);
184 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
185 static tree fold_builtin_strcat (location_t, tree, tree);
186 static tree fold_builtin_strncat (location_t, tree, tree, tree);
187 static tree fold_builtin_strspn (location_t, tree, tree);
188 static tree fold_builtin_strcspn (location_t, tree, tree);
189 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
190 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
191
192 static rtx expand_builtin_object_size (tree);
193 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
194 enum built_in_function);
195 static void maybe_emit_chk_warning (tree, enum built_in_function);
196 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_free_warning (tree);
198 static tree fold_builtin_object_size (tree, tree);
199 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
200 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
201 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
202 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
203 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
204 enum built_in_function);
205 static bool init_target_chars (void);
206
207 static unsigned HOST_WIDE_INT target_newline;
208 static unsigned HOST_WIDE_INT target_percent;
209 static unsigned HOST_WIDE_INT target_c;
210 static unsigned HOST_WIDE_INT target_s;
211 static char target_percent_c[3];
212 static char target_percent_s[3];
213 static char target_percent_s_newline[4];
214 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
215 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
216 static tree do_mpfr_arg2 (tree, tree, tree,
217 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
218 static tree do_mpfr_arg3 (tree, tree, tree, tree,
219 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
220 static tree do_mpfr_sincos (tree, tree, tree);
221 static tree do_mpfr_bessel_n (tree, tree, tree,
222 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
223 const REAL_VALUE_TYPE *, bool);
224 static tree do_mpfr_remquo (tree, tree, tree);
225 static tree do_mpfr_lgamma_r (tree, tree, tree);
226 static void expand_builtin_sync_synchronize (void);
227
228 /* Return true if NAME starts with __builtin_ or __sync_. */
229
230 static bool
231 is_builtin_name (const char *name)
232 {
233 if (strncmp (name, "__builtin_", 10) == 0)
234 return true;
235 if (strncmp (name, "__sync_", 7) == 0)
236 return true;
237 if (strncmp (name, "__atomic_", 9) == 0)
238 return true;
239 if (flag_enable_cilkplus
240 && (!strcmp (name, "__cilkrts_detach")
241 || !strcmp (name, "__cilkrts_pop_frame")))
242 return true;
243 return false;
244 }
245
246
247 /* Return true if DECL is a function symbol representing a built-in. */
248
249 bool
250 is_builtin_fn (tree decl)
251 {
252 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
253 }
254
255 /* By default we assume that c99 functions are present at the runtime,
256 but sincos is not. */
257 bool
258 default_libc_has_function (enum function_class fn_class)
259 {
260 if (fn_class == function_c94
261 || fn_class == function_c99_misc
262 || fn_class == function_c99_math_complex)
263 return true;
264
265 return false;
266 }
267
/* The GNU C library provides every classified function group, so
   report all classes as available regardless of FN_CLASS.  */
bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}
273
/* Variant for C libraries that lack the C99 function groups entirely:
   report every class as unavailable regardless of FN_CLASS.  */
bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}
279
280 /* Return true if NODE should be considered for inline expansion regardless
281 of the optimization level. This means whenever a function is invoked with
282 its "internal" name, which normally contains the prefix "__builtin". */
283
284 static bool
285 called_as_built_in (tree node)
286 {
287 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
288 we want the name used to call the function, not the name it
289 will have. */
290 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
291 return is_builtin_name (name);
292 }
293
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  /* Conservative defaults: byte alignment, not proven.  */
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    /* Labels: keep the BITS_PER_UNIT default, alignment stays unknown.  */
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      /* A BIT_AND_EXPR with a constant mask encodes a guaranteed
	 alignment: the lowest set bit of the mask bounds it.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  /* A second index with no known step defeats any alignment
	     knowledge we had.  */
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail: the number of trailing zero bits of
     the offset bounds the achievable byte alignment.  */
  if (offset)
    {
      int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Reduce the bit position modulo the alignment so that N < M.  */
  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
441
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   This is the non-address-taking entry point; see get_object_alignment_2
   for the ADDR_P distinction.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
453
454 /* Return the alignment in bits of EXP, an object. */
455
456 unsigned int
457 get_object_alignment (tree exp)
458 {
459 unsigned HOST_WIDE_INT bitpos = 0;
460 unsigned int align;
461
462 get_object_alignment_1 (exp, &align, &bitpos);
463
464 /* align and bitpos now specify known low bits of the pointer.
465 ptr & (align - 1) == bitpos. */
466
467 if (bitpos != 0)
468 align = (bitpos & -bitpos);
469 return align;
470 }
471
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    /* &object: defer to the object's own alignment, noting that we
       are taking its address (ADDR_P = true).  */
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      /* Pointer SSA names carry alignment info computed by earlier
	 passes in their ptr_info, if any.  */
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      /* A constant address: alignment is exactly determined by its
	 low bits.  */
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  /* Anything else: fall back to byte alignment, unproven.  */
  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
520
521 /* Return the alignment in bits of EXP, a pointer valued expression.
522 The alignment returned is, by default, the alignment of the thing that
523 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
524
525 Otherwise, look at the expression to see if we can do better, i.e., if the
526 expression is actually pointing at an object whose alignment is tighter. */
527
528 unsigned int
529 get_pointer_alignment (tree exp)
530 {
531 unsigned HOST_WIDE_INT bitpos = 0;
532 unsigned int align;
533
534 get_pointer_alignment_1 (exp, &align, &bitpos);
535
536 /* align and bitpos now specify known low bits of the pointer.
537 ptr & (align - 1) == bitpos. */
538
539 if (bitpos != 0)
540 align = (bitpos & -bitpos);
541
542 return align;
543 }
544
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* For a COND_EXPR, try both arms; if both have the same known
     length, that is the answer.  Safe only when evaluating the
     condition has no side effects (or ONLY_VALUE says we don't
     care).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For a COMPOUND_EXPR, the value is the second operand.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the array size minus the terminating NUL slot.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
646
647 /* Return a char pointer for a C string if it is a string constant
648 or sum of string constant and integer constant. */
649
650 static const char *
651 c_getstr (tree src)
652 {
653 tree offset_node;
654
655 src = string_constant (src, &offset_node);
656 if (src == 0)
657 return 0;
658
659 if (offset_node == 0)
660 return TREE_STRING_POINTER (src);
661 else if (!host_integerp (offset_node, 1)
662 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
663 return 0;
664
665 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
666 }
667
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.

   The bytes of STR are placed as the target would read them from
   memory, accounting for both byte order within words and word order
   within the constant.  Bytes past the end of STR are treated as the
   terminating NUL (CH latches to 0 once a NUL is seen).  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  /* CH doubles as a "still inside the string" flag: it is the last
     byte read, and stays 0 forever once a NUL byte is reached.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Compute J, the target bit position for byte I, from the
	 target's word and byte endianness.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
700
701 /* Cast a target constant CST to target CHAR and if that value fits into
702 host char type, return zero and put that value into variable pointed to by
703 P. */
704
705 static int
706 target_char_cast (tree cst, char *p)
707 {
708 unsigned HOST_WIDE_INT val, hostval;
709
710 if (TREE_CODE (cst) != INTEGER_CST
711 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
712 return 1;
713
714 val = TREE_INT_CST_LOW (cst);
715 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
716 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
717
718 hostval = val;
719 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
720 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
721
722 if (val != hostval)
723 return 1;
724
725 *p = hostval;
726 return 0;
727 }
728
729 /* Similar to save_expr, but assumes that arbitrary code is not executed
730 in between the multiple evaluations. In particular, we assume that a
731 non-addressable local variable will not be modified. */
732
733 static tree
734 builtin_save_expr (tree exp)
735 {
736 if (TREE_CODE (exp) == SSA_NAME
737 || (TREE_ADDRESSABLE (exp) == 0
738 && (TREE_CODE (exp) == PARM_DECL
739 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
740 return exp;
741
742 return save_expr (exp);
743 }
744
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).

   FNDECL_CODE is BUILT_IN_FRAME_ADDRESS or BUILT_IN_RETURN_ADDRESS.
   Several steps are target-configurable via the macros used below
   (INITIAL_FRAME_ADDRESS_RTX, SETUP_FRAME_ADDRESSES,
   RETURN_ADDR_IN_PREVIOUS_FRAME, DYNAMIC_CHAIN_ADDRESS, FRAME_ADDR_RTX,
   RETURN_ADDR_RTX).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address lives one word past the frame
     address.  */
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
828
/* Alias set used for the __builtin_setjmp buffer.  -1 means it has not
   been allocated yet; it is created lazily via new_alias_set ().  */
static alias_set_type setjmp_alias_set = -1;
831
832 /* Construct the leading half of a __builtin_setjmp call. Control will
833 return to RECEIVER_LABEL. This is also called directly by the SJLJ
834 exception handling code. */
835
836 void
837 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
838 {
839 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
840 rtx stack_save;
841 rtx mem;
842
843 if (setjmp_alias_set == -1)
844 setjmp_alias_set = new_alias_set ();
845
846 buf_addr = convert_memory_address (Pmode, buf_addr);
847
848 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
849
850 /* We store the frame pointer and the address of receiver_label in
851 the buffer and use the rest of it for the stack save area, which
852 is machine-dependent. */
853
854 mem = gen_rtx_MEM (Pmode, buf_addr);
855 set_mem_alias_set (mem, setjmp_alias_set);
856 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
857
858 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
859 GET_MODE_SIZE (Pmode))),
860 set_mem_alias_set (mem, setjmp_alias_set);
861
862 emit_move_insn (validize_mem (mem),
863 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
864
865 stack_save = gen_rtx_MEM (sa_mode,
866 plus_constant (Pmode, buf_addr,
867 2 * GET_MODE_SIZE (Pmode)));
868 set_mem_alias_set (stack_save, setjmp_alias_set);
869 emit_stack_save (SAVE_NONLOCAL, &stack_save);
870
871 /* If there is further processing to do, do it. */
872 #ifdef HAVE_builtin_setjmp_setup
873 if (HAVE_builtin_setjmp_setup)
874 emit_insn (gen_builtin_setjmp_setup (buf_addr));
875 #endif
876
877 /* We have a nonlocal label. */
878 cfun->has_nonlocal_label = 1;
879 }
880
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  Note the dangling `if' below: when the target
     provides a nonlocal_goto pattern, the frame pointer restore is the
     pattern's job and the emit_move_insn is skipped.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be decrementing fp by
       STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Prefer the setjmp-specific receiver pattern when a label is given;
     otherwise fall back to the generic nonlocal-goto receiver.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  Similarly, we must block
     (frame-related) register values to be used across this code.  */
  emit_insn (gen_blockage ());
}
964
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   BUF_ADDR is the buffer written by expand_builtin_setjmp_setup:
   word 0 holds the frame pointer, word 1 the receiver label, and
   words 2 onward the saved stack pointer.  VALUE must be const1_rtx
   (the value __builtin_setjmp will appear to return).  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* MEMs for the three buffer slots; all share the setjmp alias set
	 so they don't conflict with user data.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      /* The loop must find a jump before walking past the insns we
	 emitted above (LAST marks where we started).  */
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
1052
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.

   EXP is the CALL_EXPR; its two POINTER_TYPE arguments are the label
   address and the save-area address.  Returns const0_rtx when the call
   was expanded, or NULL_RTX if the argument list does not validate.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* Save area layout: word 0 is the frame pointer, word 1 onward the
     stack save area.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1134
1135 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1136 (not all will be used on all machines) that was passed to __builtin_setjmp.
1137 It updates the stack pointer in that block to correspond to the current
1138 stack pointer. */
1139
1140 static void
1141 expand_builtin_update_setjmp_buf (rtx buf_addr)
1142 {
1143 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1144 rtx stack_save
1145 = gen_rtx_MEM (sa_mode,
1146 memory_address
1147 (sa_mode,
1148 plus_constant (Pmode, buf_addr,
1149 2 * GET_MODE_SIZE (Pmode))));
1150
1151 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1152 }
1153
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.

   EXP is the CALL_EXPR.  Argument 0 is the address; argument 1
   (read/write flag, 0 or 1) and argument 2 (locality, 0..3) are
   optional and must be compile-time integer constants.  Invalid
   constant arguments produce a diagnostic and fall back to zero.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1233
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.

   The returned MEM is given alias set 0 (it may alias anything); its
   MEM attributes are derived from EXP where possible.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  /* Note: the address is expanded from ORIG_EXP (with the SAVE_EXPR
     intact); EXP is only unwrapped for attribute derivation below.  */
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
1288 \f
/* Built-in functions to perform an untyped call and return.  */

/* Per-target tables giving, for each hard register, the mode in which
   __builtin_apply_args / __builtin_apply save and restore it (VOIDmode
   for registers that are not saved).  Filled in lazily by
   apply_args_size and apply_result_size below.  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
1295
1296 /* Return the size required for the block returned by __builtin_apply_args,
1297 and initialize apply_args_mode. */
1298
1299 static int
1300 apply_args_size (void)
1301 {
1302 static int size = -1;
1303 int align;
1304 unsigned int regno;
1305 enum machine_mode mode;
1306
1307 /* The values computed by this function never change. */
1308 if (size < 0)
1309 {
1310 /* The first value is the incoming arg-pointer. */
1311 size = GET_MODE_SIZE (Pmode);
1312
1313 /* The second value is the structure value address unless this is
1314 passed as an "invisible" first argument. */
1315 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1316 size += GET_MODE_SIZE (Pmode);
1317
1318 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1319 if (FUNCTION_ARG_REGNO_P (regno))
1320 {
1321 mode = targetm.calls.get_raw_arg_mode (regno);
1322
1323 gcc_assert (mode != VOIDmode);
1324
1325 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1326 if (size % align != 0)
1327 size = CEIL (size, align) * align;
1328 size += GET_MODE_SIZE (mode);
1329 apply_args_mode[regno] = mode;
1330 }
1331 else
1332 {
1333 apply_args_mode[regno] = VOIDmode;
1334 }
1335 }
1336 return size;
1337 }
1338
1339 /* Return the size required for the block returned by __builtin_apply,
1340 and initialize apply_result_mode. */
1341
1342 static int
1343 apply_result_size (void)
1344 {
1345 static int size = -1;
1346 int align, regno;
1347 enum machine_mode mode;
1348
1349 /* The values computed by this function never change. */
1350 if (size < 0)
1351 {
1352 size = 0;
1353
1354 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1355 if (targetm.calls.function_value_regno_p (regno))
1356 {
1357 mode = targetm.calls.get_raw_result_mode (regno);
1358
1359 gcc_assert (mode != VOIDmode);
1360
1361 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1362 if (size % align != 0)
1363 size = CEIL (size, align) * align;
1364 size += GET_MODE_SIZE (mode);
1365 apply_result_mode[regno] = mode;
1366 }
1367 else
1368 apply_result_mode[regno] = VOIDmode;
1369
1370 /* Allow targets that use untyped_call and untyped_return to override
1371 the size so that machine-specific information can be stored here. */
1372 #ifdef APPLY_RESULT_SIZE
1373 size = APPLY_RESULT_SIZE;
1374 #endif
1375 }
1376 return size;
1377 }
1378
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  Returns a PARALLEL of SETs, one per register
   recorded in apply_result_mode, laid out at the same offsets used by
   apply_result_size.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, offset, align, count;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *sets = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  offset = 0;
  count = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    {
      mode = apply_result_mode[regno];
      if (mode == VOIDmode)
	continue;

      /* Keep each slot aligned for its mode, mirroring apply_result_size.  */
      align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (offset % align != 0)
	offset = CEIL (offset, align) * align;

      reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
      mem = adjust_address (result, mode, offset);
      sets[count++] = (savep
		       ? gen_rtx_SET (VOIDmode, mem, reg)
		       : gen_rtx_SET (VOIDmode, reg, mem));
      offset += GET_MODE_SIZE (mode);
    }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (count, sets));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1409
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.

   Emits code that stores, into a fresh stack block: the incoming
   arg-pointer, each incoming argument register (at the offsets
   computed by apply_args_size), and the structure value address if it
   is not passed invisibly.  Returns a pseudo holding the block's
   address.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1470
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Capture the register-saving code in its own sequence so it can
       be re-emitted at the start of the function below.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1515
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.

   FUNCTION is the address of the function to call, ARGUMENTS the
   address of a block laid out by __builtin_apply_args, and ARGSIZE the
   number of bytes of arguments to copy onto the new argument block.
   Returns (in ptr_mode) the address of a stack block holding the
   callee's return registers.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  (The call to apply_args_size here
     ensures apply_args_mode is initialized before the loop below.)  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
    if (HAVE_call_value)
      {
	rtx valreg = 0;

	/* Locate the unique return register.  It is not possible to
	   express a call that sets more than one return register using
	   call_value; use untyped_call for that.  In fact, untyped_call
	   only needs to save the return registers in the given block.  */
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	  if ((mode = apply_result_mode[regno]) != VOIDmode)
	    {
	      gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	      valreg = gen_rtx_REG (mode, regno);
	    }

	emit_call_insn (GEN_CALL_VALUE (valreg,
					gen_rtx_MEM (FUNCTION_MODE, function),
					const0_rtx, NULL_RTX, const0_rtx));

	emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
      }
    else
#endif
      gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1682
/* Perform an untyped return.

   RESULT is the address of a block (as produced by __builtin_apply)
   holding the saved return registers.  Reloads each register recorded
   in apply_result_mode from the block, marks them live with USE insns,
   and jumps to the function epilogue.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Ensure apply_result_mode is initialized before the loop below.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Collect the USE insns in a separate sequence so they can all
	   be emitted together after the loop.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1732
1733 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1734
1735 static enum type_class
1736 type_to_class (tree type)
1737 {
1738 switch (TREE_CODE (type))
1739 {
1740 case VOID_TYPE: return void_type_class;
1741 case INTEGER_TYPE: return integer_type_class;
1742 case ENUMERAL_TYPE: return enumeral_type_class;
1743 case BOOLEAN_TYPE: return boolean_type_class;
1744 case POINTER_TYPE: return pointer_type_class;
1745 case REFERENCE_TYPE: return reference_type_class;
1746 case OFFSET_TYPE: return offset_type_class;
1747 case REAL_TYPE: return real_type_class;
1748 case COMPLEX_TYPE: return complex_type_class;
1749 case FUNCTION_TYPE: return function_type_class;
1750 case METHOD_TYPE: return method_type_class;
1751 case RECORD_TYPE: return record_type_class;
1752 case UNION_TYPE:
1753 case QUAL_UNION_TYPE: return union_type_class;
1754 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1755 ? string_type_class : array_type_class);
1756 case LANG_TYPE: return lang_type_class;
1757 default: return no_type_class;
1758 }
1759 }
1760
1761 /* Expand a call EXP to __builtin_classify_type. */
1762
1763 static rtx
1764 expand_builtin_classify_type (tree exp)
1765 {
1766 if (call_expr_nargs (exp))
1767 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1768 return GEN_INT (no_type_class);
1769 }
1770
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  Expands to three
   case labels and assignments to the local variables fcode, fcodef
   and fcodel of the enclosing switch.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
    fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
    fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix, for the
   reentrant variants (e.g. lgamma_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
    fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
    fcodel = BUILT_IN_MATHFN##L_R ; break;
1784
/* Return mathematic function equivalent to FN but operating directly on TYPE,
   if available.  If IMPLICIT is true use the implicit builtin declaration,
   otherwise use the explicit declaration.  If we can't do the conversion,
   return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  /* FCODE/FCODEF/FCODEL are the double/float/long double variants of FN,
     filled in by the CASE_MATHFN table below.  FCODE2 is the member of
     that trio selected by TYPE.  */
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
    CASE_MATHFN (BUILT_IN_ACOS)
    CASE_MATHFN (BUILT_IN_ACOSH)
    CASE_MATHFN (BUILT_IN_ASIN)
    CASE_MATHFN (BUILT_IN_ASINH)
    CASE_MATHFN (BUILT_IN_ATAN)
    CASE_MATHFN (BUILT_IN_ATAN2)
    CASE_MATHFN (BUILT_IN_ATANH)
    CASE_MATHFN (BUILT_IN_CBRT)
    CASE_MATHFN (BUILT_IN_CEIL)
    CASE_MATHFN (BUILT_IN_CEXPI)
    CASE_MATHFN (BUILT_IN_COPYSIGN)
    CASE_MATHFN (BUILT_IN_COS)
    CASE_MATHFN (BUILT_IN_COSH)
    CASE_MATHFN (BUILT_IN_DREM)
    CASE_MATHFN (BUILT_IN_ERF)
    CASE_MATHFN (BUILT_IN_ERFC)
    CASE_MATHFN (BUILT_IN_EXP)
    CASE_MATHFN (BUILT_IN_EXP10)
    CASE_MATHFN (BUILT_IN_EXP2)
    CASE_MATHFN (BUILT_IN_EXPM1)
    CASE_MATHFN (BUILT_IN_FABS)
    CASE_MATHFN (BUILT_IN_FDIM)
    CASE_MATHFN (BUILT_IN_FLOOR)
    CASE_MATHFN (BUILT_IN_FMA)
    CASE_MATHFN (BUILT_IN_FMAX)
    CASE_MATHFN (BUILT_IN_FMIN)
    CASE_MATHFN (BUILT_IN_FMOD)
    CASE_MATHFN (BUILT_IN_FREXP)
    CASE_MATHFN (BUILT_IN_GAMMA)
    CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
    CASE_MATHFN (BUILT_IN_HUGE_VAL)
    CASE_MATHFN (BUILT_IN_HYPOT)
    CASE_MATHFN (BUILT_IN_ILOGB)
    CASE_MATHFN (BUILT_IN_ICEIL)
    CASE_MATHFN (BUILT_IN_IFLOOR)
    CASE_MATHFN (BUILT_IN_INF)
    CASE_MATHFN (BUILT_IN_IRINT)
    CASE_MATHFN (BUILT_IN_IROUND)
    CASE_MATHFN (BUILT_IN_ISINF)
    CASE_MATHFN (BUILT_IN_J0)
    CASE_MATHFN (BUILT_IN_J1)
    CASE_MATHFN (BUILT_IN_JN)
    CASE_MATHFN (BUILT_IN_LCEIL)
    CASE_MATHFN (BUILT_IN_LDEXP)
    CASE_MATHFN (BUILT_IN_LFLOOR)
    CASE_MATHFN (BUILT_IN_LGAMMA)
    CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (BUILT_IN_LLCEIL)
    CASE_MATHFN (BUILT_IN_LLFLOOR)
    CASE_MATHFN (BUILT_IN_LLRINT)
    CASE_MATHFN (BUILT_IN_LLROUND)
    CASE_MATHFN (BUILT_IN_LOG)
    CASE_MATHFN (BUILT_IN_LOG10)
    CASE_MATHFN (BUILT_IN_LOG1P)
    CASE_MATHFN (BUILT_IN_LOG2)
    CASE_MATHFN (BUILT_IN_LOGB)
    CASE_MATHFN (BUILT_IN_LRINT)
    CASE_MATHFN (BUILT_IN_LROUND)
    CASE_MATHFN (BUILT_IN_MODF)
    CASE_MATHFN (BUILT_IN_NAN)
    CASE_MATHFN (BUILT_IN_NANS)
    CASE_MATHFN (BUILT_IN_NEARBYINT)
    CASE_MATHFN (BUILT_IN_NEXTAFTER)
    CASE_MATHFN (BUILT_IN_NEXTTOWARD)
    CASE_MATHFN (BUILT_IN_POW)
    CASE_MATHFN (BUILT_IN_POWI)
    CASE_MATHFN (BUILT_IN_POW10)
    CASE_MATHFN (BUILT_IN_REMAINDER)
    CASE_MATHFN (BUILT_IN_REMQUO)
    CASE_MATHFN (BUILT_IN_RINT)
    CASE_MATHFN (BUILT_IN_ROUND)
    CASE_MATHFN (BUILT_IN_SCALB)
    CASE_MATHFN (BUILT_IN_SCALBLN)
    CASE_MATHFN (BUILT_IN_SCALBN)
    CASE_MATHFN (BUILT_IN_SIGNBIT)
    CASE_MATHFN (BUILT_IN_SIGNIFICAND)
    CASE_MATHFN (BUILT_IN_SIN)
    CASE_MATHFN (BUILT_IN_SINCOS)
    CASE_MATHFN (BUILT_IN_SINH)
    CASE_MATHFN (BUILT_IN_SQRT)
    CASE_MATHFN (BUILT_IN_TAN)
    CASE_MATHFN (BUILT_IN_TANH)
    CASE_MATHFN (BUILT_IN_TGAMMA)
    CASE_MATHFN (BUILT_IN_TRUNC)
    CASE_MATHFN (BUILT_IN_Y0)
    CASE_MATHFN (BUILT_IN_Y1)
    CASE_MATHFN (BUILT_IN_YN)

    default:
      /* FN is not one of the type-generic math builtins.  */
      return NULL_TREE;
    }

  /* Select the variant whose operand type matches TYPE, ignoring
     qualifiers.  Types other than double/float/long double have no
     corresponding builtin here.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
1903
1904 /* Like mathfn_built_in_1(), but always use the implicit array. */
1905
1906 tree
1907 mathfn_built_in (tree type, enum built_in_function fn)
1908 {
1909 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1910 }
1911
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A self-comparison with EQ is
     true for every value except NaN, so the (very likely) branch to
     LAB skips the errno handling in the common case.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      /* No target-specific errno location; fall back to the global
	 symbol "errno".  */
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
		      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1955
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  /* True if the libm function can set errno, requiring an inline NaN
     check (see expand_errno_check).  */
  bool errno_set = false;
  /* True if a wider mode may be tried when the optab is unavailable in
     MODE; currently only for sqrt.  */
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Map the builtin to its optab and decide whether errno handling is
     needed.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt of a provably non-negative argument cannot set errno.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, result);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2080
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns, result;
  /* Expected tree code class of the second argument; the scalb/ldexp
     family takes an integer exponent, everything else a real.  */
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* FALLTHRU */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* Map the builtin to its optab; the base-2-only functions bail out
     for non-binary floating point formats.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* The inline errno check costs size; prefer the libcall when
     optimizing for size.  */
  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
			 result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
2189
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, insns, result;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  enum machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  /* Only fma is handled here; the switch keeps the shape of the other
     mathfn expanders for easy extension.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
2262
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Prefer the combined sincos optab for either function; only the
     needed half of its result will be used below.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  /* Route RESULT to the sin or cos output of the twoval insn
	     and discard the other one.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2362
2363 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2364 return an RTL instruction code that implements the functionality.
2365 If that isn't possible or available return CODE_FOR_nothing. */
2366
2367 static enum insn_code
2368 interclass_mathfn_icode (tree arg, tree fndecl)
2369 {
2370 bool errno_set = false;
2371 optab builtin_optab = unknown_optab;
2372 enum machine_mode mode;
2373
2374 switch (DECL_FUNCTION_CODE (fndecl))
2375 {
2376 CASE_FLT_FN (BUILT_IN_ILOGB):
2377 errno_set = true; builtin_optab = ilogb_optab; break;
2378 CASE_FLT_FN (BUILT_IN_ISINF):
2379 builtin_optab = isinf_optab; break;
2380 case BUILT_IN_ISNORMAL:
2381 case BUILT_IN_ISFINITE:
2382 CASE_FLT_FN (BUILT_IN_FINITE):
2383 case BUILT_IN_FINITED32:
2384 case BUILT_IN_FINITED64:
2385 case BUILT_IN_FINITED128:
2386 case BUILT_IN_ISINFD32:
2387 case BUILT_IN_ISINFD64:
2388 case BUILT_IN_ISINFD128:
2389 /* These builtins have no optabs (yet). */
2390 break;
2391 default:
2392 gcc_unreachable ();
2393 }
2394
2395 /* There's no easy way to detect the case we need to set EDOM. */
2396 if (flag_errno_math && errno_set)
2397 return CODE_FOR_nothing;
2398
2399 /* Optab mode depends on the mode of the input argument. */
2400 mode = TYPE_MODE (TREE_TYPE (arg));
2401
2402 if (builtin_optab)
2403 return optab_handler (builtin_optab, mode);
2404 return CODE_FOR_nothing;
2405 }
2406
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      /* Remember where we are so the partially emitted insns can be
	 deleted if expansion fails.  */
      rtx last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Expansion failed: discard the emitted insns and restore the
	 original (unsaved) argument before falling back to a call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2457
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  /* sincos (x, double *sinp, double *cosp) — one real and two pointer
     arguments.  */
  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs through the sinp/cosp pointers so the stores get
     correct alias information.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  /* sincos returns void; const0_rtx signals successful expansion.  */
  return const0_rtx;
}
2511
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.

   Three strategies are tried in order: the sincos optab, a libcall to
   sincos, and finally a libcall to cexp with a constructed complex
   argument.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching the cexpi precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      /* Stack temporaries receive the sin/cos results; pass their
	 addresses to sincos.  */
      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi (x) == cexp (0 + x*i).  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos (op2) is the real part,
     sin (op1) the imaginary part.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2620
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  /* Callees are represented as the address of the function decl.  */
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
2639
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  /* The plain floating-point rounding builtin used when the combined
     round-and-convert optab is unavailable.  */
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Choose the libm name matching the precision of the builtin.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2776
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  /* Callers guarantee the argument list has been validated already.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
     gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Pick the conversion optab.  The int-returning variants (irint,
     iround) additionally record a long-returning builtin to fall back
     to if the optab expansion fails.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      /* Expand inside a sequence so we can throw the insns away if the
	 optab expansion fails.  */
      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
						fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      /* The fallback returns a long; narrow/widen it to the mode of the
	 original call's result.  */
      return convert_to_mode (mode, target, 0);
    }

  /* Neither the optab nor a fallback applied: emit a normal call.  */
  return expand_call (exp, target, target == const0_rtx);
}
2877
2878 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2879 a normal call should be emitted rather than expanding the function
2880 in-line. EXP is the expression that is a call to the builtin
2881 function; if convenient, the result should be placed in TARGET. */
2882
2883 static rtx
2884 expand_builtin_powi (tree exp, rtx target)
2885 {
2886 tree arg0, arg1;
2887 rtx op0, op1;
2888 enum machine_mode mode;
2889 enum machine_mode mode2;
2890
2891 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2892 return NULL_RTX;
2893
2894 arg0 = CALL_EXPR_ARG (exp, 0);
2895 arg1 = CALL_EXPR_ARG (exp, 1);
2896 mode = TYPE_MODE (TREE_TYPE (exp));
2897
2898 /* Emit a libcall to libgcc. */
2899
2900 /* Mode of the 2nd argument must match that of an int. */
2901 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2902
2903 if (target == NULL_RTX)
2904 target = gen_reg_rtx (mode);
2905
2906 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2907 if (GET_MODE (op0) != mode)
2908 op0 = convert_to_mode (mode, op0, 0);
2909 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2910 if (GET_MODE (op1) != mode2)
2911 op1 = convert_to_mode (mode2, op1, 0);
2912
2913 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2914 target, LCT_CONST, mode, 2,
2915 op0, mode, op1, mode2);
2916
2917 return target;
2918 }
2919
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg, before_strlen;
      enum machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.
	 Try successively wider integer modes until the target provides
	 a strlen insn pattern for one of them.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
	return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (pat) != Pmode)
	    pat = convert_to_mode (Pmode, pat,
				   POINTERS_EXTEND_UNSIGNED);
#endif
	  emit_move_insn (src_reg, pat);
	}
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in just before the strlen
	 insns that were emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
	target = ops[0].value;
      else if (target != 0)
	convert_move (target, ops[0].value, 0);
      else
	target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
3023
3024 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3025 bytes from constant string DATA + OFFSET and return it as target
3026 constant. */
3027
3028 static rtx
3029 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3030 enum machine_mode mode)
3031 {
3032 const char *str = (const char *) data;
3033
3034 gcc_assert (offset >= 0
3035 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3036 <= strlen (str) + 1));
3037
3038 return c_readstr (str + offset, mode);
3039 }
3040
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Pick up profile-derived alignment/size hints for this stringop,
	 if any.  */
      if (currently_expanding_gimple_stmt)
        stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and need only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
				         CALL_EXPR_TAILCALL (exp)
				         ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* memcpy returns the destination pointer; materialize it if the
	 block move didn't already provide it.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3120
3121 /* Expand a call EXP to the mempcpy builtin.
3122 Return NULL_RTX if we failed; the caller should emit a normal call,
3123 otherwise try to get the result in TARGET, if convenient (and in
3124 mode MODE if that's convenient). If ENDP is 0 return the
3125 destination pointer, if ENDP is 1 return the end pointer ala
3126 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3127 stpcpy. */
3128
3129 static rtx
3130 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3131 {
3132 if (!validate_arglist (exp,
3133 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3134 return NULL_RTX;
3135 else
3136 {
3137 tree dest = CALL_EXPR_ARG (exp, 0);
3138 tree src = CALL_EXPR_ARG (exp, 1);
3139 tree len = CALL_EXPR_ARG (exp, 2);
3140 return expand_builtin_mempcpy_args (dest, src, len,
3141 target, mode, /*endp=*/ 1);
3142 }
3143 }
3144
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
    /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and need only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise, if the length is a constant small enough to move by
	 pieces, emit the piecewise move and return the adjusted
	 destination address (per ENDP).  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3221
#ifndef HAVE_movstr
# define HAVE_movstr 0
# define CODE_FOR_movstr CODE_FOR_nothing
#endif

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* The return value is the destination pointer itself; force it
	 into a register now and let the insn's output operand be
	 ignored below.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  expand_insn (CODE_FOR_movstr, 3, ops);

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
3272
3273 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3274 NULL_RTX if we failed the caller should emit a normal call, otherwise
3275 try to get the result in TARGET, if convenient (and in mode MODE if that's
3276 convenient). */
3277
3278 static rtx
3279 expand_builtin_strcpy (tree exp, rtx target)
3280 {
3281 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3282 {
3283 tree dest = CALL_EXPR_ARG (exp, 0);
3284 tree src = CALL_EXPR_ARG (exp, 1);
3285 return expand_builtin_strcpy_args (dest, src, target);
3286 }
3287 return NULL_RTX;
3288 }
3289
3290 /* Helper function to do the actual work for expand_builtin_strcpy. The
3291 arguments to the builtin_strcpy call DEST and SRC are broken out
3292 so that this can also be called without constructing an actual CALL_EXPR.
3293 The other arguments and return value are the same as for
3294 expand_builtin_strcpy. */
3295
3296 static rtx
3297 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3298 {
3299 return expand_movstr (dest, src, target, /*endp=*/0);
3300 }
3301
3302 /* Expand a call EXP to the stpcpy builtin.
3303 Return NULL_RTX if we failed the caller should emit a normal call,
3304 otherwise try to get the result in TARGET, if convenient (and in
3305 mode MODE if that's convenient). */
3306
3307 static rtx
3308 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3309 {
3310 tree dst, src;
3311 location_t loc = EXPR_LOCATION (exp);
3312
3313 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3314 return NULL_RTX;
3315
3316 dst = CALL_EXPR_ARG (exp, 0);
3317 src = CALL_EXPR_ARG (exp, 1);
3318
3319 /* If return value is ignored, transform stpcpy into strcpy. */
3320 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3321 {
3322 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3323 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3324 return expand_expr (result, target, mode, EXPAND_NORMAL);
3325 }
3326 else
3327 {
3328 tree len, lenp1;
3329 rtx ret;
3330
3331 /* Ensure we get an actual string whose length can be evaluated at
3332 compile-time, not an expression containing a string. This is
3333 because the latter will potentially produce pessimized code
3334 when used to produce the return value. */
3335 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3336 return expand_movstr (dst, src, target, /*endp=*/2);
3337
3338 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3339 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3340 target, mode, /*endp=*/2);
3341
3342 if (ret)
3343 return ret;
3344
3345 if (TREE_CODE (len) == INTEGER_CST)
3346 {
3347 rtx len_rtx = expand_normal (len);
3348
3349 if (CONST_INT_P (len_rtx))
3350 {
3351 ret = expand_builtin_strcpy_args (dst, src, target);
3352
3353 if (ret)
3354 {
3355 if (! target)
3356 {
3357 if (mode != VOIDmode)
3358 target = gen_reg_rtx (mode);
3359 else
3360 target = gen_reg_rtx (GET_MODE (ret));
3361 }
3362 if (GET_MODE (target) != GET_MODE (ret))
3363 ret = gen_lowpart (GET_MODE (target), ret);
3364
3365 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3366 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3367 gcc_assert (ret);
3368
3369 return target;
3370 }
3371 }
3372 }
3373
3374 return expand_movstr (dst, src, target, /*endp=*/2);
3375 }
3376 }
3377
3378 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3379 bytes from constant string DATA + OFFSET and return it as target
3380 constant. */
3381
3382 rtx
3383 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3384 enum machine_mode mode)
3385 {
3386 const char *str = (const char *) data;
3387
3388 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3389 return const0_rtx;
3390
3391 return c_readstr (str + offset, mode);
3392 }
3393
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
 			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* SLEN becomes strlen (src) + 1, the number of bytes including
	 the terminating NUL.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  /* builtin_strncpy_read_str supplies zeros past the NUL, giving
	     the required padding in one store_by_pieces pass.  */
	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3443
3444 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3445 bytes from constant string DATA + OFFSET and return it as target
3446 constant. */
3447
3448 rtx
3449 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3450 enum machine_mode mode)
3451 {
3452 const char *c = (const char *) data;
3453 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3454
3455 memset (p, *c, GET_MODE_SIZE (mode));
3456
3457 return c_readstr (p, mode);
3458 }
3459
3460 /* Callback routine for store_by_pieces. Return the RTL of a register
3461 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3462 char value given in the RTL register data. For example, if mode is
3463 4 bytes wide, return the RTL for 0x01010101*data. */
3464
3465 static rtx
3466 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3467 enum machine_mode mode)
3468 {
3469 rtx target, coeff;
3470 size_t size;
3471 char *p;
3472
3473 size = GET_MODE_SIZE (mode);
3474 if (size == 1)
3475 return (rtx) data;
3476
3477 p = XALLOCAVEC (char, size);
3478 memset (p, 1, size);
3479 coeff = c_readstr (p, mode);
3480
3481 target = convert_to_mode (mode, (rtx) data, 1);
3482 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3483 return force_reg (mode, target);
3484 }
3485
3486 /* Expand expression EXP, which is a call to the memset builtin. Return
3487 NULL_RTX if we failed the caller should emit a normal call, otherwise
3488 try to get the result in TARGET, if convenient (and in mode MODE if that's
3489 convenient). */
3490
3491 static rtx
3492 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3493 {
3494 if (!validate_arglist (exp,
3495 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3496 return NULL_RTX;
3497 else
3498 {
3499 tree dest = CALL_EXPR_ARG (exp, 0);
3500 tree val = CALL_EXPR_ARG (exp, 1);
3501 tree len = CALL_EXPR_ARG (exp, 2);
3502 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3503 }
3504 }
3505
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  ORIG_EXP is the original call expression, used
   to rebuild a library call (memset or bzero) if inline expansion fails.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  enum machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Pick up profile-derived alignment/size hints for this stringop.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  The same trees are reused
     for the library-call fallback at do_libcall below.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* VAL is a constant; reduce it to a single fill byte, or punt to the
     library call if it doesn't fit.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Fill byte is zero: use the block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed: rebuild a call to the original builtin
     (memset or bzero) using the stabilized arguments.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3637
3638 /* Expand expression EXP, which is a call to the bzero builtin. Return
3639 NULL_RTX if we failed the caller should emit a normal call. */
3640
3641 static rtx
3642 expand_builtin_bzero (tree exp)
3643 {
3644 tree dest, size;
3645 location_t loc = EXPR_LOCATION (exp);
3646
3647 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3648 return NULL_RTX;
3649
3650 dest = CALL_EXPR_ARG (exp, 0);
3651 size = CALL_EXPR_ARG (exp, 1);
3652
3653 /* New argument list transforming bzero(ptr x, int y) to
3654 memset(ptr x, int 0, size_t y). This is done this way
3655 so that if it isn't expanded inline, we fallback to
3656 calling bzero instead of memset. */
3657
3658 return expand_builtin_memset_args (dest, integer_zero_node,
3659 fold_convert_loc (loc,
3660 size_type_node, size),
3661 const0_rtx, VOIDmode, exp);
3662 }
3663
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
#if defined HAVE_cmpmemsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  Reuse TARGET
       only if it is already a pseudo register of the insn's mode.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
	set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
      }

    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
      gcc_unreachable ();

    /* If the pattern produced no insn, fall back to a libcall to
       memcmp with the same operands.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif /* HAVE_cmpmemsi.  */

  return NULL_RTX;
}
3755
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed (the caller should then emit a normal call),
   otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  /* Only try inline expansion if the target provides a string-compare
     (cmpstrsi) or bounded string-compare (cmpstrnsi) insn pattern.  */
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  They may
	 be re-used for the library-call fallback below, so they must not
	 be evaluated twice.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Compare up to and including the NUL terminator, hence +1.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      /* Preserve tail-call eligibility from the original call.  */
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
3896
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed (the caller should then emit a normal call),
   otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Compare up to and including the NUL terminator, hence +1.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  They are
	 re-used by the library-call fallback below and must not be
	 evaluated twice.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      /* Preserve tail-call eligibility from the original call.  */
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4017
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4054
/* Expand a call to __builtin_next_arg.  Returns the address just past
   the last named argument: internal_arg_pointer + arg_offset_rtx.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
4067
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for the
   resulting trees; NEEDS_LVALUE is nonzero when the caller will write
   through the returned expression.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Re-materialize the value as *&valist so the saved address is
	 evaluated only once.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4117
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
4125
/* The "standard" abi va_list is va_list_type_node.  FNDECL is ignored
   because the default ABI does not vary per function.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
4133
4134 /* The "standard" type of va_list is va_list_type_node. */
4135
4136 tree
4137 std_canonical_va_list_type (tree type)
4138 {
4139 tree wtype, htype;
4140
4141 if (INDIRECT_REF_P (type))
4142 type = TREE_TYPE (type);
4143 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4144 type = TREE_TYPE (type);
4145 wtype = va_list_type_node;
4146 htype = type;
4147 /* Treat structure va_list types. */
4148 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4149 htype = TREE_TYPE (htype);
4150 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4151 {
4152 /* If va_list is an array type, the argument may have decayed
4153 to a pointer type, e.g. by being passed to another function.
4154 In that case, unwrap both types so that we can compare the
4155 underlying records. */
4156 if (TREE_CODE (htype) == ARRAY_TYPE
4157 || POINTER_TYPE_P (htype))
4158 {
4159 wtype = TREE_TYPE (wtype);
4160 htype = TREE_TYPE (htype);
4161 }
4162 }
4163 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4164 return va_list_type_node;
4165
4166 return NULL_TREE;
4167 }
4168
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  VALIST is the va_list lvalue; NEXTARG is the RTX for
   the address of the first anonymous argument.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  /* Expand VALIST for writing, then store NEXTARG into it.  */
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
4178
/* Expand EXP, a call to __builtin_va_start.  Always returns const0_rtx
   since va_start produces no value.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses misuse of the second argument;
     a nonzero return means an error was already reported.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Let the target expand va_start if it has a hook; otherwise use the
     standard pointer-assignment expansion.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
4207
4208 /* Expand EXP, a call to __builtin_va_end. */
4209
4210 static rtx
4211 expand_builtin_va_end (tree exp)
4212 {
4213 tree valist = CALL_EXPR_ARG (exp, 0);
4214
4215 /* Evaluate for side effects, if needed. I hate macros that don't
4216 do that. */
4217 if (TREE_SIDE_EFFECTS (valist))
4218 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4219
4220 return const0_rtx;
4221 }
4222
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* DST needs an lvalue (it is written); SRC is only read.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar/record va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the underlying storage as a block.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4274
4275 /* Expand a call to one of the builtin functions __builtin_frame_address or
4276 __builtin_return_address. */
4277
4278 static rtx
4279 expand_builtin_frame_address (tree fndecl, tree exp)
4280 {
4281 /* The argument must be a nonnegative integer constant.
4282 It counts the number of frames to scan up the stack.
4283 The value is the return address saved in that frame. */
4284 if (call_expr_nargs (exp) == 0)
4285 /* Warning about missing arg was already issued. */
4286 return const0_rtx;
4287 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4288 {
4289 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4290 error ("invalid argument to %<__builtin_frame_address%>");
4291 else
4292 error ("invalid argument to %<__builtin_return_address%>");
4293 return const0_rtx;
4294 }
4295 else
4296 {
4297 rtx tem
4298 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4299 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4300
4301 /* Some ports cannot access arbitrary stack frames. */
4302 if (tem == NULL)
4303 {
4304 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4305 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4306 else
4307 warning (0, "unsupported argument to %<__builtin_return_address%>");
4308 return const0_rtx;
4309 }
4310
4311 /* For __builtin_frame_address, return what we've got. */
4312 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4313 return tem;
4314
4315 if (!REG_P (tem)
4316 && ! CONSTANT_P (tem))
4317 tem = copy_addr_to_reg (tem);
4318 return tem;
4319 }
4320 }
4321
4322 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4323 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4324 is the same as for allocate_dynamic_stack_space. */
4325
4326 static rtx
4327 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4328 {
4329 rtx op0;
4330 rtx result;
4331 bool valid_arglist;
4332 unsigned int align;
4333 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4334 == BUILT_IN_ALLOCA_WITH_ALIGN);
4335
4336 valid_arglist
4337 = (alloca_with_align
4338 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4339 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4340
4341 if (!valid_arglist)
4342 return NULL_RTX;
4343
4344 /* Compute the argument. */
4345 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4346
4347 /* Compute the alignment. */
4348 align = (alloca_with_align
4349 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4350 : BIGGEST_ALIGNMENT);
4351
4352 /* Allocate the desired space. */
4353 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4354 result = convert_memory_address (ptr_mode, result);
4355
4356 return result;
4357 }
4358
4359 /* Expand a call to bswap builtin in EXP.
4360 Return NULL_RTX if a normal call should be emitted rather than expanding the
4361 function in-line. If convenient, the result should be placed in TARGET.
4362 SUBTARGET may be used as the target for computing one of EXP's operands. */
4363
4364 static rtx
4365 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4366 rtx subtarget)
4367 {
4368 tree arg;
4369 rtx op0;
4370
4371 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4372 return NULL_RTX;
4373
4374 arg = CALL_EXPR_ARG (exp, 0);
4375 op0 = expand_expr (arg,
4376 subtarget && GET_MODE (subtarget) == target_mode
4377 ? subtarget : NULL_RTX,
4378 target_mode, EXPAND_NORMAL);
4379 if (GET_MODE (op0) != target_mode)
4380 op0 = convert_to_mode (target_mode, op0, 1);
4381
4382 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4383
4384 gcc_assert (target);
4385
4386 return convert_to_mode (target_mode, target, 1);
4387 }
4388
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.
   OP_OPTAB selects which unary operation (ffs, clz, parity, ...) to emit.  */

static rtx
expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  SUBTARGET is reused only when its mode
     matches the argument's mode.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  The last argument is
     the "unsignedp" flag; clrsb is the one op expanded as signed.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
4417
4418 /* Expand a call to __builtin_expect. We just return our argument
4419 as the builtin_expect semantic should've been already executed by
4420 tree branch prediction pass. */
4421
4422 static rtx
4423 expand_builtin_expect (tree exp, rtx target)
4424 {
4425 tree arg;
4426
4427 if (call_expr_nargs (exp) < 2)
4428 return const0_rtx;
4429 arg = CALL_EXPR_ARG (exp, 0);
4430
4431 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4432 /* When guessing was done, the hints should be already stripped away. */
4433 gcc_assert (!flag_guess_branch_prob
4434 || optimize == 0 || seen_error ());
4435 return target;
4436 }
4437
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  /* The alignment (and optional misalignment) operands must be
     side-effect free, since we do not evaluate them here.  */
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
4454
/* Expand a call to __builtin_trap.  Emit the target's trap insn if it
   has one, otherwise a call to abort, followed by a barrier.  */

void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    {
      rtx insn = emit_insn (gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
4473
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4484
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Stabilize the argument and write the stabilized form back into the
     CALL_EXPR so any later re-expansion sees the same SAVE_EXPR.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
4507
4508 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4509 Return NULL is a normal call should be emitted rather than expanding the
4510 function inline. If convenient, the result should be placed in TARGET.
4511 SUBTARGET may be used as the target for computing the operand. */
4512
4513 static rtx
4514 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4515 {
4516 rtx op0, op1;
4517 tree arg;
4518
4519 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4520 return NULL_RTX;
4521
4522 arg = CALL_EXPR_ARG (exp, 0);
4523 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4524
4525 arg = CALL_EXPR_ARG (exp, 1);
4526 op1 = expand_normal (arg);
4527
4528 return expand_copysign (op0, op1, target);
4529 }
4530
4531 /* Create a new constant string literal and return a char* pointer to it.
4532 The STRING_CST value is the LEN characters at STR. */
4533 tree
4534 build_string_literal (int len, const char *str)
4535 {
4536 tree t, elem, index, type;
4537
4538 t = build_string (len, str);
4539 elem = build_type_variant (char_type_node, 1, 0);
4540 index = build_index_type (size_int (len - 1));
4541 type = build_array_type (elem, index);
4542 TREE_TYPE (t) = type;
4543 TREE_CONSTANT (t) = 1;
4544 TREE_READONLY (t) = 1;
4545 TREE_STATIC (t) = 1;
4546
4547 type = build_pointer_type (elem);
4548 t = build1 (ADDR_EXPR, type,
4549 build4 (ARRAY_REF, elem,
4550 t, integer_zero_node, NULL_TREE, NULL_TREE));
4551 return t;
4552 }
4553
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
	return const0_rtx;
    }
  /* The insn predicate rejected the operands; there is nothing more
     we can do, and the builtin has no effect.  */
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
4602
4603 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4604
4605 static rtx
4606 round_trampoline_addr (rtx tramp)
4607 {
4608 rtx temp, addend, mask;
4609
4610 /* If we don't need too much alignment, we'll have been guaranteed
4611 proper alignment by get_trampoline_type. */
4612 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4613 return tramp;
4614
4615 /* Round address up to desired boundary. */
4616 temp = gen_reg_rtx (Pmode);
4617 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4618 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4619
4620 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4621 temp, 0, OPTAB_LIB_WIDEN);
4622 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4623 temp, 0, OPTAB_LIB_WIDEN);
4624
4625 return tramp;
4626 }
4627
/* Expand a call to __builtin_init_trampoline (ONSTACK true) or
   __builtin_init_heap_trampoline (ONSTACK false).  EXP carries the
   trampoline address, the nested function, and the static chain.  */

static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      /* Record that an executable stack trampoline exists and warn,
	 since this has security implications on some platforms.  */
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
4684
4685 static rtx
4686 expand_builtin_adjust_trampoline (tree exp)
4687 {
4688 rtx tramp;
4689
4690 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4691 return NULL_RTX;
4692
4693 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4694 tramp = round_trampoline_addr (tramp);
4695 if (targetm.calls.trampoline_adjust_address)
4696 tramp = targetm.calls.trampoline_adjust_address (tramp);
4697
4698 return tramp;
4699 }
4700
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* FMODE is the mode of the floating-point argument, RMODE the mode
     of the integral result.  */
  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  /* Wrap ARG so it can be safely evaluated again if we fall back to
     the "ARG < 0.0" expansion below.  */
  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn did not match after all; discard any partially
	 emitted RTL and fall through to the generic expansion.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0". */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			 build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  /* Reinterpret the value as an integer so the sign bit can be masked
     out.  Narrow values use an integer mode of the same size; wider
     values are reduced to the single word containing the sign bit.  */
  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      double_int mask = double_int_zero.set_bit (bitpos);

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
4811
4812 /* Expand fork or exec calls. TARGET is the desired target of the
4813 call. EXP is the call. FN is the
4814 identificator of the actual function. IGNORE is nonzero if the
4815 value is to be ignored. */
4816
4817 static rtx
4818 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4819 {
4820 tree id, decl;
4821 tree call;
4822
4823 /* If we are not profiling, just call the function. */
4824 if (!profile_arc_flag)
4825 return NULL_RTX;
4826
4827 /* Otherwise call the wrapper. This should be equivalent for the rest of
4828 compiler, so the code does not diverge, and the wrapper may run the
4829 code necessary for keeping the profiling sane. */
4830
4831 switch (DECL_FUNCTION_CODE (fn))
4832 {
4833 case BUILT_IN_FORK:
4834 id = get_identifier ("__gcov_fork");
4835 break;
4836
4837 case BUILT_IN_EXECL:
4838 id = get_identifier ("__gcov_execl");
4839 break;
4840
4841 case BUILT_IN_EXECV:
4842 id = get_identifier ("__gcov_execv");
4843 break;
4844
4845 case BUILT_IN_EXECLP:
4846 id = get_identifier ("__gcov_execlp");
4847 break;
4848
4849 case BUILT_IN_EXECLE:
4850 id = get_identifier ("__gcov_execle");
4851 break;
4852
4853 case BUILT_IN_EXECVP:
4854 id = get_identifier ("__gcov_execvp");
4855 break;
4856
4857 case BUILT_IN_EXECVE:
4858 id = get_identifier ("__gcov_execve");
4859 break;
4860
4861 default:
4862 gcc_unreachable ();
4863 }
4864
4865 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4866 FUNCTION_DECL, id, TREE_TYPE (fn));
4867 DECL_EXTERNAL (decl) = 1;
4868 TREE_PUBLIC (decl) = 1;
4869 DECL_ARTIFICIAL (decl) = 1;
4870 TREE_NOTHROW (decl) = 1;
4871 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4872 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4873 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4874 return expand_call (call, target, ignore);
4875 }
4876
4877
4878 \f
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.
     1 << FCODE_DIFF is the operand size in bytes (1, 2, 4, 8 or 16).  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
4894
4895 /* Expand the memory expression LOC and return the appropriate memory operand
4896 for the builtin_sync operations. */
4897
4898 static rtx
4899 get_builtin_sync_mem (tree loc, enum machine_mode mode)
4900 {
4901 rtx addr, mem;
4902
4903 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4904 addr = convert_memory_address (Pmode, addr);
4905
4906 /* Note that we explicitly do not want any alias information for this
4907 memory, so that we kill all other live memories. Otherwise we don't
4908 satisfy the full barrier semantics of the intrinsic. */
4909 mem = validize_mem (gen_rtx_MEM (mode, addr));
4910
4911 /* The alignment needs to be at least according to that of the mode. */
4912 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4913 get_pointer_alignment (loc)));
4914 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4915 MEM_VOLATILE_P (mem) = 1;
4916
4917 return mem;
4918 }
4919
4920 /* Make sure an argument is in the right mode.
4921 EXP is the tree argument.
4922 MODE is the mode it should be in. */
4923
4924 static rtx
4925 expand_expr_force_mode (tree exp, enum machine_mode mode)
4926 {
4927 rtx val;
4928 enum machine_mode old_mode;
4929
4930 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4931 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4932 of CONST_INTs, where we know the old_mode only from the call argument. */
4933
4934 old_mode = GET_MODE (val);
4935 if (old_mode == VOIDmode)
4936 old_mode = TYPE_MODE (TREE_TYPE (exp));
4937 val = convert_modes (mode, old_mode, val, 1);
4938 return val;
4939 }
4940
4941
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  /* The NAND builtins changed meaning in GCC 4.4; when -Wsync-nand is
     enabled, tell the user about it (at most once per kind, per
     compilation, via the function-local statics below).  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* One flag per note so each message appears at most once.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* __sync operations always have sequentially-consistent semantics.  */
  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
				 after);
}
5004
5005 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5006 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5007 true if this is the boolean form. TARGET is a place for us to store the
5008 results; this is NOT optional if IS_BOOL is true. */
5009
5010 static rtx
5011 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5012 bool is_bool, rtx target)
5013 {
5014 rtx old_val, new_val, mem;
5015 rtx *pbool, *poval;
5016
5017 /* Expand the operands. */
5018 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5019 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5020 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5021
5022 pbool = poval = NULL;
5023 if (target != const0_rtx)
5024 {
5025 if (is_bool)
5026 pbool = &target;
5027 else
5028 poval = &target;
5029 }
5030 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5031 false, MEMMODEL_SEQ_CST,
5032 MEMMODEL_SEQ_CST))
5033 return NULL_RTX;
5034
5035 return target;
5036 }
5037
5038 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5039 general form is actually an atomic exchange, and some targets only
5040 support a reduced form with the second argument being a constant 1.
5041 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5042 the results. */
5043
5044 static rtx
5045 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5046 rtx target)
5047 {
5048 rtx val, mem;
5049
5050 /* Expand the operands. */
5051 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5052 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5053
5054 return expand_sync_lock_test_and_set (target, mem, val);
5055 }
5056
5057 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5058
5059 static void
5060 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5061 {
5062 rtx mem;
5063
5064 /* Expand the operands. */
5065 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5066
5067 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5068 }
5069
5070 /* Given an integer representing an ``enum memmodel'', verify its
5071 correctness and return the memory model enum. */
5072
5073 static enum memmodel
5074 get_memmodel (tree exp)
5075 {
5076 rtx op;
5077 unsigned HOST_WIDE_INT val;
5078
5079 /* If the parameter is not a constant, it's a run time value so we'll just
5080 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5081 if (TREE_CODE (exp) != INTEGER_CST)
5082 return MEMMODEL_SEQ_CST;
5083
5084 op = expand_normal (exp);
5085
5086 val = INTVAL (op);
5087 if (targetm.memmodel_check)
5088 val = targetm.memmodel_check (val);
5089 else if (val & ~MEMMODEL_MASK)
5090 {
5091 warning (OPT_Winvalid_memory_model,
5092 "Unknown architecture specifier in memory model to builtin.");
5093 return MEMMODEL_SEQ_CST;
5094 }
5095
5096 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5097 {
5098 warning (OPT_Winvalid_memory_model,
5099 "invalid memory model argument to builtin");
5100 return MEMMODEL_SEQ_CST;
5101 }
5102
5103 return (enum memmodel) val;
5104 }
5105
5106 /* Expand the __atomic_exchange intrinsic:
5107 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5108 EXP is the CALL_EXPR.
5109 TARGET is an optional place for us to store the results. */
5110
5111 static rtx
5112 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5113 {
5114 rtx val, mem;
5115 enum memmodel model;
5116
5117 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5118 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5119 {
5120 error ("invalid memory model for %<__atomic_exchange%>");
5121 return NULL_RTX;
5122 }
5123
5124 if (!flag_inline_atomics)
5125 return NULL_RTX;
5126
5127 /* Expand the operands. */
5128 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5129 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5130
5131 return expand_atomic_exchange (target, mem, val, model);
5132 }
5133
/* Expand the __atomic_compare_exchange intrinsic:
   	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
				        rtx target)
{
  rtx expect, desired, mem, oldval;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  /* The failure model applies to a load, so release-flavored models
     are invalid for it, and it may not be stronger than the success
     model (C11 7.17.7.4).  */
  if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
      || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid failure memory model for %<__atomic_compare_exchange%>");
      return NULL_RTX;
    }

  if (failure > success)
    {
      error ("failure memory model cannot be stronger than success "
	     "memory model for %<__atomic_compare_exchange%>");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* EXPECT is a pointer to the caller's expected value; form a MEM for
     it so the observed value can be written back on failure below.  */
  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  /* WEAK selects the weak form, which is allowed to fail spuriously;
     treat it as weak only when it is a nonzero compile-time constant.  */
  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
    is_weak = true;

  /* OLDVAL starts out aliasing the EXPECT memory; the expander may
     replace it with a register holding the observed value.  */
  oldval = expect;
  if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
				       &oldval, mem, oldval, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* On failure the builtin must store the observed value back through
     *EXPECT; skip the copy if the expander left it in place.  */
  if (oldval != expect)
    emit_move_insn (expect, oldval);

  return target;
}
5195
5196 /* Expand the __atomic_load intrinsic:
5197 TYPE __atomic_load (TYPE *object, enum memmodel)
5198 EXP is the CALL_EXPR.
5199 TARGET is an optional place for us to store the results. */
5200
5201 static rtx
5202 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5203 {
5204 rtx mem;
5205 enum memmodel model;
5206
5207 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5208 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5209 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5210 {
5211 error ("invalid memory model for %<__atomic_load%>");
5212 return NULL_RTX;
5213 }
5214
5215 if (!flag_inline_atomics)
5216 return NULL_RTX;
5217
5218 /* Expand the operand. */
5219 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5220
5221 return expand_atomic_load (target, mem, model);
5222 }
5223
5224
5225 /* Expand the __atomic_store intrinsic:
5226 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5227 EXP is the CALL_EXPR.
5228 TARGET is an optional place for us to store the results. */
5229
5230 static rtx
5231 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5232 {
5233 rtx mem, val;
5234 enum memmodel model;
5235
5236 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5237 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5238 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5239 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5240 {
5241 error ("invalid memory model for %<__atomic_store%>");
5242 return NULL_RTX;
5243 }
5244
5245 if (!flag_inline_atomics)
5246 return NULL_RTX;
5247
5248 /* Expand the operands. */
5249 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5250 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5251
5252 return expand_atomic_store (mem, val, model, false);
5253 }
5254
/* Expand the __atomic_fetch_XXX intrinsic:
   	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  The callee of EXP is
     rewritten in place (and restored below) so expand_call emits a call
     to the EXT_CALL library routine instead.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.
     NOTE(review): the external routine presumably returns the
     pre-operation value (callers pass EXT_CALL only for the OP_fetch
     forms) -- confirm against the call sites in expand_builtin.  For
     NAND (CODE == NOT) the corrected result is ~(ret & val).  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
5323
5324
#ifndef HAVE_atomic_clear
# define HAVE_atomic_clear 0
# define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
#endif

/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  enum machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  /* The object is a bool; operate in the integer mode of that size.  */
  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  /* A clear is a store, so acquire-flavored models are invalid.
     NOTE(review): the diagnostic names __atomic_store although this
     expands __atomic_clear -- looks like a copy-and-paste; confirm
     before changing user-visible wording.  */
  if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
      || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid memory model for %<__atomic_store%>");
      return const0_rtx;
    }

  /* Use the target's dedicated clear pattern when one exists.  */
  if (HAVE_atomic_clear)
    {
      emit_insn (gen_atomic_clear (mem, model));
      return const0_rtx;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
5368
5369 /* Expand an atomic test_and_set operation.
5370 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5371 EXP is the call expression. */
5372
5373 static rtx
5374 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5375 {
5376 rtx mem;
5377 enum memmodel model;
5378 enum machine_mode mode;
5379
5380 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5381 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5382 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5383
5384 return expand_atomic_test_and_set (target, mem, model);
5385 }
5386
5387
/* Return boolean_true_node if (optional) argument ARG1 of size ARG0 is
   always lock free on this architecture, boolean_false_node if not, and
   NULL_TREE when the size is not a compile-time constant.  If ARG1 is a
   literal null pointer, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  enum machine_mode mode;
  unsigned int mode_align, type_align;

  /* The answer can only be computed for a constant size.  */
  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* Find the integer mode of exactly SIZE bits and its natural
     alignment.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  /* A literal null object pointer means "assume typical alignment".  */
  if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
    type_align = mode_align;
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype)))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;
  else
    return boolean_false_node;
}
5440
5441 /* Return true if the parameters to call EXP represent an object which will
5442 always generate lock free instructions. The first argument represents the
5443 size of the object, and the second parameter is a pointer to the object
5444 itself. If NULL is passed for the object, then the result is based on
5445 typical alignment for an object of the specified size. Otherwise return
5446 false. */
5447
5448 static rtx
5449 expand_builtin_atomic_always_lock_free (tree exp)
5450 {
5451 tree size;
5452 tree arg0 = CALL_EXPR_ARG (exp, 0);
5453 tree arg1 = CALL_EXPR_ARG (exp, 1);
5454
5455 if (TREE_CODE (arg0) != INTEGER_CST)
5456 {
5457 error ("non-constant argument 1 to __atomic_always_lock_free");
5458 return const0_rtx;
5459 }
5460
5461 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5462 if (size == boolean_true_node)
5463 return const1_rtx;
5464 return const0_rtx;
5465 }
5466
/* Return boolean_true_node if it can be determined at compile time that
   object ARG1 of size ARG0 is lock free on this architecture; otherwise
   return NULL_TREE so the question is left to a runtime/library call.
   (Note: this never folds to false -- "not provably lock free" is not
   the same as "not lock free".)  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}
5482
5483 /* Return true if the parameters to call EXP represent an object which will
5484 always generate lock free instructions. The first argument represents the
5485 size of the object, and the second parameter is a pointer to the object
5486 itself. If NULL is passed for the object, then the result is based on
5487 typical alignment for an object of the specified size. Otherwise return
5488 NULL*/
5489
5490 static rtx
5491 expand_builtin_atomic_is_lock_free (tree exp)
5492 {
5493 tree size;
5494 tree arg0 = CALL_EXPR_ARG (exp, 0);
5495 tree arg1 = CALL_EXPR_ARG (exp, 1);
5496
5497 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5498 {
5499 error ("non-integer argument 1 to __atomic_is_lock_free");
5500 return NULL_RTX;
5501 }
5502
5503 if (!flag_inline_atomics)
5504 return NULL_RTX;
5505
5506 /* If the value is known at compile time, return the RTX for it. */
5507 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5508 if (size == boolean_true_node)
5509 return const1_rtx;
5510
5511 return NULL_RTX;
5512 }
5513
/* Expand the __atomic_thread_fence intrinsic:
   	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  /* Argument 0 is the memory model; emit the matching fence.  */
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}
5524
/* Expand the __atomic_signal_fence intrinsic:
   	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  /* A signal fence only needs to constrain the compiler, not emit
     hardware barriers, hence the separate expander.  */
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}
5535
/* Expand the __sync_synchronize intrinsic: a full (sequentially
   consistent) memory barrier.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SEQ_CST);
}
5543
5544 static rtx
5545 expand_builtin_thread_pointer (tree exp, rtx target)
5546 {
5547 enum insn_code icode;
5548 if (!validate_arglist (exp, VOID_TYPE))
5549 return const0_rtx;
5550 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5551 if (icode != CODE_FOR_nothing)
5552 {
5553 struct expand_operand op;
5554 if (!REG_P (target) || GET_MODE (target) != Pmode)
5555 target = gen_reg_rtx (Pmode);
5556 create_output_operand (&op, target, Pmode);
5557 expand_insn (icode, 1, &op);
5558 return target;
5559 }
5560 error ("__builtin_thread_pointer is not supported on this target");
5561 return const0_rtx;
5562 }
5563
/* Expand a call to __builtin_set_thread_pointer.  Emits the target's
   set_thread_pointer insn to store the pointer argument into the thread
   pointer register, or reports an error when the target provides no
   such insn.  EXP is the CALL_EXPR; there is no result value.  */

static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* Evaluate the new thread-pointer value in Pmode and feed it to
	 the target insn as its single input operand.  */
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
5582
5583 \f
5584 /* Expand an expression EXP that calls a built-in function,
5585 with result going to TARGET if that's convenient
5586 (and in mode MODE if that's convenient).
5587 SUBTARGET may be used as the target for computing one of EXP's operands.
5588 IGNORE is nonzero if the value is to be ignored. */
5589
5590 rtx
5591 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5592 int ignore)
5593 {
5594 tree fndecl = get_callee_fndecl (exp);
5595 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5596 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5597 int flags;
5598
5599 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5600 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5601
5602 /* When not optimizing, generate calls to library functions for a certain
5603 set of builtins. */
5604 if (!optimize
5605 && !called_as_built_in (fndecl)
5606 && fcode != BUILT_IN_FORK
5607 && fcode != BUILT_IN_EXECL
5608 && fcode != BUILT_IN_EXECV
5609 && fcode != BUILT_IN_EXECLP
5610 && fcode != BUILT_IN_EXECLE
5611 && fcode != BUILT_IN_EXECVP
5612 && fcode != BUILT_IN_EXECVE
5613 && fcode != BUILT_IN_ALLOCA
5614 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5615 && fcode != BUILT_IN_FREE
5616 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5617 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5618 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5619 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5620 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5621 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5622 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5623 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5624 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5625 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5626 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND)
5627 return expand_call (exp, target, ignore);
5628
5629 /* The built-in function expanders test for target == const0_rtx
5630 to determine whether the function's result will be ignored. */
5631 if (ignore)
5632 target = const0_rtx;
5633
5634 /* If the result of a pure or const built-in function is ignored, and
5635 none of its arguments are volatile, we can avoid expanding the
5636 built-in call and just evaluate the arguments for side-effects. */
5637 if (target == const0_rtx
5638 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5639 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5640 {
5641 bool volatilep = false;
5642 tree arg;
5643 call_expr_arg_iterator iter;
5644
5645 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5646 if (TREE_THIS_VOLATILE (arg))
5647 {
5648 volatilep = true;
5649 break;
5650 }
5651
5652 if (! volatilep)
5653 {
5654 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5655 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5656 return const0_rtx;
5657 }
5658 }
5659
5660 switch (fcode)
5661 {
5662 CASE_FLT_FN (BUILT_IN_FABS):
5663 case BUILT_IN_FABSD32:
5664 case BUILT_IN_FABSD64:
5665 case BUILT_IN_FABSD128:
5666 target = expand_builtin_fabs (exp, target, subtarget);
5667 if (target)
5668 return target;
5669 break;
5670
5671 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5672 target = expand_builtin_copysign (exp, target, subtarget);
5673 if (target)
5674 return target;
5675 break;
5676
5677 /* Just do a normal library call if we were unable to fold
5678 the values. */
5679 CASE_FLT_FN (BUILT_IN_CABS):
5680 break;
5681
5682 CASE_FLT_FN (BUILT_IN_EXP):
5683 CASE_FLT_FN (BUILT_IN_EXP10):
5684 CASE_FLT_FN (BUILT_IN_POW10):
5685 CASE_FLT_FN (BUILT_IN_EXP2):
5686 CASE_FLT_FN (BUILT_IN_EXPM1):
5687 CASE_FLT_FN (BUILT_IN_LOGB):
5688 CASE_FLT_FN (BUILT_IN_LOG):
5689 CASE_FLT_FN (BUILT_IN_LOG10):
5690 CASE_FLT_FN (BUILT_IN_LOG2):
5691 CASE_FLT_FN (BUILT_IN_LOG1P):
5692 CASE_FLT_FN (BUILT_IN_TAN):
5693 CASE_FLT_FN (BUILT_IN_ASIN):
5694 CASE_FLT_FN (BUILT_IN_ACOS):
5695 CASE_FLT_FN (BUILT_IN_ATAN):
5696 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5697 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5698 because of possible accuracy problems. */
5699 if (! flag_unsafe_math_optimizations)
5700 break;
5701 CASE_FLT_FN (BUILT_IN_SQRT):
5702 CASE_FLT_FN (BUILT_IN_FLOOR):
5703 CASE_FLT_FN (BUILT_IN_CEIL):
5704 CASE_FLT_FN (BUILT_IN_TRUNC):
5705 CASE_FLT_FN (BUILT_IN_ROUND):
5706 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5707 CASE_FLT_FN (BUILT_IN_RINT):
5708 target = expand_builtin_mathfn (exp, target, subtarget);
5709 if (target)
5710 return target;
5711 break;
5712
5713 CASE_FLT_FN (BUILT_IN_FMA):
5714 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5715 if (target)
5716 return target;
5717 break;
5718
5719 CASE_FLT_FN (BUILT_IN_ILOGB):
5720 if (! flag_unsafe_math_optimizations)
5721 break;
5722 CASE_FLT_FN (BUILT_IN_ISINF):
5723 CASE_FLT_FN (BUILT_IN_FINITE):
5724 case BUILT_IN_ISFINITE:
5725 case BUILT_IN_ISNORMAL:
5726 target = expand_builtin_interclass_mathfn (exp, target);
5727 if (target)
5728 return target;
5729 break;
5730
5731 CASE_FLT_FN (BUILT_IN_ICEIL):
5732 CASE_FLT_FN (BUILT_IN_LCEIL):
5733 CASE_FLT_FN (BUILT_IN_LLCEIL):
5734 CASE_FLT_FN (BUILT_IN_LFLOOR):
5735 CASE_FLT_FN (BUILT_IN_IFLOOR):
5736 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5737 target = expand_builtin_int_roundingfn (exp, target);
5738 if (target)
5739 return target;
5740 break;
5741
5742 CASE_FLT_FN (BUILT_IN_IRINT):
5743 CASE_FLT_FN (BUILT_IN_LRINT):
5744 CASE_FLT_FN (BUILT_IN_LLRINT):
5745 CASE_FLT_FN (BUILT_IN_IROUND):
5746 CASE_FLT_FN (BUILT_IN_LROUND):
5747 CASE_FLT_FN (BUILT_IN_LLROUND):
5748 target = expand_builtin_int_roundingfn_2 (exp, target);
5749 if (target)
5750 return target;
5751 break;
5752
5753 CASE_FLT_FN (BUILT_IN_POWI):
5754 target = expand_builtin_powi (exp, target);
5755 if (target)
5756 return target;
5757 break;
5758
5759 CASE_FLT_FN (BUILT_IN_ATAN2):
5760 CASE_FLT_FN (BUILT_IN_LDEXP):
5761 CASE_FLT_FN (BUILT_IN_SCALB):
5762 CASE_FLT_FN (BUILT_IN_SCALBN):
5763 CASE_FLT_FN (BUILT_IN_SCALBLN):
5764 if (! flag_unsafe_math_optimizations)
5765 break;
5766
5767 CASE_FLT_FN (BUILT_IN_FMOD):
5768 CASE_FLT_FN (BUILT_IN_REMAINDER):
5769 CASE_FLT_FN (BUILT_IN_DREM):
5770 CASE_FLT_FN (BUILT_IN_POW):
5771 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5772 if (target)
5773 return target;
5774 break;
5775
5776 CASE_FLT_FN (BUILT_IN_CEXPI):
5777 target = expand_builtin_cexpi (exp, target);
5778 gcc_assert (target);
5779 return target;
5780
5781 CASE_FLT_FN (BUILT_IN_SIN):
5782 CASE_FLT_FN (BUILT_IN_COS):
5783 if (! flag_unsafe_math_optimizations)
5784 break;
5785 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5786 if (target)
5787 return target;
5788 break;
5789
5790 CASE_FLT_FN (BUILT_IN_SINCOS):
5791 if (! flag_unsafe_math_optimizations)
5792 break;
5793 target = expand_builtin_sincos (exp);
5794 if (target)
5795 return target;
5796 break;
5797
5798 case BUILT_IN_APPLY_ARGS:
5799 return expand_builtin_apply_args ();
5800
5801 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5802 FUNCTION with a copy of the parameters described by
5803 ARGUMENTS, and ARGSIZE. It returns a block of memory
5804 allocated on the stack into which is stored all the registers
5805 that might possibly be used for returning the result of a
5806 function. ARGUMENTS is the value returned by
5807 __builtin_apply_args. ARGSIZE is the number of bytes of
5808 arguments that must be copied. ??? How should this value be
5809 computed? We'll also need a safe worst case value for varargs
5810 functions. */
5811 case BUILT_IN_APPLY:
5812 if (!validate_arglist (exp, POINTER_TYPE,
5813 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5814 && !validate_arglist (exp, REFERENCE_TYPE,
5815 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5816 return const0_rtx;
5817 else
5818 {
5819 rtx ops[3];
5820
5821 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5822 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5823 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5824
5825 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5826 }
5827
5828 /* __builtin_return (RESULT) causes the function to return the
5829 value described by RESULT. RESULT is address of the block of
5830 memory returned by __builtin_apply. */
5831 case BUILT_IN_RETURN:
5832 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5833 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5834 return const0_rtx;
5835
5836 case BUILT_IN_SAVEREGS:
5837 return expand_builtin_saveregs ();
5838
5839 case BUILT_IN_VA_ARG_PACK:
5840 /* All valid uses of __builtin_va_arg_pack () are removed during
5841 inlining. */
5842 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5843 return const0_rtx;
5844
5845 case BUILT_IN_VA_ARG_PACK_LEN:
5846 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5847 inlining. */
5848 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5849 return const0_rtx;
5850
5851 /* Return the address of the first anonymous stack arg. */
5852 case BUILT_IN_NEXT_ARG:
5853 if (fold_builtin_next_arg (exp, false))
5854 return const0_rtx;
5855 return expand_builtin_next_arg ();
5856
5857 case BUILT_IN_CLEAR_CACHE:
5858 target = expand_builtin___clear_cache (exp);
5859 if (target)
5860 return target;
5861 break;
5862
5863 case BUILT_IN_CLASSIFY_TYPE:
5864 return expand_builtin_classify_type (exp);
5865
5866 case BUILT_IN_CONSTANT_P:
5867 return const0_rtx;
5868
5869 case BUILT_IN_FRAME_ADDRESS:
5870 case BUILT_IN_RETURN_ADDRESS:
5871 return expand_builtin_frame_address (fndecl, exp);
5872
5873 /* Returns the address of the area where the structure is returned.
5874 0 otherwise. */
5875 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5876 if (call_expr_nargs (exp) != 0
5877 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5878 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5879 return const0_rtx;
5880 else
5881 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5882
5883 case BUILT_IN_ALLOCA:
5884 case BUILT_IN_ALLOCA_WITH_ALIGN:
5885 /* If the allocation stems from the declaration of a variable-sized
5886 object, it cannot accumulate. */
5887 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
5888 if (target)
5889 return target;
5890 break;
5891
5892 case BUILT_IN_STACK_SAVE:
5893 return expand_stack_save ();
5894
5895 case BUILT_IN_STACK_RESTORE:
5896 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5897 return const0_rtx;
5898
5899 case BUILT_IN_BSWAP16:
5900 case BUILT_IN_BSWAP32:
5901 case BUILT_IN_BSWAP64:
5902 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
5903 if (target)
5904 return target;
5905 break;
5906
5907 CASE_INT_FN (BUILT_IN_FFS):
5908 target = expand_builtin_unop (target_mode, exp, target,
5909 subtarget, ffs_optab);
5910 if (target)
5911 return target;
5912 break;
5913
5914 CASE_INT_FN (BUILT_IN_CLZ):
5915 target = expand_builtin_unop (target_mode, exp, target,
5916 subtarget, clz_optab);
5917 if (target)
5918 return target;
5919 break;
5920
5921 CASE_INT_FN (BUILT_IN_CTZ):
5922 target = expand_builtin_unop (target_mode, exp, target,
5923 subtarget, ctz_optab);
5924 if (target)
5925 return target;
5926 break;
5927
5928 CASE_INT_FN (BUILT_IN_CLRSB):
5929 target = expand_builtin_unop (target_mode, exp, target,
5930 subtarget, clrsb_optab);
5931 if (target)
5932 return target;
5933 break;
5934
5935 CASE_INT_FN (BUILT_IN_POPCOUNT):
5936 target = expand_builtin_unop (target_mode, exp, target,
5937 subtarget, popcount_optab);
5938 if (target)
5939 return target;
5940 break;
5941
5942 CASE_INT_FN (BUILT_IN_PARITY):
5943 target = expand_builtin_unop (target_mode, exp, target,
5944 subtarget, parity_optab);
5945 if (target)
5946 return target;
5947 break;
5948
5949 case BUILT_IN_STRLEN:
5950 target = expand_builtin_strlen (exp, target, target_mode);
5951 if (target)
5952 return target;
5953 break;
5954
5955 case BUILT_IN_STRCPY:
5956 target = expand_builtin_strcpy (exp, target);
5957 if (target)
5958 return target;
5959 break;
5960
5961 case BUILT_IN_STRNCPY:
5962 target = expand_builtin_strncpy (exp, target);
5963 if (target)
5964 return target;
5965 break;
5966
5967 case BUILT_IN_STPCPY:
5968 target = expand_builtin_stpcpy (exp, target, mode);
5969 if (target)
5970 return target;
5971 break;
5972
5973 case BUILT_IN_MEMCPY:
5974 target = expand_builtin_memcpy (exp, target);
5975 if (target)
5976 return target;
5977 break;
5978
5979 case BUILT_IN_MEMPCPY:
5980 target = expand_builtin_mempcpy (exp, target, mode);
5981 if (target)
5982 return target;
5983 break;
5984
5985 case BUILT_IN_MEMSET:
5986 target = expand_builtin_memset (exp, target, mode);
5987 if (target)
5988 return target;
5989 break;
5990
5991 case BUILT_IN_BZERO:
5992 target = expand_builtin_bzero (exp);
5993 if (target)
5994 return target;
5995 break;
5996
5997 case BUILT_IN_STRCMP:
5998 target = expand_builtin_strcmp (exp, target);
5999 if (target)
6000 return target;
6001 break;
6002
6003 case BUILT_IN_STRNCMP:
6004 target = expand_builtin_strncmp (exp, target, mode);
6005 if (target)
6006 return target;
6007 break;
6008
6009 case BUILT_IN_BCMP:
6010 case BUILT_IN_MEMCMP:
6011 target = expand_builtin_memcmp (exp, target, mode);
6012 if (target)
6013 return target;
6014 break;
6015
6016 case BUILT_IN_SETJMP:
6017 /* This should have been lowered to the builtins below. */
6018 gcc_unreachable ();
6019
6020 case BUILT_IN_SETJMP_SETUP:
6021 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6022 and the receiver label. */
6023 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6024 {
6025 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6026 VOIDmode, EXPAND_NORMAL);
6027 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6028 rtx label_r = label_rtx (label);
6029
6030 /* This is copied from the handling of non-local gotos. */
6031 expand_builtin_setjmp_setup (buf_addr, label_r);
6032 nonlocal_goto_handler_labels
6033 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6034 nonlocal_goto_handler_labels);
6035 /* ??? Do not let expand_label treat us as such since we would
6036 not want to be both on the list of non-local labels and on
6037 the list of forced labels. */
6038 FORCED_LABEL (label) = 0;
6039 return const0_rtx;
6040 }
6041 break;
6042
6043 case BUILT_IN_SETJMP_DISPATCHER:
6044 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6045 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6046 {
6047 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6048 rtx label_r = label_rtx (label);
6049
6050 /* Remove the dispatcher label from the list of non-local labels
6051 since the receiver labels have been added to it above. */
6052 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6053 return const0_rtx;
6054 }
6055 break;
6056
6057 case BUILT_IN_SETJMP_RECEIVER:
6058 /* __builtin_setjmp_receiver is passed the receiver label. */
6059 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6060 {
6061 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6062 rtx label_r = label_rtx (label);
6063
6064 expand_builtin_setjmp_receiver (label_r);
6065 return const0_rtx;
6066 }
6067 break;
6068
6069 /* __builtin_longjmp is passed a pointer to an array of five words.
6070 It's similar to the C library longjmp function but works with
6071 __builtin_setjmp above. */
6072 case BUILT_IN_LONGJMP:
6073 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6074 {
6075 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6076 VOIDmode, EXPAND_NORMAL);
6077 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6078
6079 if (value != const1_rtx)
6080 {
6081 error ("%<__builtin_longjmp%> second argument must be 1");
6082 return const0_rtx;
6083 }
6084
6085 expand_builtin_longjmp (buf_addr, value);
6086 return const0_rtx;
6087 }
6088 break;
6089
6090 case BUILT_IN_NONLOCAL_GOTO:
6091 target = expand_builtin_nonlocal_goto (exp);
6092 if (target)
6093 return target;
6094 break;
6095
6096 /* This updates the setjmp buffer that is its argument with the value
6097 of the current stack pointer. */
6098 case BUILT_IN_UPDATE_SETJMP_BUF:
6099 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6100 {
6101 rtx buf_addr
6102 = expand_normal (CALL_EXPR_ARG (exp, 0));
6103
6104 expand_builtin_update_setjmp_buf (buf_addr);
6105 return const0_rtx;
6106 }
6107 break;
6108
6109 case BUILT_IN_TRAP:
6110 expand_builtin_trap ();
6111 return const0_rtx;
6112
6113 case BUILT_IN_UNREACHABLE:
6114 expand_builtin_unreachable ();
6115 return const0_rtx;
6116
6117 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6118 case BUILT_IN_SIGNBITD32:
6119 case BUILT_IN_SIGNBITD64:
6120 case BUILT_IN_SIGNBITD128:
6121 target = expand_builtin_signbit (exp, target);
6122 if (target)
6123 return target;
6124 break;
6125
6126 /* Various hooks for the DWARF 2 __throw routine. */
6127 case BUILT_IN_UNWIND_INIT:
6128 expand_builtin_unwind_init ();
6129 return const0_rtx;
6130 case BUILT_IN_DWARF_CFA:
6131 return virtual_cfa_rtx;
6132 #ifdef DWARF2_UNWIND_INFO
6133 case BUILT_IN_DWARF_SP_COLUMN:
6134 return expand_builtin_dwarf_sp_column ();
6135 case BUILT_IN_INIT_DWARF_REG_SIZES:
6136 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6137 return const0_rtx;
6138 #endif
6139 case BUILT_IN_FROB_RETURN_ADDR:
6140 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6141 case BUILT_IN_EXTRACT_RETURN_ADDR:
6142 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6143 case BUILT_IN_EH_RETURN:
6144 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6145 CALL_EXPR_ARG (exp, 1));
6146 return const0_rtx;
6147 #ifdef EH_RETURN_DATA_REGNO
6148 case BUILT_IN_EH_RETURN_DATA_REGNO:
6149 return expand_builtin_eh_return_data_regno (exp);
6150 #endif
6151 case BUILT_IN_EXTEND_POINTER:
6152 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6153 case BUILT_IN_EH_POINTER:
6154 return expand_builtin_eh_pointer (exp);
6155 case BUILT_IN_EH_FILTER:
6156 return expand_builtin_eh_filter (exp);
6157 case BUILT_IN_EH_COPY_VALUES:
6158 return expand_builtin_eh_copy_values (exp);
6159
6160 case BUILT_IN_VA_START:
6161 return expand_builtin_va_start (exp);
6162 case BUILT_IN_VA_END:
6163 return expand_builtin_va_end (exp);
6164 case BUILT_IN_VA_COPY:
6165 return expand_builtin_va_copy (exp);
6166 case BUILT_IN_EXPECT:
6167 return expand_builtin_expect (exp, target);
6168 case BUILT_IN_ASSUME_ALIGNED:
6169 return expand_builtin_assume_aligned (exp, target);
6170 case BUILT_IN_PREFETCH:
6171 expand_builtin_prefetch (exp);
6172 return const0_rtx;
6173
6174 case BUILT_IN_INIT_TRAMPOLINE:
6175 return expand_builtin_init_trampoline (exp, true);
6176 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6177 return expand_builtin_init_trampoline (exp, false);
6178 case BUILT_IN_ADJUST_TRAMPOLINE:
6179 return expand_builtin_adjust_trampoline (exp);
6180
6181 case BUILT_IN_FORK:
6182 case BUILT_IN_EXECL:
6183 case BUILT_IN_EXECV:
6184 case BUILT_IN_EXECLP:
6185 case BUILT_IN_EXECLE:
6186 case BUILT_IN_EXECVP:
6187 case BUILT_IN_EXECVE:
6188 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6189 if (target)
6190 return target;
6191 break;
6192
6193 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6194 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6195 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6196 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6197 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6198 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6199 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6200 if (target)
6201 return target;
6202 break;
6203
6204 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6205 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6206 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6207 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6208 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6209 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6210 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6211 if (target)
6212 return target;
6213 break;
6214
6215 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6216 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6217 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6218 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6219 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6220 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6221 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6222 if (target)
6223 return target;
6224 break;
6225
6226 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6227 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6228 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6229 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6230 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6231 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6232 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6233 if (target)
6234 return target;
6235 break;
6236
6237 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6238 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6239 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6240 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6241 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6242 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6243 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6244 if (target)
6245 return target;
6246 break;
6247
6248 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6249 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6250 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6251 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6252 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6253 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6254 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6255 if (target)
6256 return target;
6257 break;
6258
6259 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6260 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6261 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6262 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6263 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6264 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6265 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6266 if (target)
6267 return target;
6268 break;
6269
6270 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6271 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6272 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6273 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6274 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6275 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6276 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6277 if (target)
6278 return target;
6279 break;
6280
6281 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6282 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6283 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6284 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6285 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6286 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6287 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6288 if (target)
6289 return target;
6290 break;
6291
6292 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6293 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6294 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6295 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6296 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6297 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6298 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6299 if (target)
6300 return target;
6301 break;
6302
6303 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6304 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6305 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6306 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6307 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6308 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6309 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6310 if (target)
6311 return target;
6312 break;
6313
6314 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6315 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6316 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6317 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6318 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6319 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6320 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6321 if (target)
6322 return target;
6323 break;
6324
6325 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6326 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6327 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6328 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6329 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6330 if (mode == VOIDmode)
6331 mode = TYPE_MODE (boolean_type_node);
6332 if (!target || !register_operand (target, mode))
6333 target = gen_reg_rtx (mode);
6334
6335 mode = get_builtin_sync_mode
6336 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6337 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6338 if (target)
6339 return target;
6340 break;
6341
6342 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6343 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6344 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6345 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6346 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6347 mode = get_builtin_sync_mode
6348 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6349 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6350 if (target)
6351 return target;
6352 break;
6353
6354 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6355 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6356 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6357 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6358 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6359 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6360 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6361 if (target)
6362 return target;
6363 break;
6364
6365 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6366 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6367 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6368 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6369 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6370 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6371 expand_builtin_sync_lock_release (mode, exp);
6372 return const0_rtx;
6373
6374 case BUILT_IN_SYNC_SYNCHRONIZE:
6375 expand_builtin_sync_synchronize ();
6376 return const0_rtx;
6377
6378 case BUILT_IN_ATOMIC_EXCHANGE_1:
6379 case BUILT_IN_ATOMIC_EXCHANGE_2:
6380 case BUILT_IN_ATOMIC_EXCHANGE_4:
6381 case BUILT_IN_ATOMIC_EXCHANGE_8:
6382 case BUILT_IN_ATOMIC_EXCHANGE_16:
6383 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6384 target = expand_builtin_atomic_exchange (mode, exp, target);
6385 if (target)
6386 return target;
6387 break;
6388
6389 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6390 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6391 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6392 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6393 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6394 {
6395 unsigned int nargs, z;
6396 vec<tree, va_gc> *vec;
6397
6398 mode =
6399 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6400 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6401 if (target)
6402 return target;
6403
6404 /* If this is turned into an external library call, the weak parameter
6405 must be dropped to match the expected parameter list. */
6406 nargs = call_expr_nargs (exp);
6407 vec_alloc (vec, nargs - 1);
6408 for (z = 0; z < 3; z++)
6409 vec->quick_push (CALL_EXPR_ARG (exp, z));
6410 /* Skip the boolean weak parameter. */
6411 for (z = 4; z < 6; z++)
6412 vec->quick_push (CALL_EXPR_ARG (exp, z));
6413 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6414 break;
6415 }
6416
6417 case BUILT_IN_ATOMIC_LOAD_1:
6418 case BUILT_IN_ATOMIC_LOAD_2:
6419 case BUILT_IN_ATOMIC_LOAD_4:
6420 case BUILT_IN_ATOMIC_LOAD_8:
6421 case BUILT_IN_ATOMIC_LOAD_16:
6422 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6423 target = expand_builtin_atomic_load (mode, exp, target);
6424 if (target)
6425 return target;
6426 break;
6427
6428 case BUILT_IN_ATOMIC_STORE_1:
6429 case BUILT_IN_ATOMIC_STORE_2:
6430 case BUILT_IN_ATOMIC_STORE_4:
6431 case BUILT_IN_ATOMIC_STORE_8:
6432 case BUILT_IN_ATOMIC_STORE_16:
6433 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6434 target = expand_builtin_atomic_store (mode, exp);
6435 if (target)
6436 return const0_rtx;
6437 break;
6438
6439 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6440 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6441 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6442 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6443 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6444 {
6445 enum built_in_function lib;
6446 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6447 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6448 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6449 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6450 ignore, lib);
6451 if (target)
6452 return target;
6453 break;
6454 }
6455 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6456 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6457 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6458 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6459 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6460 {
6461 enum built_in_function lib;
6462 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6463 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6464 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6465 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6466 ignore, lib);
6467 if (target)
6468 return target;
6469 break;
6470 }
6471 case BUILT_IN_ATOMIC_AND_FETCH_1:
6472 case BUILT_IN_ATOMIC_AND_FETCH_2:
6473 case BUILT_IN_ATOMIC_AND_FETCH_4:
6474 case BUILT_IN_ATOMIC_AND_FETCH_8:
6475 case BUILT_IN_ATOMIC_AND_FETCH_16:
6476 {
6477 enum built_in_function lib;
6478 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6479 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6480 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6481 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6482 ignore, lib);
6483 if (target)
6484 return target;
6485 break;
6486 }
6487 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6488 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6489 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6490 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6491 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6492 {
6493 enum built_in_function lib;
6494 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6495 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6496 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6497 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6498 ignore, lib);
6499 if (target)
6500 return target;
6501 break;
6502 }
6503 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6504 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6505 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6506 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6507 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6508 {
6509 enum built_in_function lib;
6510 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6511 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6512 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6513 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6514 ignore, lib);
6515 if (target)
6516 return target;
6517 break;
6518 }
6519 case BUILT_IN_ATOMIC_OR_FETCH_1:
6520 case BUILT_IN_ATOMIC_OR_FETCH_2:
6521 case BUILT_IN_ATOMIC_OR_FETCH_4:
6522 case BUILT_IN_ATOMIC_OR_FETCH_8:
6523 case BUILT_IN_ATOMIC_OR_FETCH_16:
6524 {
6525 enum built_in_function lib;
6526 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6527 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6528 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6529 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6530 ignore, lib);
6531 if (target)
6532 return target;
6533 break;
6534 }
6535 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6536 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6537 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6538 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6539 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6540 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6541 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6542 ignore, BUILT_IN_NONE);
6543 if (target)
6544 return target;
6545 break;
6546
6547 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6548 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6549 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6550 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6551 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6552 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6553 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6554 ignore, BUILT_IN_NONE);
6555 if (target)
6556 return target;
6557 break;
6558
6559 case BUILT_IN_ATOMIC_FETCH_AND_1:
6560 case BUILT_IN_ATOMIC_FETCH_AND_2:
6561 case BUILT_IN_ATOMIC_FETCH_AND_4:
6562 case BUILT_IN_ATOMIC_FETCH_AND_8:
6563 case BUILT_IN_ATOMIC_FETCH_AND_16:
6564 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6565 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6566 ignore, BUILT_IN_NONE);
6567 if (target)
6568 return target;
6569 break;
6570
6571 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6572 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6573 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6574 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6575 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6576 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6577 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6578 ignore, BUILT_IN_NONE);
6579 if (target)
6580 return target;
6581 break;
6582
6583 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6584 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6585 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6586 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6587 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6588 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6589 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6590 ignore, BUILT_IN_NONE);
6591 if (target)
6592 return target;
6593 break;
6594
6595 case BUILT_IN_ATOMIC_FETCH_OR_1:
6596 case BUILT_IN_ATOMIC_FETCH_OR_2:
6597 case BUILT_IN_ATOMIC_FETCH_OR_4:
6598 case BUILT_IN_ATOMIC_FETCH_OR_8:
6599 case BUILT_IN_ATOMIC_FETCH_OR_16:
6600 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6601 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6602 ignore, BUILT_IN_NONE);
6603 if (target)
6604 return target;
6605 break;
6606
6607 case BUILT_IN_ATOMIC_TEST_AND_SET:
6608 return expand_builtin_atomic_test_and_set (exp, target);
6609
6610 case BUILT_IN_ATOMIC_CLEAR:
6611 return expand_builtin_atomic_clear (exp);
6612
6613 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6614 return expand_builtin_atomic_always_lock_free (exp);
6615
6616 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6617 target = expand_builtin_atomic_is_lock_free (exp);
6618 if (target)
6619 return target;
6620 break;
6621
6622 case BUILT_IN_ATOMIC_THREAD_FENCE:
6623 expand_builtin_atomic_thread_fence (exp);
6624 return const0_rtx;
6625
6626 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6627 expand_builtin_atomic_signal_fence (exp);
6628 return const0_rtx;
6629
6630 case BUILT_IN_OBJECT_SIZE:
6631 return expand_builtin_object_size (exp);
6632
6633 case BUILT_IN_MEMCPY_CHK:
6634 case BUILT_IN_MEMPCPY_CHK:
6635 case BUILT_IN_MEMMOVE_CHK:
6636 case BUILT_IN_MEMSET_CHK:
6637 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6638 if (target)
6639 return target;
6640 break;
6641
6642 case BUILT_IN_STRCPY_CHK:
6643 case BUILT_IN_STPCPY_CHK:
6644 case BUILT_IN_STRNCPY_CHK:
6645 case BUILT_IN_STPNCPY_CHK:
6646 case BUILT_IN_STRCAT_CHK:
6647 case BUILT_IN_STRNCAT_CHK:
6648 case BUILT_IN_SNPRINTF_CHK:
6649 case BUILT_IN_VSNPRINTF_CHK:
6650 maybe_emit_chk_warning (exp, fcode);
6651 break;
6652
6653 case BUILT_IN_SPRINTF_CHK:
6654 case BUILT_IN_VSPRINTF_CHK:
6655 maybe_emit_sprintf_chk_warning (exp, fcode);
6656 break;
6657
6658 case BUILT_IN_FREE:
6659 if (warn_free_nonheap_object)
6660 maybe_emit_free_warning (exp);
6661 break;
6662
6663 case BUILT_IN_THREAD_POINTER:
6664 return expand_builtin_thread_pointer (exp, target);
6665
6666 case BUILT_IN_SET_THREAD_POINTER:
6667 expand_builtin_set_thread_pointer (exp);
6668 return const0_rtx;
6669
6670 case BUILT_IN_CILK_DETACH:
6671 expand_builtin_cilk_detach (exp);
6672 return const0_rtx;
6673
6674 case BUILT_IN_CILK_POP_FRAME:
6675 expand_builtin_cilk_pop_frame (exp);
6676 return const0_rtx;
6677
6678 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6679 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6680 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6681 return expand_normal (CALL_EXPR_ARG (exp, 0));
6682
6683 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6684 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6685 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6686 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6687 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6688 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6689 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6690 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6691 /* We allow user CHKP builtins if Pointer Bounds
6692 Checker is off. */
6693 if (!flag_check_pointer_bounds)
6694 {
6695 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6696 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
6697 return expand_normal (CALL_EXPR_ARG (exp, 0));
6698 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6699 return expand_normal (size_zero_node);
6700 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6701 return expand_normal (size_int (-1));
6702 else
6703 return const0_rtx;
6704 }
6705 /* FALLTHROUGH */
6706
6707 case BUILT_IN_CHKP_BNDMK:
6708 case BUILT_IN_CHKP_BNDSTX:
6709 case BUILT_IN_CHKP_BNDCL:
6710 case BUILT_IN_CHKP_BNDCU:
6711 case BUILT_IN_CHKP_BNDLDX:
6712 case BUILT_IN_CHKP_BNDRET:
6713 case BUILT_IN_CHKP_INTERSECT:
6714 case BUILT_IN_CHKP_ARG_BND:
6715 case BUILT_IN_CHKP_NARROW:
6716 case BUILT_IN_CHKP_EXTRACT_LOWER:
6717 case BUILT_IN_CHKP_EXTRACT_UPPER:
6718 /* Software implementation of pointers checker is NYI.
6719 Target support is required. */
6720 error ("Your target platform does not support -fcheck-pointers");
6721 break;
6722
6723 default: /* just do library call, if unknown builtin */
6724 break;
6725 }
6726
6727 /* The switch statement above can drop through to cause the function
6728 to be called normally. */
6729 return expand_call (exp, target, ignore);
6730 }
6731
6732 /* Determine whether a tree node represents a call to a built-in
6733 function. If the tree T is a call to a built-in function with
6734 the right number of arguments of the appropriate types, return
6735 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6736 Otherwise the return value is END_BUILTINS. */
6737
6738 enum built_in_function
6739 builtin_mathfn_code (const_tree t)
6740 {
6741 const_tree fndecl, arg, parmlist;
6742 const_tree argtype, parmtype;
6743 const_call_expr_arg_iterator iter;
6744
6745 if (TREE_CODE (t) != CALL_EXPR
6746 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6747 return END_BUILTINS;
6748
6749 fndecl = get_callee_fndecl (t);
6750 if (fndecl == NULL_TREE
6751 || TREE_CODE (fndecl) != FUNCTION_DECL
6752 || ! DECL_BUILT_IN (fndecl)
6753 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6754 return END_BUILTINS;
6755
6756 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6757 init_const_call_expr_arg_iterator (t, &iter);
6758 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6759 {
6760 /* If a function doesn't take a variable number of arguments,
6761 the last element in the list will have type `void'. */
6762 parmtype = TREE_VALUE (parmlist);
6763 if (VOID_TYPE_P (parmtype))
6764 {
6765 if (more_const_call_expr_args_p (&iter))
6766 return END_BUILTINS;
6767 return DECL_FUNCTION_CODE (fndecl);
6768 }
6769
6770 if (! more_const_call_expr_args_p (&iter))
6771 return END_BUILTINS;
6772
6773 arg = next_const_call_expr_arg (&iter);
6774 argtype = TREE_TYPE (arg);
6775
6776 if (SCALAR_FLOAT_TYPE_P (parmtype))
6777 {
6778 if (! SCALAR_FLOAT_TYPE_P (argtype))
6779 return END_BUILTINS;
6780 }
6781 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6782 {
6783 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6784 return END_BUILTINS;
6785 }
6786 else if (POINTER_TYPE_P (parmtype))
6787 {
6788 if (! POINTER_TYPE_P (argtype))
6789 return END_BUILTINS;
6790 }
6791 else if (INTEGRAL_TYPE_P (parmtype))
6792 {
6793 if (! INTEGRAL_TYPE_P (argtype))
6794 return END_BUILTINS;
6795 }
6796 else
6797 return END_BUILTINS;
6798 }
6799
6800 /* Variable-length argument list. */
6801 return DECL_FUNCTION_CODE (fndecl);
6802 }
6803
6804 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6805 evaluate to a constant. */
6806
6807 static tree
6808 fold_builtin_constant_p (tree arg)
6809 {
6810 /* We return 1 for a numeric type that's known to be a constant
6811 value at compile-time or for an aggregate type that's a
6812 literal constant. */
6813 STRIP_NOPS (arg);
6814
6815 /* If we know this is a constant, emit the constant of one. */
6816 if (CONSTANT_CLASS_P (arg)
6817 || (TREE_CODE (arg) == CONSTRUCTOR
6818 && TREE_CONSTANT (arg)))
6819 return integer_one_node;
6820 if (TREE_CODE (arg) == ADDR_EXPR)
6821 {
6822 tree op = TREE_OPERAND (arg, 0);
6823 if (TREE_CODE (op) == STRING_CST
6824 || (TREE_CODE (op) == ARRAY_REF
6825 && integer_zerop (TREE_OPERAND (op, 1))
6826 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6827 return integer_one_node;
6828 }
6829
6830 /* If this expression has side effects, show we don't know it to be a
6831 constant. Likewise if it's a pointer or aggregate type since in
6832 those case we only want literals, since those are only optimized
6833 when generating RTL, not later.
6834 And finally, if we are compiling an initializer, not code, we
6835 need to return a definite result now; there's not going to be any
6836 more optimization done. */
6837 if (TREE_SIDE_EFFECTS (arg)
6838 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6839 || POINTER_TYPE_P (TREE_TYPE (arg))
6840 || cfun == 0
6841 || folding_initializer
6842 || force_folding_builtin_constant_p)
6843 return integer_zero_node;
6844
6845 return NULL_TREE;
6846 }
6847
6848 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6849 return it as a truthvalue. */
6850
6851 static tree
6852 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6853 {
6854 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6855
6856 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6857 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6858 ret_type = TREE_TYPE (TREE_TYPE (fn));
6859 pred_type = TREE_VALUE (arg_types);
6860 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6861
6862 pred = fold_convert_loc (loc, pred_type, pred);
6863 expected = fold_convert_loc (loc, expected_type, expected);
6864 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6865
6866 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6867 build_int_cst (ret_type, 0));
6868 }
6869
/* Fold a call to builtin_expect with arguments ARG0 (the value) and
   ARG1 (the expected value).  Return the simplified replacement tree,
   or NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (TREE_CODE (inner_arg0) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  /* A nested __builtin_expect: keep the whole outer expression ARG0
     (which still contains the inner call) unchanged.  */
  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* For a && b / a || b, push the expectation onto each operand
     separately, then rebuild the short-circuit expression.  */
  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Strip component/array references down to the underlying decl.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* The address of a weak symbol may still turn out to be zero at
	 link time, so it cannot be treated as a known constant.  */
      if ((TREE_CODE (inner) == VAR_DECL
           || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
6942
6943 /* Fold a call to __builtin_classify_type with argument ARG. */
6944
6945 static tree
6946 fold_builtin_classify_type (tree arg)
6947 {
6948 if (arg == 0)
6949 return build_int_cst (integer_type_node, no_type_class);
6950
6951 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
6952 }
6953
6954 /* Fold a call to __builtin_strlen with argument ARG. */
6955
6956 static tree
6957 fold_builtin_strlen (location_t loc, tree type, tree arg)
6958 {
6959 if (!validate_arg (arg, POINTER_TYPE))
6960 return NULL_TREE;
6961 else
6962 {
6963 tree len = c_strlen (arg, 0);
6964
6965 if (len)
6966 return fold_convert_loc (loc, type, len);
6967
6968 return NULL_TREE;
6969 }
6970 }
6971
6972 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6973
6974 static tree
6975 fold_builtin_inf (location_t loc, tree type, int warn)
6976 {
6977 REAL_VALUE_TYPE real;
6978
6979 /* __builtin_inff is intended to be usable to define INFINITY on all
6980 targets. If an infinity is not available, INFINITY expands "to a
6981 positive constant of type float that overflows at translation
6982 time", footnote "In this case, using INFINITY will violate the
6983 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6984 Thus we pedwarn to ensure this constraint violation is
6985 diagnosed. */
6986 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6987 pedwarn (loc, 0, "target format does not support infinity");
6988
6989 real_inf (&real);
6990 return build_real (type, real);
6991 }
6992
6993 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6994
6995 static tree
6996 fold_builtin_nan (tree arg, tree type, int quiet)
6997 {
6998 REAL_VALUE_TYPE real;
6999 const char *str;
7000
7001 if (!validate_arg (arg, POINTER_TYPE))
7002 return NULL_TREE;
7003 str = c_getstr (arg);
7004 if (!str)
7005 return NULL_TREE;
7006
7007 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7008 return NULL_TREE;
7009
7010 return build_real (type, real);
7011 }
7012
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.
   Conservative: returning false only means we could not prove it.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    /* An int -> float conversion is integral by construction.  */
    case FLOAT_EXPR:
      return true;

    /* These preserve integrality of their single operand.  */
    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    /* The value of these expressions is their second operand.  */
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    /* These operations map integral operands to integral results.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    /* A conditional is integral when both of its arms are.  */
    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	/* A conversion from an integer type is integral; a conversion
	   between real types preserves integrality of its operand.  */
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* The rounding builtins produce integral values (or Inf/NaN,
	   which we accept) by definition.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	/* fmin/fmax return one of their two operands, so they are
	   integral when both operands are.  */
	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
	    && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  /* Anything we could not analyze: assume non-integral.  */
  return false;
}
7084
7085 /* FNDECL is assumed to be a builtin where truncation can be propagated
7086 across (for instance floor((double)f) == (double)floorf (f).
7087 Do the transformation for a call with argument ARG. */
7088
7089 static tree
7090 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7091 {
7092 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7093
7094 if (!validate_arg (arg, REAL_TYPE))
7095 return NULL_TREE;
7096
7097 /* Integer rounding functions are idempotent. */
7098 if (fcode == builtin_mathfn_code (arg))
7099 return arg;
7100
7101 /* If argument is already integer valued, and we don't need to worry
7102 about setting errno, there's no need to perform rounding. */
7103 if (! flag_errno_math && integer_valued_real_p (arg))
7104 return arg;
7105
7106 if (optimize)
7107 {
7108 tree arg0 = strip_float_extensions (arg);
7109 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7110 tree newtype = TREE_TYPE (arg0);
7111 tree decl;
7112
7113 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7114 && (decl = mathfn_built_in (newtype, fcode)))
7115 return fold_convert_loc (loc, ftype,
7116 build_call_expr_loc (loc, decl, 1,
7117 fold_convert_loc (loc,
7118 newtype,
7119 arg0)));
7120 }
7121 return NULL_TREE;
7122 }
7123
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  Return the
   folded tree or NULL_TREE if no simplification applies.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      /* Narrow e.g. lround ((double) f) to lroundf (f) when a builtin
	 for the narrower argument type exists.  */
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_ICEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_IFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_IROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_IRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* Convert back to the original (int) result type.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* Convert back to the original (long long) result type.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
7229
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  FNDECL is the cabs builtin itself, used to rebuild the
   call when only the argument can be simplified.  Return NULL_TREE if
   no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant:
     cabs (a+bi) == hypot (a, b).  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  STRIP_NOPS (real);
	  return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, real),
			      build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Expand cabs (z) as sqrt (re*re + im*im) when unsafe math is
     allowed.  Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Save ARG (and the extracted parts) so each is evaluated
	     only once despite being used twice below.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				fold_build2_loc (loc, MULT_EXPR, type,
					     rpart, rpart),
				fold_build2_loc (loc, MULT_EXPR, type,
					     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7307
7308 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7309 complex tree type of the result. If NEG is true, the imaginary
7310 zero is negative. */
7311
7312 static tree
7313 build_complex_cproj (tree type, bool neg)
7314 {
7315 REAL_VALUE_TYPE rinf, rzero = dconst0;
7316
7317 real_inf (&rinf);
7318 rzero.sign = neg;
7319 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7320 build_real (TREE_TYPE (type), rzero));
7321 }
7322
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  cproj is the identity
     for finite arguments.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* Any infinity projects to (inf + 0i) with the imaginary part
	 carrying the sign of the original imaginary part.  */
      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
7378
7379 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7380 Return NULL_TREE if no simplification can be made. */
7381
7382 static tree
7383 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7384 {
7385
7386 enum built_in_function fcode;
7387 tree res;
7388
7389 if (!validate_arg (arg, REAL_TYPE))
7390 return NULL_TREE;
7391
7392 /* Calculate the result when the argument is a constant. */
7393 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7394 return res;
7395
7396 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7397 fcode = builtin_mathfn_code (arg);
7398 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7399 {
7400 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7401 arg = fold_build2_loc (loc, MULT_EXPR, type,
7402 CALL_EXPR_ARG (arg, 0),
7403 build_real (type, dconsthalf));
7404 return build_call_expr_loc (loc, expfn, 1, arg);
7405 }
7406
7407 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7408 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7409 {
7410 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7411
7412 if (powfn)
7413 {
7414 tree arg0 = CALL_EXPR_ARG (arg, 0);
7415 tree tree_root;
7416 /* The inner root was either sqrt or cbrt. */
7417 /* This was a conditional expression but it triggered a bug
7418 in Sun C 5.5. */
7419 REAL_VALUE_TYPE dconstroot;
7420 if (BUILTIN_SQRT_P (fcode))
7421 dconstroot = dconsthalf;
7422 else
7423 dconstroot = dconst_third ();
7424
7425 /* Adjust for the outer root. */
7426 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7427 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7428 tree_root = build_real (type, dconstroot);
7429 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7430 }
7431 }
7432
7433 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7434 if (flag_unsafe_math_optimizations
7435 && (fcode == BUILT_IN_POW
7436 || fcode == BUILT_IN_POWF
7437 || fcode == BUILT_IN_POWL))
7438 {
7439 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7440 tree arg0 = CALL_EXPR_ARG (arg, 0);
7441 tree arg1 = CALL_EXPR_ARG (arg, 1);
7442 tree narg1;
7443 if (!tree_expr_nonnegative_p (arg0))
7444 arg0 = build1 (ABS_EXPR, type, arg0);
7445 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7446 build_real (type, dconsthalf));
7447 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7448 }
7449
7450 return NULL_TREE;
7451 }
7452
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   TYPE is the return type.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      /* Halving 1/3's exponent yields the combined root 1/6.  */
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/3 * 1/3 == 1/9.  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					 build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7543
7544 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7545 TYPE is the type of the return value. Return NULL_TREE if no
7546 simplification can be made. */
7547
7548 static tree
7549 fold_builtin_cos (location_t loc,
7550 tree arg, tree type, tree fndecl)
7551 {
7552 tree res, narg;
7553
7554 if (!validate_arg (arg, REAL_TYPE))
7555 return NULL_TREE;
7556
7557 /* Calculate the result when the argument is a constant. */
7558 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7559 return res;
7560
7561 /* Optimize cos(-x) into cos (x). */
7562 if ((narg = fold_strip_sign_ops (arg)))
7563 return build_call_expr_loc (loc, fndecl, 1, narg);
7564
7565 return NULL_TREE;
7566 }
7567
7568 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7569 Return NULL_TREE if no simplification can be made. */
7570
7571 static tree
7572 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7573 {
7574 if (validate_arg (arg, REAL_TYPE))
7575 {
7576 tree res, narg;
7577
7578 /* Calculate the result when the argument is a constant. */
7579 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7580 return res;
7581
7582 /* Optimize cosh(-x) into cosh (x). */
7583 if ((narg = fold_strip_sign_ops (arg)))
7584 return build_call_expr_loc (loc, fndecl, 1, narg);
7585 }
7586
7587 return NULL_TREE;
7588 }
7589
7590 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7591 argument ARG. TYPE is the type of the return value. Return
7592 NULL_TREE if no simplification can be made. */
7593
7594 static tree
7595 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7596 bool hyper)
7597 {
7598 if (validate_arg (arg, COMPLEX_TYPE)
7599 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7600 {
7601 tree tmp;
7602
7603 /* Calculate the result when the argument is a constant. */
7604 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7605 return tmp;
7606
7607 /* Optimize fn(-x) into fn(x). */
7608 if ((tmp = fold_strip_sign_ops (arg)))
7609 return build_call_expr_loc (loc, fndecl, 1, tmp);
7610 }
7611
7612 return NULL_TREE;
7613 }
7614
7615 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7616 Return NULL_TREE if no simplification can be made. */
7617
7618 static tree
7619 fold_builtin_tan (tree arg, tree type)
7620 {
7621 enum built_in_function fcode;
7622 tree res;
7623
7624 if (!validate_arg (arg, REAL_TYPE))
7625 return NULL_TREE;
7626
7627 /* Calculate the result when the argument is a constant. */
7628 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7629 return res;
7630
7631 /* Optimize tan(atan(x)) = x. */
7632 fcode = builtin_mathfn_code (arg);
7633 if (flag_unsafe_math_optimizations
7634 && (fcode == BUILT_IN_ATAN
7635 || fcode == BUILT_IN_ATANF
7636 || fcode == BUILT_IN_ATANL))
7637 return CALL_EXPR_ARG (arg, 0);
7638
7639 return NULL_TREE;
7640 }
7641
/* Fold function call to builtin sincos, sincosf, or sincosl with
   arguments ARG0 (the angle), ARG1 (pointer receiving sin) and ARG2
   (pointer receiving cos).  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi: cexpi(x) == cos(x) + i*sin(x), so
     the real part gives cos and the imaginary part gives sin.  Only
     valid when libc provides the C99 complex math functions.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Save the call so the two part-extractions below evaluate it only
     once.  */
  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  /* Emit *arg1 = imagpart (cexpi (arg0)); *arg2 = realpart (...).  */
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
7681
/* Fold function call to builtin cexp, cexpf, or cexpl with argument
   ARG0.  TYPE is the (complex) return type.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  /* RTYPE is the scalar real type underlying the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  Requires libc support for the C99 complex math
     functions.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp (0 + yi) == cexpi (y).  */
  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Save both calls: each result is used twice below.  */
      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      /* Build exp(r)*real(cexpi(i)) + I*exp(r)*imag(cexpi(i)).  */
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			  fold_build2_loc (loc, MULT_EXPR, rtype,
				       rcall,
			 	       fold_build1_loc (loc, REALPART_EXPR,
						    rtype, icall)),
			  fold_build2_loc (loc, MULT_EXPR, rtype,
				       rcall,
				       fold_build1_loc (loc, IMAGPART_EXPR,
						    rtype, icall)));
    }

  return NULL_TREE;
}
7749
7750 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7751 Return NULL_TREE if no simplification can be made. */
7752
7753 static tree
7754 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7755 {
7756 if (!validate_arg (arg, REAL_TYPE))
7757 return NULL_TREE;
7758
7759 /* Optimize trunc of constant value. */
7760 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7761 {
7762 REAL_VALUE_TYPE r, x;
7763 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7764
7765 x = TREE_REAL_CST (arg);
7766 real_trunc (&r, TYPE_MODE (type), &x);
7767 return build_real (type, r);
7768 }
7769
7770 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7771 }
7772
7773 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7774 Return NULL_TREE if no simplification can be made. */
7775
7776 static tree
7777 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7778 {
7779 if (!validate_arg (arg, REAL_TYPE))
7780 return NULL_TREE;
7781
7782 /* Optimize floor of constant value. */
7783 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7784 {
7785 REAL_VALUE_TYPE x;
7786
7787 x = TREE_REAL_CST (arg);
7788 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7789 {
7790 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7791 REAL_VALUE_TYPE r;
7792
7793 real_floor (&r, TYPE_MODE (type), &x);
7794 return build_real (type, r);
7795 }
7796 }
7797
7798 /* Fold floor (x) where x is nonnegative to trunc (x). */
7799 if (tree_expr_nonnegative_p (arg))
7800 {
7801 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7802 if (truncfn)
7803 return build_call_expr_loc (loc, truncfn, 1, arg);
7804 }
7805
7806 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7807 }
7808
7809 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7810 Return NULL_TREE if no simplification can be made. */
7811
7812 static tree
7813 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7814 {
7815 if (!validate_arg (arg, REAL_TYPE))
7816 return NULL_TREE;
7817
7818 /* Optimize ceil of constant value. */
7819 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7820 {
7821 REAL_VALUE_TYPE x;
7822
7823 x = TREE_REAL_CST (arg);
7824 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7825 {
7826 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7827 REAL_VALUE_TYPE r;
7828
7829 real_ceil (&r, TYPE_MODE (type), &x);
7830 return build_real (type, r);
7831 }
7832 }
7833
7834 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7835 }
7836
7837 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7838 Return NULL_TREE if no simplification can be made. */
7839
7840 static tree
7841 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7842 {
7843 if (!validate_arg (arg, REAL_TYPE))
7844 return NULL_TREE;
7845
7846 /* Optimize round of constant value. */
7847 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7848 {
7849 REAL_VALUE_TYPE x;
7850
7851 x = TREE_REAL_CST (arg);
7852 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7853 {
7854 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7855 REAL_VALUE_TYPE r;
7856
7857 real_round (&r, TYPE_MODE (type), &x);
7858 return build_real (type, r);
7859 }
7860 }
7861
7862 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7863 }
7864
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Infinities and NaNs have no meaningful integer result; leave
	 them for the library call.  */
      if (real_isfinite (&x))
	{
	  /* ITYPE is the integer result type, FTYPE the float argument
	     type of the builtin being folded.  */
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  double_int val;
	  REAL_VALUE_TYPE r;

	  /* Apply the rounding mode implied by the builtin's family.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Convert to a two-word integer; only fold when the value is
	     representable in the result type.  */
	  real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
	  if (double_int_fits_to_tree_p (itype, val))
	    return double_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
7932
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant is held as a (HI, LO) pair of host words; WIDTH is
	 the precision of the argument type and RESULT the folded value.  */
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < HOST_BITS_PER_DOUBLE_INT)
	    hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~(HOST_WIDE_INT_M1U << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* ffs: 1-based index of the least significant set bit,
	     0 when no bit is set.  */
	  if (lo != 0)
	    result = ffs_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* clz: leading zero count; at zero only fold when the target
	     defines a value for it.  */
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* ctz: trailing zero count; same caveat at zero as clz.  */
	  if (lo != 0)
	    result = ctz_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  /* clrsb: number of leading redundant sign bits.  Negative
	     values are complemented first so the count reduces to a
	     leading-zero computation.  */
	  if (width > 2 * HOST_BITS_PER_WIDE_INT)
	    return NULL_TREE;
	  if (width > HOST_BITS_PER_WIDE_INT
	      && (hi & ((unsigned HOST_WIDE_INT) 1
			<< (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
	    {
	      hi = ~hi & ~(HOST_WIDE_INT_M1U
			   << (width - HOST_BITS_PER_WIDE_INT - 1));
	      lo = ~lo;
	    }
	  else if (width <= HOST_BITS_PER_WIDE_INT
		   && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
	    lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 2;
	  else
	    result = width - 1;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  /* popcount: count set bits by clearing the lowest set bit
	     once per iteration (Kernighan's method), word by word.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  /* parity: popcount reduced modulo 2.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
8046
/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The input and output values are each a (HI, LO) pair of host
	 words; WIDTH is the precision of the result type.  */
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP16:
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    int s;

	    /* Copy each byte from source position S to the mirrored
	       destination position D, selecting the word (LO or HI)
	       that holds the byte on either side.  */
	    for (s = 0; s < width; s += 8)
	      {
		int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;

		if (s < HOST_BITS_PER_WIDE_INT)
		  byte = (lo >> s) & 0xff;
		else
		  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		if (d < HOST_BITS_PER_WIDE_INT)
		  r_lo |= byte << d;
		else
		  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
	      }
	  }

	  break;

	default:
	  gcc_unreachable ();
	}

      /* Narrow results fit in a single host word.  */
      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (type, r_lo);
      else
	return build_int_cst_wide (type, r_lo, r_hi);
    }

  return NULL_TREE;
}
8105
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can me made.
   FUNC is the corresponding MPFR logarithm function; it also selects
   which logarithm base (e, 2 or 10) is being folded, by comparing the
   pointer against mpfr_log, mpfr_log2 and mpfr_log10 below.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      /* If ARG is itself a math builtin call, FCODE identifies it.  */
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
8198
8199 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8200 NULL_TREE if no simplification can be made. */
8201
8202 static tree
8203 fold_builtin_hypot (location_t loc, tree fndecl,
8204 tree arg0, tree arg1, tree type)
8205 {
8206 tree res, narg0, narg1;
8207
8208 if (!validate_arg (arg0, REAL_TYPE)
8209 || !validate_arg (arg1, REAL_TYPE))
8210 return NULL_TREE;
8211
8212 /* Calculate the result when the argument is a constant. */
8213 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8214 return res;
8215
8216 /* If either argument to hypot has a negate or abs, strip that off.
8217 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8218 narg0 = fold_strip_sign_ops (arg0);
8219 narg1 = fold_strip_sign_ops (arg1);
8220 if (narg0 || narg1)
8221 {
8222 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8223 narg1 ? narg1 : arg1);
8224 }
8225
8226 /* If either argument is zero, hypot is fabs of the other. */
8227 if (real_zerop (arg0))
8228 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8229 else if (real_zerop (arg1))
8230 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8231
8232 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8233 if (flag_unsafe_math_optimizations
8234 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8235 {
8236 const REAL_VALUE_TYPE sqrt2_trunc
8237 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8238 return fold_build2_loc (loc, MULT_EXPR, type,
8239 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8240 build_real (type, sqrt2_trunc));
8241 }
8242
8243 return NULL_TREE;
8244 }
8245

/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.

   ARG0 is the base, ARG1 the exponent and TYPE the result type of the
   call being folded; FNDECL is the pow builtin itself.  */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  /* Simplifications for a constant exponent.  */
  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent: round-trip C through an integer
	 and compare bit-for-bit.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* An inexact result may only be used under unsafe math,
		 since folding would fix the rounding at compile time.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  /* Simplifications for a base that is itself a math builtin call.  */
  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
8398
8399 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8400 Return NULL_TREE if no simplification can be made. */
8401 static tree
8402 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8403 tree arg0, tree arg1, tree type)
8404 {
8405 if (!validate_arg (arg0, REAL_TYPE)
8406 || !validate_arg (arg1, INTEGER_TYPE))
8407 return NULL_TREE;
8408
8409 /* Optimize pow(1.0,y) = 1.0. */
8410 if (real_onep (arg0))
8411 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8412
8413 if (host_integerp (arg1, 0))
8414 {
8415 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8416
8417 /* Evaluate powi at compile-time. */
8418 if (TREE_CODE (arg0) == REAL_CST
8419 && !TREE_OVERFLOW (arg0))
8420 {
8421 REAL_VALUE_TYPE x;
8422 x = TREE_REAL_CST (arg0);
8423 real_powi (&x, TYPE_MODE (type), &x, c);
8424 return build_real (type, x);
8425 }
8426
8427 /* Optimize pow(x,0) = 1.0. */
8428 if (c == 0)
8429 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8430 arg0);
8431
8432 /* Optimize pow(x,1) = x. */
8433 if (c == 1)
8434 return arg0;
8435
8436 /* Optimize pow(x,-1) = 1.0/x. */
8437 if (c == -1)
8438 return fold_build2_loc (loc, RDIV_EXPR, type,
8439 build_real (type, dconst1), arg0);
8440 }
8441
8442 return NULL_TREE;
8443 }
8444
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function; it also identifies
   which base (e, 2 or 10) is being folded, by pointer comparison
   against mpfr_exp, mpfr_exp2 and mpfr_exp10 below.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  Only valid under unsafe math since
	 it ignores rounding and the domain of logN.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  /* Match the log family with the same base as FUNC.  */
	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
8485
8486 /* Return true if VAR is a VAR_DECL or a component thereof. */
8487
8488 static bool
8489 var_decl_component_p (tree var)
8490 {
8491 tree inner = var;
8492 while (handled_component_p (inner))
8493 inner = TREE_OPERAND (inner, 0);
8494 return SSA_VAR_P (inner);
8495 }
8496
/* Fold function call to builtin memset.  Return
   NULL_TREE if no simplification can be made.

   DEST, C and LEN are the memset arguments; TYPE is the result type of
   the call and IGNORE is true when the call's value is unused.  The
   fold turns a memset of a whole scalar variable into a plain store.  */

static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
		     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* Only constant, host-representable lengths can be handled.  */
  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* DEST must be the address of a non-volatile object.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  /* ETYPE is the scalar element type being stored.  */
  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The memset must cover the object exactly and the destination must
     be sufficiently aligned for a direct store.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  /* Build the replicated byte pattern CVAL.  */
  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      /* Replicate into the upper half in two shifts so the shift count
	 stays below the width of the type.  */
      cval |= (cval << 31) << 1;
    }

  /* Emit *(etype *) dest = cval.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
				     fold_convert_loc (loc,
						       build_pointer_type (etype),
						       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  /* The call's value is DEST; sequence the store before it.  */
  return omit_one_operand_loc (loc, type, dest, ret);
}
8575
8576 /* Fold function call to builtin memset. Return
8577 NULL_TREE if no simplification can be made. */
8578
8579 static tree
8580 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8581 {
8582 if (! validate_arg (dest, POINTER_TYPE)
8583 || ! validate_arg (size, INTEGER_TYPE))
8584 return NULL_TREE;
8585
8586 if (!ignore)
8587 return NULL_TREE;
8588
8589 /* New argument list transforming bzero(ptr x, int y) to
8590 memset(ptr x, int 0, size_t y). This is done this way
8591 so that if it isn't expanded inline, we fallback to
8592 calling bzero instead of memset. */
8593
8594 return fold_builtin_memset (loc, dest, integer_zero_node,
8595 fold_convert_loc (loc, size_type_node, size),
8596 void_type_node, ignore);
8597 }
8598
8599 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8600 NULL_TREE if no simplification can be made.
8601 If ENDP is 0, return DEST (like memcpy).
8602 If ENDP is 1, return DEST+LEN (like mempcpy).
8603 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8604 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8605 (memmove). */
8606
8607 static tree
8608 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8609 tree len, tree type, bool ignore, int endp)
8610 {
8611 tree destvar, srcvar, expr;
8612
8613 if (! validate_arg (dest, POINTER_TYPE)
8614 || ! validate_arg (src, POINTER_TYPE)
8615 || ! validate_arg (len, INTEGER_TYPE))
8616 return NULL_TREE;
8617
8618 /* If the LEN parameter is zero, return DEST. */
8619 if (integer_zerop (len))
8620 return omit_one_operand_loc (loc, type, dest, src);
8621
8622 /* If SRC and DEST are the same (and not volatile), return
8623 DEST{,+LEN,+LEN-1}. */
8624 if (operand_equal_p (src, dest, 0))
8625 expr = len;
8626 else
8627 {
8628 tree srctype, desttype;
8629 unsigned int src_align, dest_align;
8630 tree off0;
8631
8632 if (endp == 3)
8633 {
8634 src_align = get_pointer_alignment (src);
8635 dest_align = get_pointer_alignment (dest);
8636
8637 /* Both DEST and SRC must be pointer types.
8638 ??? This is what old code did. Is the testing for pointer types
8639 really mandatory?
8640
8641 If either SRC is readonly or length is 1, we can use memcpy. */
8642 if (!dest_align || !src_align)
8643 return NULL_TREE;
8644 if (readonly_data_expr (src)
8645 || (host_integerp (len, 1)
8646 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8647 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8648 {
8649 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8650 if (!fn)
8651 return NULL_TREE;
8652 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8653 }
8654
8655 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8656 if (TREE_CODE (src) == ADDR_EXPR
8657 && TREE_CODE (dest) == ADDR_EXPR)
8658 {
8659 tree src_base, dest_base, fn;
8660 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8661 HOST_WIDE_INT size = -1;
8662 HOST_WIDE_INT maxsize = -1;
8663
8664 srcvar = TREE_OPERAND (src, 0);
8665 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8666 &size, &maxsize);
8667 destvar = TREE_OPERAND (dest, 0);
8668 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8669 &size, &maxsize);
8670 if (host_integerp (len, 1))
8671 maxsize = tree_low_cst (len, 1);
8672 else
8673 maxsize = -1;
8674 src_offset /= BITS_PER_UNIT;
8675 dest_offset /= BITS_PER_UNIT;
8676 if (SSA_VAR_P (src_base)
8677 && SSA_VAR_P (dest_base))
8678 {
8679 if (operand_equal_p (src_base, dest_base, 0)
8680 && ranges_overlap_p (src_offset, maxsize,
8681 dest_offset, maxsize))
8682 return NULL_TREE;
8683 }
8684 else if (TREE_CODE (src_base) == MEM_REF
8685 && TREE_CODE (dest_base) == MEM_REF)
8686 {
8687 double_int off;
8688 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8689 TREE_OPERAND (dest_base, 0), 0))
8690 return NULL_TREE;
8691 off = mem_ref_offset (src_base) +
8692 double_int::from_shwi (src_offset);
8693 if (!off.fits_shwi ())
8694 return NULL_TREE;
8695 src_offset = off.low;
8696 off = mem_ref_offset (dest_base) +
8697 double_int::from_shwi (dest_offset);
8698 if (!off.fits_shwi ())
8699 return NULL_TREE;
8700 dest_offset = off.low;
8701 if (ranges_overlap_p (src_offset, maxsize,
8702 dest_offset, maxsize))
8703 return NULL_TREE;
8704 }
8705 else
8706 return NULL_TREE;
8707
8708 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8709 if (!fn)
8710 return NULL_TREE;
8711 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8712 }
8713
8714 /* If the destination and source do not alias optimize into
8715 memcpy as well. */
8716 if ((is_gimple_min_invariant (dest)
8717 || TREE_CODE (dest) == SSA_NAME)
8718 && (is_gimple_min_invariant (src)
8719 || TREE_CODE (src) == SSA_NAME))
8720 {
8721 ao_ref destr, srcr;
8722 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8723 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8724 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8725 {
8726 tree fn;
8727 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8728 if (!fn)
8729 return NULL_TREE;
8730 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8731 }
8732 }
8733
8734 return NULL_TREE;
8735 }
8736
8737 if (!host_integerp (len, 0))
8738 return NULL_TREE;
8739 /* FIXME:
8740 This logic lose for arguments like (type *)malloc (sizeof (type)),
8741 since we strip the casts of up to VOID return value from malloc.
8742 Perhaps we ought to inherit type from non-VOID argument here? */
8743 STRIP_NOPS (src);
8744 STRIP_NOPS (dest);
8745 if (!POINTER_TYPE_P (TREE_TYPE (src))
8746 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8747 return NULL_TREE;
8748 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8749 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8750 {
8751 tree tem = TREE_OPERAND (src, 0);
8752 STRIP_NOPS (tem);
8753 if (tem != TREE_OPERAND (src, 0))
8754 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8755 }
8756 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8757 {
8758 tree tem = TREE_OPERAND (dest, 0);
8759 STRIP_NOPS (tem);
8760 if (tem != TREE_OPERAND (dest, 0))
8761 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8762 }
8763 srctype = TREE_TYPE (TREE_TYPE (src));
8764 if (TREE_CODE (srctype) == ARRAY_TYPE
8765 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8766 {
8767 srctype = TREE_TYPE (srctype);
8768 STRIP_NOPS (src);
8769 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8770 }
8771 desttype = TREE_TYPE (TREE_TYPE (dest));
8772 if (TREE_CODE (desttype) == ARRAY_TYPE
8773 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8774 {
8775 desttype = TREE_TYPE (desttype);
8776 STRIP_NOPS (dest);
8777 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8778 }
8779 if (TREE_ADDRESSABLE (srctype)
8780 || TREE_ADDRESSABLE (desttype))
8781 return NULL_TREE;
8782
8783 src_align = get_pointer_alignment (src);
8784 dest_align = get_pointer_alignment (dest);
8785 if (dest_align < TYPE_ALIGN (desttype)
8786 || src_align < TYPE_ALIGN (srctype))
8787 return NULL_TREE;
8788
8789 if (!ignore)
8790 dest = builtin_save_expr (dest);
8791
8792 /* Build accesses at offset zero with a ref-all character type. */
8793 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8794 ptr_mode, true), 0);
8795
8796 destvar = dest;
8797 STRIP_NOPS (destvar);
8798 if (TREE_CODE (destvar) == ADDR_EXPR
8799 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8800 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8801 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8802 else
8803 destvar = NULL_TREE;
8804
8805 srcvar = src;
8806 STRIP_NOPS (srcvar);
8807 if (TREE_CODE (srcvar) == ADDR_EXPR
8808 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8809 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8810 {
8811 if (!destvar
8812 || src_align >= TYPE_ALIGN (desttype))
8813 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8814 srcvar, off0);
8815 else if (!STRICT_ALIGNMENT)
8816 {
8817 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8818 src_align);
8819 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8820 }
8821 else
8822 srcvar = NULL_TREE;
8823 }
8824 else
8825 srcvar = NULL_TREE;
8826
8827 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8828 return NULL_TREE;
8829
8830 if (srcvar == NULL_TREE)
8831 {
8832 STRIP_NOPS (src);
8833 if (src_align >= TYPE_ALIGN (desttype))
8834 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8835 else
8836 {
8837 if (STRICT_ALIGNMENT)
8838 return NULL_TREE;
8839 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8840 src_align);
8841 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8842 }
8843 }
8844 else if (destvar == NULL_TREE)
8845 {
8846 STRIP_NOPS (dest);
8847 if (dest_align >= TYPE_ALIGN (srctype))
8848 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8849 else
8850 {
8851 if (STRICT_ALIGNMENT)
8852 return NULL_TREE;
8853 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8854 dest_align);
8855 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8856 }
8857 }
8858
8859 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8860 }
8861
8862 if (ignore)
8863 return expr;
8864
8865 if (endp == 0 || endp == 3)
8866 return omit_one_operand_loc (loc, type, dest, expr);
8867
8868 if (expr == len)
8869 expr = NULL_TREE;
8870
8871 if (endp == 2)
8872 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8873 ssize_int (1));
8874
8875 dest = fold_build_pointer_plus_loc (loc, dest, len);
8876 dest = fold_convert_loc (loc, type, dest);
8877 if (expr)
8878 dest = omit_one_operand_loc (loc, type, dest, expr);
8879 return dest;
8880 }
8881
8882 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8883 If LEN is not NULL, it represents the length of the string to be
8884 copied. Return NULL_TREE if no simplification can be made. */
8885
8886 tree
8887 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8888 {
8889 tree fn;
8890
8891 if (!validate_arg (dest, POINTER_TYPE)
8892 || !validate_arg (src, POINTER_TYPE))
8893 return NULL_TREE;
8894
8895 /* If SRC and DEST are the same (and not volatile), return DEST. */
8896 if (operand_equal_p (src, dest, 0))
8897 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8898
8899 if (optimize_function_for_size_p (cfun))
8900 return NULL_TREE;
8901
8902 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8903 if (!fn)
8904 return NULL_TREE;
8905
8906 if (!len)
8907 {
8908 len = c_strlen (src, 1);
8909 if (! len || TREE_SIDE_EFFECTS (len))
8910 return NULL_TREE;
8911 }
8912
8913 len = fold_convert_loc (loc, size_type_node, len);
8914 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
8915 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8916 build_call_expr_loc (loc, fn, 3, dest, src, len));
8917 }
8918
8919 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8920 Return NULL_TREE if no simplification can be made. */
8921
8922 static tree
8923 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8924 {
8925 tree fn, len, lenp1, call, type;
8926
8927 if (!validate_arg (dest, POINTER_TYPE)
8928 || !validate_arg (src, POINTER_TYPE))
8929 return NULL_TREE;
8930
8931 len = c_strlen (src, 1);
8932 if (!len
8933 || TREE_CODE (len) != INTEGER_CST)
8934 return NULL_TREE;
8935
8936 if (optimize_function_for_size_p (cfun)
8937 /* If length is zero it's small enough. */
8938 && !integer_zerop (len))
8939 return NULL_TREE;
8940
8941 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8942 if (!fn)
8943 return NULL_TREE;
8944
8945 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8946 fold_convert_loc (loc, size_type_node, len),
8947 build_int_cst (size_type_node, 1));
8948 /* We use dest twice in building our expression. Save it from
8949 multiple expansions. */
8950 dest = builtin_save_expr (dest);
8951 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8952
8953 type = TREE_TYPE (TREE_TYPE (fndecl));
8954 dest = fold_build_pointer_plus_loc (loc, dest, len);
8955 dest = fold_convert_loc (loc, type, dest);
8956 dest = omit_one_operand_loc (loc, type, dest, call);
8957 return dest;
8958 }
8959
8960 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8961 If SLEN is not NULL, it represents the length of the source string.
8962 Return NULL_TREE if no simplification can be made. */
8963
8964 tree
8965 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8966 tree src, tree len, tree slen)
8967 {
8968 tree fn;
8969
8970 if (!validate_arg (dest, POINTER_TYPE)
8971 || !validate_arg (src, POINTER_TYPE)
8972 || !validate_arg (len, INTEGER_TYPE))
8973 return NULL_TREE;
8974
8975 /* If the LEN parameter is zero, return DEST. */
8976 if (integer_zerop (len))
8977 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8978
8979 /* We can't compare slen with len as constants below if len is not a
8980 constant. */
8981 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8982 return NULL_TREE;
8983
8984 if (!slen)
8985 slen = c_strlen (src, 1);
8986
8987 /* Now, we must be passed a constant src ptr parameter. */
8988 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8989 return NULL_TREE;
8990
8991 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8992
8993 /* We do not support simplification of this case, though we do
8994 support it when expanding trees into RTL. */
8995 /* FIXME: generate a call to __builtin_memset. */
8996 if (tree_int_cst_lt (slen, len))
8997 return NULL_TREE;
8998
8999 /* OK transform into builtin memcpy. */
9000 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9001 if (!fn)
9002 return NULL_TREE;
9003
9004 len = fold_convert_loc (loc, size_type_node, len);
9005 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9006 build_call_expr_loc (loc, fn, 3, dest, src, len));
9007 }
9008
9009 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9010 arguments to the call, and TYPE is its return type.
9011 Return NULL_TREE if no simplification can be made. */
9012
9013 static tree
9014 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9015 {
9016 if (!validate_arg (arg1, POINTER_TYPE)
9017 || !validate_arg (arg2, INTEGER_TYPE)
9018 || !validate_arg (len, INTEGER_TYPE))
9019 return NULL_TREE;
9020 else
9021 {
9022 const char *p1;
9023
9024 if (TREE_CODE (arg2) != INTEGER_CST
9025 || !host_integerp (len, 1))
9026 return NULL_TREE;
9027
9028 p1 = c_getstr (arg1);
9029 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9030 {
9031 char c;
9032 const char *r;
9033 tree tem;
9034
9035 if (target_char_cast (arg2, &c))
9036 return NULL_TREE;
9037
9038 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9039
9040 if (r == NULL)
9041 return build_int_cst (TREE_TYPE (arg1), 0);
9042
9043 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9044 return fold_convert_loc (loc, type, tem);
9045 }
9046 return NULL_TREE;
9047 }
9048 }
9049
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   LEN is the byte count.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.
     NOTE(review): uses the host memcmp on c_getstr data; presumably
     host and target agree on byte ordering of string literals here.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      /* Normalize the host result to the canonical -1/0/1.  */
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      /* memcmp compares as unsigned char; build a const unsigned char
	 ref-all pointer type for the single-byte loads.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9116
9117 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9118 Return NULL_TREE if no simplification can be made. */
9119
9120 static tree
9121 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9122 {
9123 const char *p1, *p2;
9124
9125 if (!validate_arg (arg1, POINTER_TYPE)
9126 || !validate_arg (arg2, POINTER_TYPE))
9127 return NULL_TREE;
9128
9129 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9130 if (operand_equal_p (arg1, arg2, 0))
9131 return integer_zero_node;
9132
9133 p1 = c_getstr (arg1);
9134 p2 = c_getstr (arg2);
9135
9136 if (p1 && p2)
9137 {
9138 const int i = strcmp (p1, p2);
9139 if (i < 0)
9140 return integer_minus_one_node;
9141 else if (i > 0)
9142 return integer_one_node;
9143 else
9144 return integer_zero_node;
9145 }
9146
9147 /* If the second arg is "", return *(const unsigned char*)arg1. */
9148 if (p2 && *p2 == '\0')
9149 {
9150 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9151 tree cst_uchar_ptr_node
9152 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9153
9154 return fold_convert_loc (loc, integer_type_node,
9155 build1 (INDIRECT_REF, cst_uchar_node,
9156 fold_convert_loc (loc,
9157 cst_uchar_ptr_node,
9158 arg1)));
9159 }
9160
9161 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9162 if (p1 && *p1 == '\0')
9163 {
9164 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9165 tree cst_uchar_ptr_node
9166 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9167
9168 tree temp
9169 = fold_convert_loc (loc, integer_type_node,
9170 build1 (INDIRECT_REF, cst_uchar_node,
9171 fold_convert_loc (loc,
9172 cst_uchar_ptr_node,
9173 arg2)));
9174 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9175 }
9176
9177 return NULL_TREE;
9178 }
9179
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings and the count constant: evaluate at compile time,
     normalizing the host strncmp result to the canonical -1/0/1.
     NOTE(review): relies on the host strncmp over c_getstr data;
     presumably host and target agree on literal byte ordering.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      /* strncmp compares as unsigned char; use a const unsigned char
	 ref-all pointer type for the single-byte load.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9274
9275 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9276 ARG. Return NULL_TREE if no simplification can be made. */
9277
9278 static tree
9279 fold_builtin_signbit (location_t loc, tree arg, tree type)
9280 {
9281 if (!validate_arg (arg, REAL_TYPE))
9282 return NULL_TREE;
9283
9284 /* If ARG is a compile-time constant, determine the result. */
9285 if (TREE_CODE (arg) == REAL_CST
9286 && !TREE_OVERFLOW (arg))
9287 {
9288 REAL_VALUE_TYPE c;
9289
9290 c = TREE_REAL_CST (arg);
9291 return (REAL_VALUE_NEGATIVE (c)
9292 ? build_one_cst (type)
9293 : build_zero_cst (type));
9294 }
9295
9296 /* If ARG is non-negative, the result is always zero. */
9297 if (tree_expr_nonnegative_p (arg))
9298 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9299
9300 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9301 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9302 return fold_convert (type,
9303 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9304 build_real (TREE_TYPE (arg), dconst0)));
9305
9306 return NULL_TREE;
9307 }
9308
9309 /* Fold function call to builtin copysign, copysignf or copysignl with
9310 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9311 be made. */
9312
9313 static tree
9314 fold_builtin_copysign (location_t loc, tree fndecl,
9315 tree arg1, tree arg2, tree type)
9316 {
9317 tree tem;
9318
9319 if (!validate_arg (arg1, REAL_TYPE)
9320 || !validate_arg (arg2, REAL_TYPE))
9321 return NULL_TREE;
9322
9323 /* copysign(X,X) is X. */
9324 if (operand_equal_p (arg1, arg2, 0))
9325 return fold_convert_loc (loc, type, arg1);
9326
9327 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9328 if (TREE_CODE (arg1) == REAL_CST
9329 && TREE_CODE (arg2) == REAL_CST
9330 && !TREE_OVERFLOW (arg1)
9331 && !TREE_OVERFLOW (arg2))
9332 {
9333 REAL_VALUE_TYPE c1, c2;
9334
9335 c1 = TREE_REAL_CST (arg1);
9336 c2 = TREE_REAL_CST (arg2);
9337 /* c1.sign := c2.sign. */
9338 real_copysign (&c1, &c2);
9339 return build_real (type, c1);
9340 }
9341
9342 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9343 Remember to evaluate Y for side-effects. */
9344 if (tree_expr_nonnegative_p (arg2))
9345 return omit_one_operand_loc (loc, type,
9346 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9347 arg2);
9348
9349 /* Strip sign changing operations for the first argument. */
9350 tem = fold_strip_sign_ops (arg1);
9351 if (tem)
9352 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9353
9354 return NULL_TREE;
9355 }
9356
9357 /* Fold a call to builtin isascii with argument ARG. */
9358
9359 static tree
9360 fold_builtin_isascii (location_t loc, tree arg)
9361 {
9362 if (!validate_arg (arg, INTEGER_TYPE))
9363 return NULL_TREE;
9364 else
9365 {
9366 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9367 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9368 build_int_cst (integer_type_node,
9369 ~ (unsigned HOST_WIDE_INT) 0x7f));
9370 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9371 arg, integer_zero_node);
9372 }
9373 }
9374
9375 /* Fold a call to builtin toascii with argument ARG. */
9376
9377 static tree
9378 fold_builtin_toascii (location_t loc, tree arg)
9379 {
9380 if (!validate_arg (arg, INTEGER_TYPE))
9381 return NULL_TREE;
9382
9383 /* Transform toascii(c) -> (c & 0x7f). */
9384 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9385 build_int_cst (integer_type_node, 0x7f));
9386 }
9387
9388 /* Fold a call to builtin isdigit with argument ARG. */
9389
9390 static tree
9391 fold_builtin_isdigit (location_t loc, tree arg)
9392 {
9393 if (!validate_arg (arg, INTEGER_TYPE))
9394 return NULL_TREE;
9395 else
9396 {
9397 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9398 /* According to the C standard, isdigit is unaffected by locale.
9399 However, it definitely is affected by the target character set. */
9400 unsigned HOST_WIDE_INT target_digit0
9401 = lang_hooks.to_target_charset ('0');
9402
9403 if (target_digit0 == 0)
9404 return NULL_TREE;
9405
9406 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9407 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9408 build_int_cst (unsigned_type_node, target_digit0));
9409 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9410 build_int_cst (unsigned_type_node, 9));
9411 }
9412 }
9413
9414 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9415
9416 static tree
9417 fold_builtin_fabs (location_t loc, tree arg, tree type)
9418 {
9419 if (!validate_arg (arg, REAL_TYPE))
9420 return NULL_TREE;
9421
9422 arg = fold_convert_loc (loc, type, arg);
9423 if (TREE_CODE (arg) == REAL_CST)
9424 return fold_abs_const (arg, type);
9425 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9426 }
9427
9428 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9429
9430 static tree
9431 fold_builtin_abs (location_t loc, tree arg, tree type)
9432 {
9433 if (!validate_arg (arg, INTEGER_TYPE))
9434 return NULL_TREE;
9435
9436 arg = fold_convert_loc (loc, type, arg);
9437 if (TREE_CODE (arg) == INTEGER_CST)
9438 return fold_abs_const (arg, type);
9439 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9440 }
9441
9442 /* Fold a fma operation with arguments ARG[012]. */
9443
9444 tree
9445 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9446 tree type, tree arg0, tree arg1, tree arg2)
9447 {
9448 if (TREE_CODE (arg0) == REAL_CST
9449 && TREE_CODE (arg1) == REAL_CST
9450 && TREE_CODE (arg2) == REAL_CST)
9451 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9452
9453 return NULL_TREE;
9454 }
9455
9456 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9457
9458 static tree
9459 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9460 {
9461 if (validate_arg (arg0, REAL_TYPE)
9462 && validate_arg (arg1, REAL_TYPE)
9463 && validate_arg (arg2, REAL_TYPE))
9464 {
9465 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9466 if (tem)
9467 return tem;
9468
9469 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9470 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9471 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9472 }
9473 return NULL_TREE;
9474 }
9475
9476 /* Fold a call to builtin fmin or fmax. */
9477
9478 static tree
9479 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9480 tree type, bool max)
9481 {
9482 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9483 {
9484 /* Calculate the result when the argument is a constant. */
9485 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9486
9487 if (res)
9488 return res;
9489
9490 /* If either argument is NaN, return the other one. Avoid the
9491 transformation if we get (and honor) a signalling NaN. Using
9492 omit_one_operand() ensures we create a non-lvalue. */
9493 if (TREE_CODE (arg0) == REAL_CST
9494 && real_isnan (&TREE_REAL_CST (arg0))
9495 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9496 || ! TREE_REAL_CST (arg0).signalling))
9497 return omit_one_operand_loc (loc, type, arg1, arg0);
9498 if (TREE_CODE (arg1) == REAL_CST
9499 && real_isnan (&TREE_REAL_CST (arg1))
9500 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9501 || ! TREE_REAL_CST (arg1).signalling))
9502 return omit_one_operand_loc (loc, type, arg0, arg1);
9503
9504 /* Transform fmin/fmax(x,x) -> x. */
9505 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9506 return omit_one_operand_loc (loc, type, arg0, arg1);
9507
9508 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9509 functions to return the numeric arg if the other one is NaN.
9510 These tree codes don't honor that, so only transform if
9511 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9512 handled, so we don't have to worry about it either. */
9513 if (flag_finite_math_only)
9514 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9515 fold_convert_loc (loc, type, arg0),
9516 fold_convert_loc (loc, type, arg1));
9517 }
9518 return NULL_TREE;
9519 }
9520
9521 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9522
9523 static tree
9524 fold_builtin_carg (location_t loc, tree arg, tree type)
9525 {
9526 if (validate_arg (arg, COMPLEX_TYPE)
9527 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9528 {
9529 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9530
9531 if (atan2_fn)
9532 {
9533 tree new_arg = builtin_save_expr (arg);
9534 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9535 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9536 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9537 }
9538 }
9539
9540 return NULL_TREE;
9541 }
9542
/* Fold a call to builtin logb/ilogb.  RETTYPE distinguishes the two:
   REAL_TYPE for logb, an integer type for ilogb.  Return NULL_TREE
   if no simplification can be made.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only constant, non-overflowed real arguments can be folded.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    {
	      /* For logb(-Inf) we have to return +Inf.  */
	      if (real_isinf (value) && real_isneg (value))
		{
		  REAL_VALUE_TYPE tem;
		  real_inf (&tem);
		  return build_real (rettype, tem);
		}
	      return fold_convert_loc (loc, rettype, arg);
	    }
	  /* Fall through...  (ilogb of Inf/NaN is treated like zero:
	     the result is target-defined, so don't fold.)  */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9593
9594 /* Fold a call to builtin significand, if radix == 2. */
9595
9596 static tree
9597 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9598 {
9599 if (! validate_arg (arg, REAL_TYPE))
9600 return NULL_TREE;
9601
9602 STRIP_NOPS (arg);
9603
9604 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9605 {
9606 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9607
9608 switch (value->cl)
9609 {
9610 case rvc_zero:
9611 case rvc_nan:
9612 case rvc_inf:
9613 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9614 return fold_convert_loc (loc, rettype, arg);
9615 case rvc_normal:
9616 /* For normal numbers, proceed iff radix == 2. */
9617 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9618 {
9619 REAL_VALUE_TYPE result = *value;
9620 /* In GCC, normalized significands are in the range [0.5,
9621 1.0). We want them to be [1.0, 2.0) so set the
9622 exponent to 1. */
9623 SET_REAL_EXP (&result, 1);
9624 return build_real (rettype, result);
9625 }
9626 break;
9627 }
9628 }
9629
9630 return NULL_TREE;
9631 }
9632
/* Fold a call to builtin frexp, we can assume the base is 2.
   ARG0 is the value, ARG1 the int* exponent out-parameter, RETTYPE
   the call's return type.  The folded form is the COMPOUND_EXPR
   (*ARG1 = exp, frac).  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only constant, non-overflowed real values can be decomposed.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9688
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
	 For scalbn/scalbln (LDEXP false) this is only valid when the
	 type's radix is 2, since those scale by FLT_RADIX.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9749
9750 /* Fold a call to builtin modf. */
9751
9752 static tree
9753 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9754 {
9755 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9756 return NULL_TREE;
9757
9758 STRIP_NOPS (arg0);
9759
9760 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9761 return NULL_TREE;
9762
9763 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9764
9765 /* Proceed if a valid pointer type was passed in. */
9766 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9767 {
9768 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9769 REAL_VALUE_TYPE trunc, frac;
9770
9771 switch (value->cl)
9772 {
9773 case rvc_nan:
9774 case rvc_zero:
9775 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9776 trunc = frac = *value;
9777 break;
9778 case rvc_inf:
9779 /* For +-Inf, return (*arg1 = arg0, +-0). */
9780 frac = dconst0;
9781 frac.sign = value->sign;
9782 trunc = *value;
9783 break;
9784 case rvc_normal:
9785 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9786 real_trunc (&trunc, VOIDmode, value);
9787 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9788 /* If the original number was negative and already
9789 integral, then the fractional part is -0.0. */
9790 if (value->sign && frac.cl == rvc_zero)
9791 frac.sign = value->sign;
9792 break;
9793 }
9794
9795 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9796 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9797 build_real (rettype, trunc));
9798 TREE_SIDE_EFFECTS (arg1) = 1;
9799 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9800 build_real (rettype, frac));
9801 }
9802
9803 return NULL_TREE;
9804 }
9805
9806 /* Given a location LOC, an interclass builtin function decl FNDECL
9807 and its single argument ARG, return an folded expression computing
9808 the same, or NULL_TREE if we either couldn't or didn't want to fold
9809 (the latter happen if there's an RTL instruction available). */
9810
9811 static tree
9812 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9813 {
9814 enum machine_mode mode;
9815
9816 if (!validate_arg (arg, REAL_TYPE))
9817 return NULL_TREE;
9818
9819 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9820 return NULL_TREE;
9821
9822 mode = TYPE_MODE (TREE_TYPE (arg));
9823
9824 /* If there is no optab, try generic code. */
9825 switch (DECL_FUNCTION_CODE (fndecl))
9826 {
9827 tree result;
9828
9829 CASE_FLT_FN (BUILT_IN_ISINF):
9830 {
9831 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9832 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9833 tree const type = TREE_TYPE (arg);
9834 REAL_VALUE_TYPE r;
9835 char buf[128];
9836
9837 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9838 real_from_string (&r, buf);
9839 result = build_call_expr (isgr_fn, 2,
9840 fold_build1_loc (loc, ABS_EXPR, type, arg),
9841 build_real (type, r));
9842 return result;
9843 }
9844 CASE_FLT_FN (BUILT_IN_FINITE):
9845 case BUILT_IN_ISFINITE:
9846 {
9847 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9848 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9849 tree const type = TREE_TYPE (arg);
9850 REAL_VALUE_TYPE r;
9851 char buf[128];
9852
9853 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9854 real_from_string (&r, buf);
9855 result = build_call_expr (isle_fn, 2,
9856 fold_build1_loc (loc, ABS_EXPR, type, arg),
9857 build_real (type, r));
9858 /*result = fold_build2_loc (loc, UNGT_EXPR,
9859 TREE_TYPE (TREE_TYPE (fndecl)),
9860 fold_build1_loc (loc, ABS_EXPR, type, arg),
9861 build_real (type, r));
9862 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9863 TREE_TYPE (TREE_TYPE (fndecl)),
9864 result);*/
9865 return result;
9866 }
9867 case BUILT_IN_ISNORMAL:
9868 {
9869 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9870 islessequal(fabs(x),DBL_MAX). */
9871 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9872 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9873 tree const type = TREE_TYPE (arg);
9874 REAL_VALUE_TYPE rmax, rmin;
9875 char buf[128];
9876
9877 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9878 real_from_string (&rmax, buf);
9879 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9880 real_from_string (&rmin, buf);
9881 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9882 result = build_call_expr (isle_fn, 2, arg,
9883 build_real (type, rmax));
9884 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9885 build_call_expr (isge_fn, 2, arg,
9886 build_real (type, rmin)));
9887 return result;
9888 }
9889 default:
9890 break;
9891 }
9892
9893 return NULL_TREE;
9894 }
9895
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  BUILTIN_INDEX selects which
   classification to fold (BUILT_IN_ISINF, BUILT_IN_ISINF_SIGN,
   BUILT_IN_ISFINITE or BUILT_IN_ISNAN).  Returns the folded tree or
   NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* With -ffinite-math-only the mode has no infinities, so the
	 result is known to be 0 regardless of ARG's value.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  /* +Inf folds to 1, -Inf to -1, anything else to 0.
	     NOTE(review): the shared integer_*_node trees are returned
	     without conversion to TYPE — presumably callers fold or
	     convert the result; confirm before relying on the type.  */
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	/* ARG feeds two calls below; evaluate its side effects once.  */
	arg = builtin_save_expr (arg);

	/* Only fold if both helper builtins are available.  */
	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 booleans.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
				      isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
			       integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			       isinf_call, tmp,
			       integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* If the mode has neither NaNs nor infinities every value is
	 finite, so the result is known to be 1.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* Without NaNs in the mode, isnan is always 0.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* Non-constant: isnan(x) -> x unordered x, which is true exactly
	 when x is a NaN.  SAVE_EXPR keeps ARG's side effects single.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9988
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* Work on fabs(arg); SAVE_EXPR evaluates side effects only once
     even though ARG is tested several times below.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).
     The COND_EXPR chain is built inside-out, innermost test first.  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
		     build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
		     tmp, fp_zero, fp_subnormal);

  /* 0x1p(emin-1) is the smallest normalized value of MODE.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
		     arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  /* Only test for infinity if the mode honors it.  */
  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			 fp_infinite, res);
    }

  /* Only test for NaN if the mode honors it; "x ordered x" is false
     exactly when x is a NaN.  */
  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
10056
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  /* Pick the common real type for the comparison.
     NOTE(review): if neither argument is a REAL_TYPE, CMP_TYPE stays
     NULL_TREE and the fold_convert calls below receive it —
     presumably the front end rejects such calls before this point;
     confirm against the callers.  */
  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* isunordered(): without NaNs the answer is always false.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* The supplied codes compute the OPPOSITE of the desired result, so
     build the comparison and negate it.  Modes without NaNs can use
     the plain ordered comparison.  */
  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
						   : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
		      fold_build2_loc (loc, code, type, arg0, arg1));
}
10106
/* Fold a call to built-in function FNDECL with 0 arguments.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    /* inf() and the decimal variants produce a warnable infinity.  */
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    /* HUGE_VAL is the same constant but never warned about.  */
    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    case BUILT_IN_UNREACHABLE:
      /* Under -fsanitize=unreachable, emit a runtime diagnostic
	 instead of plain undefined behavior, unless the current
	 function opted out via the no_sanitize_undefined attribute.  */
      if (flag_sanitize & SANITIZE_UNREACHABLE
	  && (current_function_decl == NULL
	      || !lookup_attribute ("no_sanitize_undefined",
				    DECL_ATTRIBUTES (current_function_decl))))
	return ubsan_instrument_unreachable (loc);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
10143
10144 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10145 IGNORE is true if the result of the function call is ignored. This
10146 function returns NULL_TREE if no simplification was possible. */
10147
10148 static tree
10149 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10150 {
10151 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10152 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10153 switch (fcode)
10154 {
10155 case BUILT_IN_CONSTANT_P:
10156 {
10157 tree val = fold_builtin_constant_p (arg0);
10158
10159 /* Gimplification will pull the CALL_EXPR for the builtin out of
10160 an if condition. When not optimizing, we'll not CSE it back.
10161 To avoid link error types of regressions, return false now. */
10162 if (!val && !optimize)
10163 val = integer_zero_node;
10164
10165 return val;
10166 }
10167
10168 case BUILT_IN_CLASSIFY_TYPE:
10169 return fold_builtin_classify_type (arg0);
10170
10171 case BUILT_IN_STRLEN:
10172 return fold_builtin_strlen (loc, type, arg0);
10173
10174 CASE_FLT_FN (BUILT_IN_FABS):
10175 case BUILT_IN_FABSD32:
10176 case BUILT_IN_FABSD64:
10177 case BUILT_IN_FABSD128:
10178 return fold_builtin_fabs (loc, arg0, type);
10179
10180 case BUILT_IN_ABS:
10181 case BUILT_IN_LABS:
10182 case BUILT_IN_LLABS:
10183 case BUILT_IN_IMAXABS:
10184 return fold_builtin_abs (loc, arg0, type);
10185
10186 CASE_FLT_FN (BUILT_IN_CONJ):
10187 if (validate_arg (arg0, COMPLEX_TYPE)
10188 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10189 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10190 break;
10191
10192 CASE_FLT_FN (BUILT_IN_CREAL):
10193 if (validate_arg (arg0, COMPLEX_TYPE)
10194 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10195 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10196 break;
10197
10198 CASE_FLT_FN (BUILT_IN_CIMAG):
10199 if (validate_arg (arg0, COMPLEX_TYPE)
10200 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10201 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10202 break;
10203
10204 CASE_FLT_FN (BUILT_IN_CCOS):
10205 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10206
10207 CASE_FLT_FN (BUILT_IN_CCOSH):
10208 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10209
10210 CASE_FLT_FN (BUILT_IN_CPROJ):
10211 return fold_builtin_cproj (loc, arg0, type);
10212
10213 CASE_FLT_FN (BUILT_IN_CSIN):
10214 if (validate_arg (arg0, COMPLEX_TYPE)
10215 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10216 return do_mpc_arg1 (arg0, type, mpc_sin);
10217 break;
10218
10219 CASE_FLT_FN (BUILT_IN_CSINH):
10220 if (validate_arg (arg0, COMPLEX_TYPE)
10221 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10222 return do_mpc_arg1 (arg0, type, mpc_sinh);
10223 break;
10224
10225 CASE_FLT_FN (BUILT_IN_CTAN):
10226 if (validate_arg (arg0, COMPLEX_TYPE)
10227 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10228 return do_mpc_arg1 (arg0, type, mpc_tan);
10229 break;
10230
10231 CASE_FLT_FN (BUILT_IN_CTANH):
10232 if (validate_arg (arg0, COMPLEX_TYPE)
10233 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10234 return do_mpc_arg1 (arg0, type, mpc_tanh);
10235 break;
10236
10237 CASE_FLT_FN (BUILT_IN_CLOG):
10238 if (validate_arg (arg0, COMPLEX_TYPE)
10239 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10240 return do_mpc_arg1 (arg0, type, mpc_log);
10241 break;
10242
10243 CASE_FLT_FN (BUILT_IN_CSQRT):
10244 if (validate_arg (arg0, COMPLEX_TYPE)
10245 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10246 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10247 break;
10248
10249 CASE_FLT_FN (BUILT_IN_CASIN):
10250 if (validate_arg (arg0, COMPLEX_TYPE)
10251 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10252 return do_mpc_arg1 (arg0, type, mpc_asin);
10253 break;
10254
10255 CASE_FLT_FN (BUILT_IN_CACOS):
10256 if (validate_arg (arg0, COMPLEX_TYPE)
10257 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10258 return do_mpc_arg1 (arg0, type, mpc_acos);
10259 break;
10260
10261 CASE_FLT_FN (BUILT_IN_CATAN):
10262 if (validate_arg (arg0, COMPLEX_TYPE)
10263 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10264 return do_mpc_arg1 (arg0, type, mpc_atan);
10265 break;
10266
10267 CASE_FLT_FN (BUILT_IN_CASINH):
10268 if (validate_arg (arg0, COMPLEX_TYPE)
10269 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10270 return do_mpc_arg1 (arg0, type, mpc_asinh);
10271 break;
10272
10273 CASE_FLT_FN (BUILT_IN_CACOSH):
10274 if (validate_arg (arg0, COMPLEX_TYPE)
10275 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10276 return do_mpc_arg1 (arg0, type, mpc_acosh);
10277 break;
10278
10279 CASE_FLT_FN (BUILT_IN_CATANH):
10280 if (validate_arg (arg0, COMPLEX_TYPE)
10281 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10282 return do_mpc_arg1 (arg0, type, mpc_atanh);
10283 break;
10284
10285 CASE_FLT_FN (BUILT_IN_CABS):
10286 return fold_builtin_cabs (loc, arg0, type, fndecl);
10287
10288 CASE_FLT_FN (BUILT_IN_CARG):
10289 return fold_builtin_carg (loc, arg0, type);
10290
10291 CASE_FLT_FN (BUILT_IN_SQRT):
10292 return fold_builtin_sqrt (loc, arg0, type);
10293
10294 CASE_FLT_FN (BUILT_IN_CBRT):
10295 return fold_builtin_cbrt (loc, arg0, type);
10296
10297 CASE_FLT_FN (BUILT_IN_ASIN):
10298 if (validate_arg (arg0, REAL_TYPE))
10299 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10300 &dconstm1, &dconst1, true);
10301 break;
10302
10303 CASE_FLT_FN (BUILT_IN_ACOS):
10304 if (validate_arg (arg0, REAL_TYPE))
10305 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10306 &dconstm1, &dconst1, true);
10307 break;
10308
10309 CASE_FLT_FN (BUILT_IN_ATAN):
10310 if (validate_arg (arg0, REAL_TYPE))
10311 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10312 break;
10313
10314 CASE_FLT_FN (BUILT_IN_ASINH):
10315 if (validate_arg (arg0, REAL_TYPE))
10316 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10317 break;
10318
10319 CASE_FLT_FN (BUILT_IN_ACOSH):
10320 if (validate_arg (arg0, REAL_TYPE))
10321 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10322 &dconst1, NULL, true);
10323 break;
10324
10325 CASE_FLT_FN (BUILT_IN_ATANH):
10326 if (validate_arg (arg0, REAL_TYPE))
10327 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10328 &dconstm1, &dconst1, false);
10329 break;
10330
10331 CASE_FLT_FN (BUILT_IN_SIN):
10332 if (validate_arg (arg0, REAL_TYPE))
10333 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10334 break;
10335
10336 CASE_FLT_FN (BUILT_IN_COS):
10337 return fold_builtin_cos (loc, arg0, type, fndecl);
10338
10339 CASE_FLT_FN (BUILT_IN_TAN):
10340 return fold_builtin_tan (arg0, type);
10341
10342 CASE_FLT_FN (BUILT_IN_CEXP):
10343 return fold_builtin_cexp (loc, arg0, type);
10344
10345 CASE_FLT_FN (BUILT_IN_CEXPI):
10346 if (validate_arg (arg0, REAL_TYPE))
10347 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10348 break;
10349
10350 CASE_FLT_FN (BUILT_IN_SINH):
10351 if (validate_arg (arg0, REAL_TYPE))
10352 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10353 break;
10354
10355 CASE_FLT_FN (BUILT_IN_COSH):
10356 return fold_builtin_cosh (loc, arg0, type, fndecl);
10357
10358 CASE_FLT_FN (BUILT_IN_TANH):
10359 if (validate_arg (arg0, REAL_TYPE))
10360 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10361 break;
10362
10363 CASE_FLT_FN (BUILT_IN_ERF):
10364 if (validate_arg (arg0, REAL_TYPE))
10365 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10366 break;
10367
10368 CASE_FLT_FN (BUILT_IN_ERFC):
10369 if (validate_arg (arg0, REAL_TYPE))
10370 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10371 break;
10372
10373 CASE_FLT_FN (BUILT_IN_TGAMMA):
10374 if (validate_arg (arg0, REAL_TYPE))
10375 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10376 break;
10377
10378 CASE_FLT_FN (BUILT_IN_EXP):
10379 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10380
10381 CASE_FLT_FN (BUILT_IN_EXP2):
10382 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10383
10384 CASE_FLT_FN (BUILT_IN_EXP10):
10385 CASE_FLT_FN (BUILT_IN_POW10):
10386 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10387
10388 CASE_FLT_FN (BUILT_IN_EXPM1):
10389 if (validate_arg (arg0, REAL_TYPE))
10390 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10391 break;
10392
10393 CASE_FLT_FN (BUILT_IN_LOG):
10394 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10395
10396 CASE_FLT_FN (BUILT_IN_LOG2):
10397 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10398
10399 CASE_FLT_FN (BUILT_IN_LOG10):
10400 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10401
10402 CASE_FLT_FN (BUILT_IN_LOG1P):
10403 if (validate_arg (arg0, REAL_TYPE))
10404 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10405 &dconstm1, NULL, false);
10406 break;
10407
10408 CASE_FLT_FN (BUILT_IN_J0):
10409 if (validate_arg (arg0, REAL_TYPE))
10410 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10411 NULL, NULL, 0);
10412 break;
10413
10414 CASE_FLT_FN (BUILT_IN_J1):
10415 if (validate_arg (arg0, REAL_TYPE))
10416 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10417 NULL, NULL, 0);
10418 break;
10419
10420 CASE_FLT_FN (BUILT_IN_Y0):
10421 if (validate_arg (arg0, REAL_TYPE))
10422 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10423 &dconst0, NULL, false);
10424 break;
10425
10426 CASE_FLT_FN (BUILT_IN_Y1):
10427 if (validate_arg (arg0, REAL_TYPE))
10428 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10429 &dconst0, NULL, false);
10430 break;
10431
10432 CASE_FLT_FN (BUILT_IN_NAN):
10433 case BUILT_IN_NAND32:
10434 case BUILT_IN_NAND64:
10435 case BUILT_IN_NAND128:
10436 return fold_builtin_nan (arg0, type, true);
10437
10438 CASE_FLT_FN (BUILT_IN_NANS):
10439 return fold_builtin_nan (arg0, type, false);
10440
10441 CASE_FLT_FN (BUILT_IN_FLOOR):
10442 return fold_builtin_floor (loc, fndecl, arg0);
10443
10444 CASE_FLT_FN (BUILT_IN_CEIL):
10445 return fold_builtin_ceil (loc, fndecl, arg0);
10446
10447 CASE_FLT_FN (BUILT_IN_TRUNC):
10448 return fold_builtin_trunc (loc, fndecl, arg0);
10449
10450 CASE_FLT_FN (BUILT_IN_ROUND):
10451 return fold_builtin_round (loc, fndecl, arg0);
10452
10453 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10454 CASE_FLT_FN (BUILT_IN_RINT):
10455 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10456
10457 CASE_FLT_FN (BUILT_IN_ICEIL):
10458 CASE_FLT_FN (BUILT_IN_LCEIL):
10459 CASE_FLT_FN (BUILT_IN_LLCEIL):
10460 CASE_FLT_FN (BUILT_IN_LFLOOR):
10461 CASE_FLT_FN (BUILT_IN_IFLOOR):
10462 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10463 CASE_FLT_FN (BUILT_IN_IROUND):
10464 CASE_FLT_FN (BUILT_IN_LROUND):
10465 CASE_FLT_FN (BUILT_IN_LLROUND):
10466 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10467
10468 CASE_FLT_FN (BUILT_IN_IRINT):
10469 CASE_FLT_FN (BUILT_IN_LRINT):
10470 CASE_FLT_FN (BUILT_IN_LLRINT):
10471 return fold_fixed_mathfn (loc, fndecl, arg0);
10472
10473 case BUILT_IN_BSWAP16:
10474 case BUILT_IN_BSWAP32:
10475 case BUILT_IN_BSWAP64:
10476 return fold_builtin_bswap (fndecl, arg0);
10477
10478 CASE_INT_FN (BUILT_IN_FFS):
10479 CASE_INT_FN (BUILT_IN_CLZ):
10480 CASE_INT_FN (BUILT_IN_CTZ):
10481 CASE_INT_FN (BUILT_IN_CLRSB):
10482 CASE_INT_FN (BUILT_IN_POPCOUNT):
10483 CASE_INT_FN (BUILT_IN_PARITY):
10484 return fold_builtin_bitop (fndecl, arg0);
10485
10486 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10487 return fold_builtin_signbit (loc, arg0, type);
10488
10489 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10490 return fold_builtin_significand (loc, arg0, type);
10491
10492 CASE_FLT_FN (BUILT_IN_ILOGB):
10493 CASE_FLT_FN (BUILT_IN_LOGB):
10494 return fold_builtin_logb (loc, arg0, type);
10495
10496 case BUILT_IN_ISASCII:
10497 return fold_builtin_isascii (loc, arg0);
10498
10499 case BUILT_IN_TOASCII:
10500 return fold_builtin_toascii (loc, arg0);
10501
10502 case BUILT_IN_ISDIGIT:
10503 return fold_builtin_isdigit (loc, arg0);
10504
10505 CASE_FLT_FN (BUILT_IN_FINITE):
10506 case BUILT_IN_FINITED32:
10507 case BUILT_IN_FINITED64:
10508 case BUILT_IN_FINITED128:
10509 case BUILT_IN_ISFINITE:
10510 {
10511 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10512 if (ret)
10513 return ret;
10514 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10515 }
10516
10517 CASE_FLT_FN (BUILT_IN_ISINF):
10518 case BUILT_IN_ISINFD32:
10519 case BUILT_IN_ISINFD64:
10520 case BUILT_IN_ISINFD128:
10521 {
10522 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10523 if (ret)
10524 return ret;
10525 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10526 }
10527
10528 case BUILT_IN_ISNORMAL:
10529 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10530
10531 case BUILT_IN_ISINF_SIGN:
10532 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10533
10534 CASE_FLT_FN (BUILT_IN_ISNAN):
10535 case BUILT_IN_ISNAND32:
10536 case BUILT_IN_ISNAND64:
10537 case BUILT_IN_ISNAND128:
10538 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10539
10540 case BUILT_IN_PRINTF:
10541 case BUILT_IN_PRINTF_UNLOCKED:
10542 case BUILT_IN_VPRINTF:
10543 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10544
10545 case BUILT_IN_FREE:
10546 if (integer_zerop (arg0))
10547 return build_empty_stmt (loc);
10548 break;
10549
10550 default:
10551 break;
10552 }
10553
10554 return NULL_TREE;
10555
10556 }
10557
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* Bessel functions of integer order: evaluated at compile time
       via MPFR when both arguments are constant.  */
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
      break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
      break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
      break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
      break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
	  && validate_arg (arg1, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
      break;

    /* ldexp scales by powers of 2; scalbn/scalbln by powers of the
       radix — the LDEXP flag distinguishes the two.  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
					 type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_BZERO:
      return fold_builtin_bzero (loc, arg0, arg1, ignore);

    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (loc, arg0, arg1);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);

    case BUILT_IN_STPCPY:
      /* If the result is unused, stpcpy degenerates to strcpy, whose
	 folder and expanders are better.  */
      if (ignore)
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
	  if (!fn)
	    break;

	  return build_call_expr_loc (loc, fn, 2, arg0, arg1);
	}
      else
	return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    /* The unordered comparisons are folded through the NEGATION of the
       desired predicate, hence the "opposite" tree codes below.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* The first argument is the checking flag; it must be a
	 side-effect-free integer for the fold to be valid, since it
	 is dropped below.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (loc, fndecl,
				    arg1, NULL_TREE, ignore, fcode);
      break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
				   ignore, fcode);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
10764
10765 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10766 and ARG2. IGNORE is true if the result of the function call is ignored.
10767 This function returns NULL_TREE if no simplification was possible. */
10768
10769 static tree
10770 fold_builtin_3 (location_t loc, tree fndecl,
10771 tree arg0, tree arg1, tree arg2, bool ignore)
10772 {
10773 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10774 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10775 switch (fcode)
10776 {
10777
10778 CASE_FLT_FN (BUILT_IN_SINCOS):
10779 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10780
10781 CASE_FLT_FN (BUILT_IN_FMA):
10782 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10783 break;
10784
10785 CASE_FLT_FN (BUILT_IN_REMQUO):
10786 if (validate_arg (arg0, REAL_TYPE)
10787 && validate_arg (arg1, REAL_TYPE)
10788 && validate_arg (arg2, POINTER_TYPE))
10789 return do_mpfr_remquo (arg0, arg1, arg2);
10790 break;
10791
10792 case BUILT_IN_MEMSET:
10793 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10794
10795 case BUILT_IN_BCOPY:
10796 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10797 void_type_node, true, /*endp=*/3);
10798
10799 case BUILT_IN_MEMCPY:
10800 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10801 type, ignore, /*endp=*/0);
10802
10803 case BUILT_IN_MEMPCPY:
10804 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10805 type, ignore, /*endp=*/1);
10806
10807 case BUILT_IN_MEMMOVE:
10808 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10809 type, ignore, /*endp=*/3);
10810
10811 case BUILT_IN_STRNCAT:
10812 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10813
10814 case BUILT_IN_STRNCPY:
10815 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10816
10817 case BUILT_IN_STRNCMP:
10818 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10819
10820 case BUILT_IN_MEMCHR:
10821 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10822
10823 case BUILT_IN_BCMP:
10824 case BUILT_IN_MEMCMP:
10825 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10826
10827 case BUILT_IN_SPRINTF:
10828 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10829
10830 case BUILT_IN_SNPRINTF:
10831 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10832
10833 case BUILT_IN_STRCPY_CHK:
10834 case BUILT_IN_STPCPY_CHK:
10835 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10836 ignore, fcode);
10837
10838 case BUILT_IN_STRCAT_CHK:
10839 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10840
10841 case BUILT_IN_PRINTF_CHK:
10842 case BUILT_IN_VPRINTF_CHK:
10843 if (!validate_arg (arg0, INTEGER_TYPE)
10844 || TREE_SIDE_EFFECTS (arg0))
10845 return NULL_TREE;
10846 else
10847 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10848 break;
10849
10850 case BUILT_IN_FPRINTF:
10851 case BUILT_IN_FPRINTF_UNLOCKED:
10852 case BUILT_IN_VFPRINTF:
10853 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10854 ignore, fcode);
10855
10856 case BUILT_IN_FPRINTF_CHK:
10857 case BUILT_IN_VFPRINTF_CHK:
10858 if (!validate_arg (arg1, INTEGER_TYPE)
10859 || TREE_SIDE_EFFECTS (arg1))
10860 return NULL_TREE;
10861 else
10862 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10863 ignore, fcode);
10864
10865 default:
10866 break;
10867 }
10868 return NULL_TREE;
10869 }
10870
10871 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10872 ARG2, and ARG3. IGNORE is true if the result of the function call is
10873 ignored. This function returns NULL_TREE if no simplification was
10874 possible. */
10875
10876 static tree
10877 fold_builtin_4 (location_t loc, tree fndecl,
10878 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10879 {
10880 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10881
10882 switch (fcode)
10883 {
10884 case BUILT_IN_MEMCPY_CHK:
10885 case BUILT_IN_MEMPCPY_CHK:
10886 case BUILT_IN_MEMMOVE_CHK:
10887 case BUILT_IN_MEMSET_CHK:
10888 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10889 NULL_TREE, ignore,
10890 DECL_FUNCTION_CODE (fndecl));
10891
10892 case BUILT_IN_STRNCPY_CHK:
10893 case BUILT_IN_STPNCPY_CHK:
10894 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
10895 ignore, fcode);
10896
10897 case BUILT_IN_STRNCAT_CHK:
10898 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10899
10900 case BUILT_IN_SNPRINTF:
10901 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10902
10903 case BUILT_IN_FPRINTF_CHK:
10904 case BUILT_IN_VFPRINTF_CHK:
10905 if (!validate_arg (arg1, INTEGER_TYPE)
10906 || TREE_SIDE_EFFECTS (arg1))
10907 return NULL_TREE;
10908 else
10909 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10910 ignore, fcode);
10911 break;
10912
10913 default:
10914 break;
10915 }
10916 return NULL_TREE;
10917 }
10918
10919 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10920 arguments, where NARGS <= 4. IGNORE is true if the result of the
10921 function call is ignored. This function returns NULL_TREE if no
10922 simplification was possible. Note that this only folds builtins with
10923 fixed argument patterns. Foldings that do varargs-to-varargs
10924 transformations, or that match calls with more than 4 arguments,
10925 need to be handled with fold_builtin_varargs instead. */
10926
10927 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10928
10929 static tree
10930 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10931 {
10932 tree ret = NULL_TREE;
10933
10934 switch (nargs)
10935 {
10936 case 0:
10937 ret = fold_builtin_0 (loc, fndecl, ignore);
10938 break;
10939 case 1:
10940 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10941 break;
10942 case 2:
10943 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10944 break;
10945 case 3:
10946 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10947 break;
10948 case 4:
10949 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10950 ignore);
10951 break;
10952 default:
10953 break;
10954 }
10955 if (ret)
10956 {
10957 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10958 SET_EXPR_LOCATION (ret, loc);
10959 TREE_NO_WARNING (ret) = 1;
10960 return ret;
10961 }
10962 return NULL_TREE;
10963 }
10964
10965 /* Builtins with folding operations that operate on "..." arguments
10966 need special handling; we need to store the arguments in a convenient
10967 data structure before attempting any folding. Fortunately there are
10968 only a few builtins that fall into this category. FNDECL is the
10969 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10970 result of the function call is ignored. */
10971
10972 static tree
10973 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10974 bool ignore ATTRIBUTE_UNUSED)
10975 {
10976 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10977 tree ret = NULL_TREE;
10978
10979 switch (fcode)
10980 {
10981 case BUILT_IN_SPRINTF_CHK:
10982 case BUILT_IN_VSPRINTF_CHK:
10983 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10984 break;
10985
10986 case BUILT_IN_SNPRINTF_CHK:
10987 case BUILT_IN_VSNPRINTF_CHK:
10988 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10989 break;
10990
10991 case BUILT_IN_FPCLASSIFY:
10992 ret = fold_builtin_fpclassify (loc, exp);
10993 break;
10994
10995 default:
10996 break;
10997 }
10998 if (ret)
10999 {
11000 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11001 SET_EXPR_LOCATION (ret, loc);
11002 TREE_NO_WARNING (ret) = 1;
11003 return ret;
11004 }
11005 return NULL_TREE;
11006 }
11007
11008 /* Return true if FNDECL shouldn't be folded right now.
11009 If a built-in function has an inline attribute always_inline
11010 wrapper, defer folding it after always_inline functions have
11011 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11012 might not be performed. */
11013
11014 bool
11015 avoid_folding_inline_builtin (tree fndecl)
11016 {
11017 return (DECL_DECLARED_INLINE_P (fndecl)
11018 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11019 && cfun
11020 && !cfun->always_inline_functions_inlined
11021 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11022 }
11023
11024 /* A wrapper function for builtin folding that prevents warnings for
11025 "statement without effect" and the like, caused by removing the
11026 call node earlier than the warning is generated. */
11027
11028 tree
11029 fold_call_expr (location_t loc, tree exp, bool ignore)
11030 {
11031 tree ret = NULL_TREE;
11032 tree fndecl = get_callee_fndecl (exp);
11033 if (fndecl
11034 && TREE_CODE (fndecl) == FUNCTION_DECL
11035 && DECL_BUILT_IN (fndecl)
11036 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11037 yet. Defer folding until we see all the arguments
11038 (after inlining). */
11039 && !CALL_EXPR_VA_ARG_PACK (exp))
11040 {
11041 int nargs = call_expr_nargs (exp);
11042
11043 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11044 instead last argument is __builtin_va_arg_pack (). Defer folding
11045 even in that case, until arguments are finalized. */
11046 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11047 {
11048 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11049 if (fndecl2
11050 && TREE_CODE (fndecl2) == FUNCTION_DECL
11051 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11052 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11053 return NULL_TREE;
11054 }
11055
11056 if (avoid_folding_inline_builtin (fndecl))
11057 return NULL_TREE;
11058
11059 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11060 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11061 CALL_EXPR_ARGP (exp), ignore);
11062 else
11063 {
11064 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11065 {
11066 tree *args = CALL_EXPR_ARGP (exp);
11067 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11068 }
11069 if (!ret)
11070 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11071 if (ret)
11072 return ret;
11073 }
11074 }
11075 return NULL_TREE;
11076 }
11077
11078 /* Conveniently construct a function call expression. FNDECL names the
11079 function to be called and N arguments are passed in the array
11080 ARGARRAY. */
11081
11082 tree
11083 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11084 {
11085 tree fntype = TREE_TYPE (fndecl);
11086 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11087
11088 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11089 }
11090
11091 /* Conveniently construct a function call expression. FNDECL names the
11092 function to be called and the arguments are passed in the vector
11093 VEC. */
11094
11095 tree
11096 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11097 {
11098 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11099 vec_safe_address (vec));
11100 }
11101
11102
11103 /* Conveniently construct a function call expression. FNDECL names the
11104 function to be called, N is the number of arguments, and the "..."
11105 parameters are the argument expressions. */
11106
11107 tree
11108 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11109 {
11110 va_list ap;
11111 tree *argarray = XALLOCAVEC (tree, n);
11112 int i;
11113
11114 va_start (ap, n);
11115 for (i = 0; i < n; i++)
11116 argarray[i] = va_arg (ap, tree);
11117 va_end (ap);
11118 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11119 }
11120
11121 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11122 varargs macros aren't supported by all bootstrap compilers. */
11123
11124 tree
11125 build_call_expr (tree fndecl, int n, ...)
11126 {
11127 va_list ap;
11128 tree *argarray = XALLOCAVEC (tree, n);
11129 int i;
11130
11131 va_start (ap, n);
11132 for (i = 0; i < n; i++)
11133 argarray[i] = va_arg (ap, tree);
11134 va_end (ap);
11135 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11136 }
11137
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.

   If FN is the address of a builtin, an attempt is made to fold the call;
   otherwise (or if folding fails) a plain CALL_EXPR is built.  */

tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  /* always_inline wrappers must be inlined before folding; see
	     avoid_folding_inline_builtin.  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* Machine-dependent builtins are folded by the target hook;
		 if it declines, emit an ordinary call.  */
	      ret = targetm.fold_builtin (fndecl, n, argarray, false);
	      if (ret)
		return ret;

	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  /* Not a builtin (or FN is not an ADDR_EXPR): build the call as-is.  */
  return build_call_array_loc (loc, type, fn, n, argarray);
}
11195
11196 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11197 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11198 of arguments in ARGS to be omitted. OLDNARGS is the number of
11199 elements in ARGS. */
11200
11201 static tree
11202 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11203 int skip, tree fndecl, int n, va_list newargs)
11204 {
11205 int nargs = oldnargs - skip + n;
11206 tree *buffer;
11207
11208 if (n > 0)
11209 {
11210 int i, j;
11211
11212 buffer = XALLOCAVEC (tree, nargs);
11213 for (i = 0; i < n; i++)
11214 buffer[i] = va_arg (newargs, tree);
11215 for (j = skip; j < oldnargs; j++, i++)
11216 buffer[i] = args[j];
11217 }
11218 else
11219 buffer = args + skip;
11220
11221 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11222 }
11223
11224 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11225 list ARGS along with N new arguments specified as the "..."
11226 parameters. SKIP is the number of arguments in ARGS to be omitted.
11227 OLDNARGS is the number of elements in ARGS. */
11228
11229 static tree
11230 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11231 int skip, tree fndecl, int n, ...)
11232 {
11233 va_list ap;
11234 tree t;
11235
11236 va_start (ap, n);
11237 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11238 va_end (ap);
11239
11240 return t;
11241 }
11242
11243 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11244 along with N new arguments specified as the "..." parameters. SKIP
11245 is the number of arguments in EXP to be omitted. This function is used
11246 to do varargs-to-varargs transformations. */
11247
11248 static tree
11249 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11250 {
11251 va_list ap;
11252 tree t;
11253
11254 va_start (ap, n);
11255 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11256 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11257 va_end (ap);
11258
11259 return t;
11260 }
11261
11262 /* Validate a single argument ARG against a tree code CODE representing
11263 a type. */
11264
11265 static bool
11266 validate_arg (const_tree arg, enum tree_code code)
11267 {
11268 if (!arg)
11269 return false;
11270 else if (code == POINTER_TYPE)
11271 return POINTER_TYPE_P (TREE_TYPE (arg));
11272 else if (code == INTEGER_TYPE)
11273 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11274 return code == TREE_CODE (TREE_TYPE (arg));
11275 }
11276
11277 /* This function validates the types of a function call argument list
11278 against a specified list of tree_codes. If the last specifier is a 0,
11279 that represents an ellipses, otherwise the last specifier must be a
11280 VOID_TYPE.
11281
11282 This is the GIMPLE version of validate_arglist. Eventually we want to
11283 completely convert builtins.c to work from GIMPLEs and the tree based
11284 validate_arglist will then be removed. */
11285
11286 bool
11287 validate_gimple_arglist (const_gimple call, ...)
11288 {
11289 enum tree_code code;
11290 bool res = 0;
11291 va_list ap;
11292 const_tree arg;
11293 size_t i;
11294
11295 va_start (ap, call);
11296 i = 0;
11297
11298 do
11299 {
11300 code = (enum tree_code) va_arg (ap, int);
11301 switch (code)
11302 {
11303 case 0:
11304 /* This signifies an ellipses, any further arguments are all ok. */
11305 res = true;
11306 goto end;
11307 case VOID_TYPE:
11308 /* This signifies an endlink, if no arguments remain, return
11309 true, otherwise return false. */
11310 res = (i == gimple_call_num_args (call));
11311 goto end;
11312 default:
11313 /* If no parameters remain or the parameter's code does not
11314 match the specified code, return false. Otherwise continue
11315 checking any remaining arguments. */
11316 arg = gimple_call_arg (call, i++);
11317 if (!validate_arg (arg, code))
11318 goto end;
11319 break;
11320 }
11321 }
11322 while (1);
11323
11324 /* We need gotos here since we can only have one VA_CLOSE in a
11325 function. */
11326 end: ;
11327 va_end (ap);
11328
11329 return res;
11330 }
11331
11332 /* This function validates the types of a function call argument list
11333 against a specified list of tree_codes. If the last specifier is a 0,
11334 that represents an ellipses, otherwise the last specifier must be a
11335 VOID_TYPE. */
11336
11337 bool
11338 validate_arglist (const_tree callexpr, ...)
11339 {
11340 enum tree_code code;
11341 bool res = 0;
11342 va_list ap;
11343 const_call_expr_arg_iterator iter;
11344 const_tree arg;
11345
11346 va_start (ap, callexpr);
11347 init_const_call_expr_arg_iterator (callexpr, &iter);
11348
11349 do
11350 {
11351 code = (enum tree_code) va_arg (ap, int);
11352 switch (code)
11353 {
11354 case 0:
11355 /* This signifies an ellipses, any further arguments are all ok. */
11356 res = true;
11357 goto end;
11358 case VOID_TYPE:
11359 /* This signifies an endlink, if no arguments remain, return
11360 true, otherwise return false. */
11361 res = !more_const_call_expr_args_p (&iter);
11362 goto end;
11363 default:
11364 /* If no parameters remain or the parameter's code does not
11365 match the specified code, return false. Otherwise continue
11366 checking any remaining arguments. */
11367 arg = next_const_call_expr_arg (&iter);
11368 if (!validate_arg (arg, code))
11369 goto end;
11370 break;
11371 }
11372 }
11373 while (1);
11374
11375 /* We need gotos here since we can only have one VA_CLOSE in a
11376 function. */
11377 end: ;
11378 va_end (ap);
11379
11380 return res;
11381 }
11382
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  /* Returning NULL_RTX signals that the target provides no expansion
     for this builtin.  */
  return NULL_RTX;
}
11394
11395 /* Returns true is EXP represents data that would potentially reside
11396 in a readonly section. */
11397
11398 static bool
11399 readonly_data_expr (tree exp)
11400 {
11401 STRIP_NOPS (exp);
11402
11403 if (TREE_CODE (exp) != ADDR_EXPR)
11404 return false;
11405
11406 exp = get_base_address (TREE_OPERAND (exp, 0));
11407 if (!exp)
11408 return false;
11409
11410 /* Make sure we call decl_readonly_section only for trees it
11411 can handle (since it returns true for everything it doesn't
11412 understand). */
11413 if (TREE_CODE (exp) == STRING_CST
11414 || TREE_CODE (exp) == CONSTRUCTOR
11415 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11416 return decl_readonly_section (exp, 0);
11417 else
11418 return false;
11419 }
11420
11421 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11422 to the call, and TYPE is its return type.
11423
11424 Return NULL_TREE if no simplification was possible, otherwise return the
11425 simplified form of the call as a tree.
11426
11427 The simplified form may be a constant or other expression which
11428 computes the same value, but in a more efficient manner (including
11429 calls to other builtin functions).
11430
11431 The call may contain arguments which need to be evaluated, but
11432 which are not useful to determine the result of the call. In
11433 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11434 COMPOUND_EXPR will be an argument which must be evaluated.
11435 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11436 COMPOUND_EXPR in the chain will contain the tree for the simplified
11437 form of the builtin function call. */
11438
11439 static tree
11440 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11441 {
11442 if (!validate_arg (s1, POINTER_TYPE)
11443 || !validate_arg (s2, POINTER_TYPE))
11444 return NULL_TREE;
11445 else
11446 {
11447 tree fn;
11448 const char *p1, *p2;
11449
11450 p2 = c_getstr (s2);
11451 if (p2 == NULL)
11452 return NULL_TREE;
11453
11454 p1 = c_getstr (s1);
11455 if (p1 != NULL)
11456 {
11457 const char *r = strstr (p1, p2);
11458 tree tem;
11459
11460 if (r == NULL)
11461 return build_int_cst (TREE_TYPE (s1), 0);
11462
11463 /* Return an offset into the constant string argument. */
11464 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11465 return fold_convert_loc (loc, type, tem);
11466 }
11467
11468 /* The argument is const char *, and the result is char *, so we need
11469 a type conversion here to avoid a warning. */
11470 if (p2[0] == '\0')
11471 return fold_convert_loc (loc, type, s1);
11472
11473 if (p2[1] != '\0')
11474 return NULL_TREE;
11475
11476 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11477 if (!fn)
11478 return NULL_TREE;
11479
11480 /* New argument list transforming strstr(s1, s2) to
11481 strchr(s1, s2[0]). */
11482 return build_call_expr_loc (loc, fn, 2, s1,
11483 build_int_cst (integer_type_node, p2[0]));
11484 }
11485 }
11486
11487 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11488 the call, and TYPE is its return type.
11489
11490 Return NULL_TREE if no simplification was possible, otherwise return the
11491 simplified form of the call as a tree.
11492
11493 The simplified form may be a constant or other expression which
11494 computes the same value, but in a more efficient manner (including
11495 calls to other builtin functions).
11496
11497 The call may contain arguments which need to be evaluated, but
11498 which are not useful to determine the result of the call. In
11499 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11500 COMPOUND_EXPR will be an argument which must be evaluated.
11501 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11502 COMPOUND_EXPR in the chain will contain the tree for the simplified
11503 form of the builtin function call. */
11504
11505 static tree
11506 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11507 {
11508 if (!validate_arg (s1, POINTER_TYPE)
11509 || !validate_arg (s2, INTEGER_TYPE))
11510 return NULL_TREE;
11511 else
11512 {
11513 const char *p1;
11514
11515 if (TREE_CODE (s2) != INTEGER_CST)
11516 return NULL_TREE;
11517
11518 p1 = c_getstr (s1);
11519 if (p1 != NULL)
11520 {
11521 char c;
11522 const char *r;
11523 tree tem;
11524
11525 if (target_char_cast (s2, &c))
11526 return NULL_TREE;
11527
11528 r = strchr (p1, c);
11529
11530 if (r == NULL)
11531 return build_int_cst (TREE_TYPE (s1), 0);
11532
11533 /* Return an offset into the constant string argument. */
11534 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11535 return fold_convert_loc (loc, type, tem);
11536 }
11537 return NULL_TREE;
11538 }
11539 }
11540
11541 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11542 the call, and TYPE is its return type.
11543
11544 Return NULL_TREE if no simplification was possible, otherwise return the
11545 simplified form of the call as a tree.
11546
11547 The simplified form may be a constant or other expression which
11548 computes the same value, but in a more efficient manner (including
11549 calls to other builtin functions).
11550
11551 The call may contain arguments which need to be evaluated, but
11552 which are not useful to determine the result of the call. In
11553 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11554 COMPOUND_EXPR will be an argument which must be evaluated.
11555 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11556 COMPOUND_EXPR in the chain will contain the tree for the simplified
11557 form of the builtin function call. */
11558
11559 static tree
11560 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11561 {
11562 if (!validate_arg (s1, POINTER_TYPE)
11563 || !validate_arg (s2, INTEGER_TYPE))
11564 return NULL_TREE;
11565 else
11566 {
11567 tree fn;
11568 const char *p1;
11569
11570 if (TREE_CODE (s2) != INTEGER_CST)
11571 return NULL_TREE;
11572
11573 p1 = c_getstr (s1);
11574 if (p1 != NULL)
11575 {
11576 char c;
11577 const char *r;
11578 tree tem;
11579
11580 if (target_char_cast (s2, &c))
11581 return NULL_TREE;
11582
11583 r = strrchr (p1, c);
11584
11585 if (r == NULL)
11586 return build_int_cst (TREE_TYPE (s1), 0);
11587
11588 /* Return an offset into the constant string argument. */
11589 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11590 return fold_convert_loc (loc, type, tem);
11591 }
11592
11593 if (! integer_zerop (s2))
11594 return NULL_TREE;
11595
11596 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11597 if (!fn)
11598 return NULL_TREE;
11599
11600 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11601 return build_call_expr_loc (loc, fn, 2, s1, s2);
11602 }
11603 }
11604
11605 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11606 to the call, and TYPE is its return type.
11607
11608 Return NULL_TREE if no simplification was possible, otherwise return the
11609 simplified form of the call as a tree.
11610
11611 The simplified form may be a constant or other expression which
11612 computes the same value, but in a more efficient manner (including
11613 calls to other builtin functions).
11614
11615 The call may contain arguments which need to be evaluated, but
11616 which are not useful to determine the result of the call. In
11617 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11618 COMPOUND_EXPR will be an argument which must be evaluated.
11619 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11620 COMPOUND_EXPR in the chain will contain the tree for the simplified
11621 form of the builtin function call. */
11622
11623 static tree
11624 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11625 {
11626 if (!validate_arg (s1, POINTER_TYPE)
11627 || !validate_arg (s2, POINTER_TYPE))
11628 return NULL_TREE;
11629 else
11630 {
11631 tree fn;
11632 const char *p1, *p2;
11633
11634 p2 = c_getstr (s2);
11635 if (p2 == NULL)
11636 return NULL_TREE;
11637
11638 p1 = c_getstr (s1);
11639 if (p1 != NULL)
11640 {
11641 const char *r = strpbrk (p1, p2);
11642 tree tem;
11643
11644 if (r == NULL)
11645 return build_int_cst (TREE_TYPE (s1), 0);
11646
11647 /* Return an offset into the constant string argument. */
11648 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11649 return fold_convert_loc (loc, type, tem);
11650 }
11651
11652 if (p2[0] == '\0')
11653 /* strpbrk(x, "") == NULL.
11654 Evaluate and ignore s1 in case it had side-effects. */
11655 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11656
11657 if (p2[1] != '\0')
11658 return NULL_TREE; /* Really call strpbrk. */
11659
11660 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11661 if (!fn)
11662 return NULL_TREE;
11663
11664 /* New argument list transforming strpbrk(s1, s2) to
11665 strchr(s1, s2[0]). */
11666 return build_call_expr_loc (loc, fn, 2, s1,
11667 build_int_cst (integer_type_node, p2[0]));
11668 }
11669 }
11670
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return dst;

      if (optimize_insn_for_speed_p ())
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newdst, call;
	  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
	  tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);

	  if (!strlen_fn || !strcpy_fn)
	    return NULL_TREE;

	  /* If we don't have a movstr we don't want to emit an strcpy
	     call.  We have to do that if the length of the source string
	     isn't computable (in that case we can use memcpy probably
	     later expanding to a sequence of mov instructions).  If we
	     have movstr instructions we can emit strcpy calls.  */
	  if (!HAVE_movstr)
	    {
	      tree len = c_strlen (src, 1);
	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;
	    }

	  /* Stabilize the argument list.  */
	  dst = builtin_save_expr (dst);

	  /* Create strlen (dst).  */
	  newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
	  /* Save NEWDST too: it is used as the strcpy destination.  */
	  newdst = builtin_save_expr (newdst);

	  call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
	  /* The COMPOUND_EXPR evaluates the strcpy call and then yields
	     the (stabilized) DST, matching strcat's return value.  */
	  return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
	}
      return NULL_TREE;
    }
}
11741
11742 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11743 arguments to the call.
11744
11745 Return NULL_TREE if no simplification was possible, otherwise return the
11746 simplified form of the call as a tree.
11747
11748 The simplified form may be a constant or other expression which
11749 computes the same value, but in a more efficient manner (including
11750 calls to other builtin functions).
11751
11752 The call may contain arguments which need to be evaluated, but
11753 which are not useful to determine the result of the call. In
11754 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11755 COMPOUND_EXPR will be an argument which must be evaluated.
11756 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11757 COMPOUND_EXPR in the chain will contain the tree for the simplified
11758 form of the builtin function call. */
11759
11760 static tree
11761 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11762 {
11763 if (!validate_arg (dst, POINTER_TYPE)
11764 || !validate_arg (src, POINTER_TYPE)
11765 || !validate_arg (len, INTEGER_TYPE))
11766 return NULL_TREE;
11767 else
11768 {
11769 const char *p = c_getstr (src);
11770
11771 /* If the requested length is zero, or the src parameter string
11772 length is zero, return the dst parameter. */
11773 if (integer_zerop (len) || (p && *p == '\0'))
11774 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11775
11776 /* If the requested len is greater than or equal to the string
11777 length, call strcat. */
11778 if (TREE_CODE (len) == INTEGER_CST && p
11779 && compare_tree_int (len, strlen (p)) >= 0)
11780 {
11781 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11782
11783 /* If the replacement _DECL isn't initialized, don't do the
11784 transformation. */
11785 if (!fn)
11786 return NULL_TREE;
11787
11788 return build_call_expr_loc (loc, fn, 2, dst, src);
11789 }
11790 return NULL_TREE;
11791 }
11792 }
11793
11794 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11795 to the call.
11796
11797 Return NULL_TREE if no simplification was possible, otherwise return the
11798 simplified form of the call as a tree.
11799
11800 The simplified form may be a constant or other expression which
11801 computes the same value, but in a more efficient manner (including
11802 calls to other builtin functions).
11803
11804 The call may contain arguments which need to be evaluated, but
11805 which are not useful to determine the result of the call. In
11806 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11807 COMPOUND_EXPR will be an argument which must be evaluated.
11808 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11809 COMPOUND_EXPR in the chain will contain the tree for the simplified
11810 form of the builtin function call. */
11811
11812 static tree
11813 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11814 {
11815 if (!validate_arg (s1, POINTER_TYPE)
11816 || !validate_arg (s2, POINTER_TYPE))
11817 return NULL_TREE;
11818 else
11819 {
11820 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11821
11822 /* If both arguments are constants, evaluate at compile-time. */
11823 if (p1 && p2)
11824 {
11825 const size_t r = strspn (p1, p2);
11826 return build_int_cst (size_type_node, r);
11827 }
11828
11829 /* If either argument is "", return NULL_TREE. */
11830 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11831 /* Evaluate and ignore both arguments in case either one has
11832 side-effects. */
11833 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11834 s1, s2);
11835 return NULL_TREE;
11836 }
11837 }
11838
11839 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11840 to the call.
11841
11842 Return NULL_TREE if no simplification was possible, otherwise return the
11843 simplified form of the call as a tree.
11844
11845 The simplified form may be a constant or other expression which
11846 computes the same value, but in a more efficient manner (including
11847 calls to other builtin functions).
11848
11849 The call may contain arguments which need to be evaluated, but
11850 which are not useful to determine the result of the call. In
11851 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11852 COMPOUND_EXPR will be an argument which must be evaluated.
11853 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11854 COMPOUND_EXPR in the chain will contain the tree for the simplified
11855 form of the builtin function call. */
11856
11857 static tree
11858 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11859 {
11860 if (!validate_arg (s1, POINTER_TYPE)
11861 || !validate_arg (s2, POINTER_TYPE))
11862 return NULL_TREE;
11863 else
11864 {
11865 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11866
11867 /* If both arguments are constants, evaluate at compile-time. */
11868 if (p1 && p2)
11869 {
11870 const size_t r = strcspn (p1, p2);
11871 return build_int_cst (size_type_node, r);
11872 }
11873
11874 /* If the first argument is "", return NULL_TREE. */
11875 if (p1 && *p1 == '\0')
11876 {
11877 /* Evaluate and ignore argument s2 in case it has
11878 side-effects. */
11879 return omit_one_operand_loc (loc, size_type_node,
11880 size_zero_node, s2);
11881 }
11882
11883 /* If the second argument is "", return __builtin_strlen(s1). */
11884 if (p2 && *p2 == '\0')
11885 {
11886 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11887
11888 /* If the replacement _DECL isn't initialized, don't do the
11889 transformation. */
11890 if (!fn)
11891 return NULL_TREE;
11892
11893 return build_call_expr_loc (loc, fn, 1, s1);
11894 }
11895 return NULL_TREE;
11896 }
11897 }
11898
11899 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11900 to the call. IGNORE is true if the value returned
11901 by the builtin will be ignored. UNLOCKED is true is true if this
11902 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11903 the known length of the string. Return NULL_TREE if no simplification
11904 was possible. */
11905
11906 tree
11907 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11908 bool ignore, bool unlocked, tree len)
11909 {
11910 /* If we're using an unlocked function, assume the other unlocked
11911 functions exist explicitly. */
11912 tree const fn_fputc = (unlocked
11913 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11914 : builtin_decl_implicit (BUILT_IN_FPUTC));
11915 tree const fn_fwrite = (unlocked
11916 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11917 : builtin_decl_implicit (BUILT_IN_FWRITE));
11918
11919 /* If the return value is used, don't do the transformation. */
11920 if (!ignore)
11921 return NULL_TREE;
11922
11923 /* Verify the arguments in the original call. */
11924 if (!validate_arg (arg0, POINTER_TYPE)
11925 || !validate_arg (arg1, POINTER_TYPE))
11926 return NULL_TREE;
11927
11928 if (! len)
11929 len = c_strlen (arg0, 0);
11930
11931 /* Get the length of the string passed to fputs. If the length
11932 can't be determined, punt. */
11933 if (!len
11934 || TREE_CODE (len) != INTEGER_CST)
11935 return NULL_TREE;
11936
11937 switch (compare_tree_int (len, 1))
11938 {
11939 case -1: /* length is 0, delete the call entirely . */
11940 return omit_one_operand_loc (loc, integer_type_node,
11941 integer_zero_node, arg1);;
11942
11943 case 0: /* length is 1, call fputc. */
11944 {
11945 const char *p = c_getstr (arg0);
11946
11947 if (p != NULL)
11948 {
11949 if (fn_fputc)
11950 return build_call_expr_loc (loc, fn_fputc, 2,
11951 build_int_cst
11952 (integer_type_node, p[0]), arg1);
11953 else
11954 return NULL_TREE;
11955 }
11956 }
11957 /* FALLTHROUGH */
11958 case 1: /* length is greater than 1, call fwrite. */
11959 {
11960 /* If optimizing for size keep fputs. */
11961 if (optimize_function_for_size_p (cfun))
11962 return NULL_TREE;
11963 /* New argument list transforming fputs(string, stream) to
11964 fwrite(string, 1, len, stream). */
11965 if (fn_fwrite)
11966 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11967 size_one_node, len, arg1);
11968 else
11969 return NULL_TREE;
11970 }
11971 default:
11972 gcc_unreachable ();
11973 }
11974 return NULL_TREE;
11975 }
11976
11977 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11978 produced. False otherwise. This is done so that we don't output the error
11979 or warning twice or three times. */
11980
11981 bool
11982 fold_builtin_next_arg (tree exp, bool va_start_p)
11983 {
11984 tree fntype = TREE_TYPE (current_function_decl);
11985 int nargs = call_expr_nargs (exp);
11986 tree arg;
11987 /* There is good chance the current input_location points inside the
11988 definition of the va_start macro (perhaps on the token for
11989 builtin) in a system header, so warnings will not be emitted.
11990 Use the location in real source code. */
11991 source_location current_location =
11992 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11993 NULL);
11994
11995 if (!stdarg_p (fntype))
11996 {
11997 error ("%<va_start%> used in function with fixed args");
11998 return true;
11999 }
12000
12001 if (va_start_p)
12002 {
12003 if (va_start_p && (nargs != 2))
12004 {
12005 error ("wrong number of arguments to function %<va_start%>");
12006 return true;
12007 }
12008 arg = CALL_EXPR_ARG (exp, 1);
12009 }
12010 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12011 when we checked the arguments and if needed issued a warning. */
12012 else
12013 {
12014 if (nargs == 0)
12015 {
12016 /* Evidently an out of date version of <stdarg.h>; can't validate
12017 va_start's second argument, but can still work as intended. */
12018 warning_at (current_location,
12019 OPT_Wvarargs,
12020 "%<__builtin_next_arg%> called without an argument");
12021 return true;
12022 }
12023 else if (nargs > 1)
12024 {
12025 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12026 return true;
12027 }
12028 arg = CALL_EXPR_ARG (exp, 0);
12029 }
12030
12031 if (TREE_CODE (arg) == SSA_NAME)
12032 arg = SSA_NAME_VAR (arg);
12033
12034 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12035 or __builtin_next_arg (0) the first time we see it, after checking
12036 the arguments and if needed issuing a warning. */
12037 if (!integer_zerop (arg))
12038 {
12039 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12040
12041 /* Strip off all nops for the sake of the comparison. This
12042 is not quite the same as STRIP_NOPS. It does more.
12043 We must also strip off INDIRECT_EXPR for C++ reference
12044 parameters. */
12045 while (CONVERT_EXPR_P (arg)
12046 || TREE_CODE (arg) == INDIRECT_REF)
12047 arg = TREE_OPERAND (arg, 0);
12048 if (arg != last_parm)
12049 {
12050 /* FIXME: Sometimes with the tree optimizers we can get the
12051 not the last argument even though the user used the last
12052 argument. We just warn and set the arg to be the last
12053 argument so that we will get wrong-code because of
12054 it. */
12055 warning_at (current_location,
12056 OPT_Wvarargs,
12057 "second parameter of %<va_start%> not last named argument");
12058 }
12059
12060 /* Undefined by C99 7.15.1.4p4 (va_start):
12061 "If the parameter parmN is declared with the register storage
12062 class, with a function or array type, or with a type that is
12063 not compatible with the type that results after application of
12064 the default argument promotions, the behavior is undefined."
12065 */
12066 else if (DECL_REGISTER (arg))
12067 {
12068 warning_at (current_location,
12069 OPT_Wvarargs,
12070 "undefined behaviour when second parameter of "
12071 "%<va_start%> is declared with %<register%> storage");
12072 }
12073
12074 /* We want to verify the second parameter just once before the tree
12075 optimizers are run and then avoid keeping it in the tree,
12076 as otherwise we could warn even for correct code like:
12077 void foo (int i, ...)
12078 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12079 if (va_start_p)
12080 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12081 else
12082 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12083 }
12084 return false;
12085 }
12086
12087
12088 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12089 ORIG may be null if this is a 2-argument call. We don't attempt to
12090 simplify calls with more than 3 arguments.
12091
12092 Return NULL_TREE if no simplification was possible, otherwise return the
12093 simplified form of the call as a tree. If IGNORED is true, it means that
12094 the caller does not use the returned value of the function. */
12095
12096 static tree
12097 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12098 tree orig, int ignored)
12099 {
12100 tree call, retval;
12101 const char *fmt_str = NULL;
12102
12103 /* Verify the required arguments in the original call. We deal with two
12104 types of sprintf() calls: 'sprintf (str, fmt)' and
12105 'sprintf (dest, "%s", orig)'. */
12106 if (!validate_arg (dest, POINTER_TYPE)
12107 || !validate_arg (fmt, POINTER_TYPE))
12108 return NULL_TREE;
12109 if (orig && !validate_arg (orig, POINTER_TYPE))
12110 return NULL_TREE;
12111
12112 /* Check whether the format is a literal string constant. */
12113 fmt_str = c_getstr (fmt);
12114 if (fmt_str == NULL)
12115 return NULL_TREE;
12116
12117 call = NULL_TREE;
12118 retval = NULL_TREE;
12119
12120 if (!init_target_chars ())
12121 return NULL_TREE;
12122
12123 /* If the format doesn't contain % args or %%, use strcpy. */
12124 if (strchr (fmt_str, target_percent) == NULL)
12125 {
12126 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12127
12128 if (!fn)
12129 return NULL_TREE;
12130
12131 /* Don't optimize sprintf (buf, "abc", ptr++). */
12132 if (orig)
12133 return NULL_TREE;
12134
12135 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12136 'format' is known to contain no % formats. */
12137 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12138 if (!ignored)
12139 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12140 }
12141
12142 /* If the format is "%s", use strcpy if the result isn't used. */
12143 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12144 {
12145 tree fn;
12146 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12147
12148 if (!fn)
12149 return NULL_TREE;
12150
12151 /* Don't crash on sprintf (str1, "%s"). */
12152 if (!orig)
12153 return NULL_TREE;
12154
12155 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12156 if (!ignored)
12157 {
12158 retval = c_strlen (orig, 1);
12159 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12160 return NULL_TREE;
12161 }
12162 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12163 }
12164
12165 if (call && retval)
12166 {
12167 retval = fold_convert_loc
12168 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12169 retval);
12170 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12171 }
12172 else
12173 return call;
12174 }
12175
12176 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12177 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12178 attempt to simplify calls with more than 4 arguments.
12179
12180 Return NULL_TREE if no simplification was possible, otherwise return the
12181 simplified form of the call as a tree. If IGNORED is true, it means that
12182 the caller does not use the returned value of the function. */
12183
12184 static tree
12185 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12186 tree orig, int ignored)
12187 {
12188 tree call, retval;
12189 const char *fmt_str = NULL;
12190 unsigned HOST_WIDE_INT destlen;
12191
12192 /* Verify the required arguments in the original call. We deal with two
12193 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12194 'snprintf (dest, cst, "%s", orig)'. */
12195 if (!validate_arg (dest, POINTER_TYPE)
12196 || !validate_arg (destsize, INTEGER_TYPE)
12197 || !validate_arg (fmt, POINTER_TYPE))
12198 return NULL_TREE;
12199 if (orig && !validate_arg (orig, POINTER_TYPE))
12200 return NULL_TREE;
12201
12202 if (!host_integerp (destsize, 1))
12203 return NULL_TREE;
12204
12205 /* Check whether the format is a literal string constant. */
12206 fmt_str = c_getstr (fmt);
12207 if (fmt_str == NULL)
12208 return NULL_TREE;
12209
12210 call = NULL_TREE;
12211 retval = NULL_TREE;
12212
12213 if (!init_target_chars ())
12214 return NULL_TREE;
12215
12216 destlen = tree_low_cst (destsize, 1);
12217
12218 /* If the format doesn't contain % args or %%, use strcpy. */
12219 if (strchr (fmt_str, target_percent) == NULL)
12220 {
12221 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12222 size_t len = strlen (fmt_str);
12223
12224 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12225 if (orig)
12226 return NULL_TREE;
12227
12228 /* We could expand this as
12229 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12230 or to
12231 memcpy (str, fmt_with_nul_at_cstm1, cst);
12232 but in the former case that might increase code size
12233 and in the latter case grow .rodata section too much.
12234 So punt for now. */
12235 if (len >= destlen)
12236 return NULL_TREE;
12237
12238 if (!fn)
12239 return NULL_TREE;
12240
12241 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12242 'format' is known to contain no % formats and
12243 strlen (fmt) < cst. */
12244 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12245
12246 if (!ignored)
12247 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12248 }
12249
12250 /* If the format is "%s", use strcpy if the result isn't used. */
12251 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12252 {
12253 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12254 unsigned HOST_WIDE_INT origlen;
12255
12256 /* Don't crash on snprintf (str1, cst, "%s"). */
12257 if (!orig)
12258 return NULL_TREE;
12259
12260 retval = c_strlen (orig, 1);
12261 if (!retval || !host_integerp (retval, 1))
12262 return NULL_TREE;
12263
12264 origlen = tree_low_cst (retval, 1);
12265 /* We could expand this as
12266 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12267 or to
12268 memcpy (str1, str2_with_nul_at_cstm1, cst);
12269 but in the former case that might increase code size
12270 and in the latter case grow .rodata section too much.
12271 So punt for now. */
12272 if (origlen >= destlen)
12273 return NULL_TREE;
12274
12275 /* Convert snprintf (str1, cst, "%s", str2) into
12276 strcpy (str1, str2) if strlen (str2) < cst. */
12277 if (!fn)
12278 return NULL_TREE;
12279
12280 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12281
12282 if (ignored)
12283 retval = NULL_TREE;
12284 }
12285
12286 if (call && retval)
12287 {
12288 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12289 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12290 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12291 }
12292 else
12293 return call;
12294 }
12295
12296 /* Expand a call EXP to __builtin_object_size. */
12297
12298 rtx
12299 expand_builtin_object_size (tree exp)
12300 {
12301 tree ost;
12302 int object_size_type;
12303 tree fndecl = get_callee_fndecl (exp);
12304
12305 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12306 {
12307 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12308 exp, fndecl);
12309 expand_builtin_trap ();
12310 return const0_rtx;
12311 }
12312
12313 ost = CALL_EXPR_ARG (exp, 1);
12314 STRIP_NOPS (ost);
12315
12316 if (TREE_CODE (ost) != INTEGER_CST
12317 || tree_int_cst_sgn (ost) < 0
12318 || compare_tree_int (ost, 3) > 0)
12319 {
12320 error ("%Klast argument of %D is not integer constant between 0 and 3",
12321 exp, fndecl);
12322 expand_builtin_trap ();
12323 return const0_rtx;
12324 }
12325
12326 object_size_type = tree_low_cst (ost, 0);
12327
12328 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12329 }
12330
12331 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12332 FCODE is the BUILT_IN_* to use.
12333 Return NULL_RTX if we failed; the caller should emit a normal call,
12334 otherwise try to get the result in TARGET, if convenient (and in
12335 mode MODE if that's convenient). */
12336
static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* The _chk variants take (dest, src-or-int, len, size).  For memset
     the second argument is the fill byte, hence INTEGER_TYPE.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* SIZE (the __builtin_object_size result) must be a host-representable
     constant to reason about at all.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* A constant LEN larger than a known SIZE always overflows:
	 warn and leave the checked call in place.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Expand the unchecked variant instead; preserve tail-call
	 status from the original call.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN rather than DEST.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
12450
12451 /* Emit warning if a buffer overflow is detected at compile time. */
12452
static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  /* Locate the length-like argument and the object-size argument;
     their positions differ between the _chk builtins.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* A SIZE of (size_t) -1 means __builtin_object_size could not
     determine the object size, so nothing can be diagnosed.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* Here LEN is the source string; warn only when its constant
	 length is >= SIZE (i.e. length plus the NUL exceeds SIZE).  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* The bound exceeds SIZE but the source length is unknown:
	     the overflow is only possible, not certain.  */
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
12519
12520 /* Emit warning if a buffer overflow is detected at compile time
12521 in __sprintf_chk/__vsprintf_chk calls. */
12522
static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* A SIZE of (size_t) -1 means the object size is unknown; nothing
     can be diagnosed then.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
	return;
    }
  else
    return;

  /* Warn when the known output length LEN (not counting the NUL) does
     not fit strictly below SIZE.  */
  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
12576
12577 /* Emit warning if a free is called with address of a variable. */
12578
12579 static void
12580 maybe_emit_free_warning (tree exp)
12581 {
12582 tree arg = CALL_EXPR_ARG (exp, 0);
12583
12584 STRIP_NOPS (arg);
12585 if (TREE_CODE (arg) != ADDR_EXPR)
12586 return;
12587
12588 arg = get_base_address (TREE_OPERAND (arg, 0));
12589 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12590 return;
12591
12592 if (SSA_VAR_P (arg))
12593 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12594 "%Kattempt to free a non-heap object %qD", exp, arg);
12595 else
12596 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12597 "%Kattempt to free a non-heap object", exp);
12598 }
12599
12600 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12601 if possible. */
12602
tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The object size type must be a constant in the range [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      /* Address of a known object: fold now if the size fits size_t.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (double_int_fits_to_tree_p (size_type_node,
				     double_int::from_uhwi (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
	  && double_int_fits_to_tree_p (size_type_node,
					double_int::from_uhwi (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
12649
12650 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12651 DEST, SRC, LEN, and SIZE are the arguments to the call.
12652 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12653 code of the builtin. If MAXLEN is not NULL, it is maximum length
12654 passed as third argument. */
12655
tree
fold_builtin_memory_chk (location_t loc, tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* For memset the second argument is the fill value (an integer);
     for the copy/move variants it is a pointer.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				     dest, len);
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  /* SIZE must be a host-representable constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* A SIZE of (size_t) -1 means the object size is unknown, in which
     case the check can never fail and the call is always droppable;
     otherwise prove LEN (or MAXLEN) fits in SIZE first.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      /* Overflow is possible: keep the checked variant.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
12744
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  Returns the folded replacement
   tree, or NULL_TREE if a normal call should be emitted.  */

tree
fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
			 tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE of all-ones means the object size is unknown; skip the length
     comparison and go straight to the unchecked replacement below.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 3, dest, src, size);
		}

	      /* Here LEN may be non-NULL but non-constant; reject it if
		 evaluating it would have side effects.  */
	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_TREE;

	      /* Copy strlen (SRC) + 1 bytes to include the terminator.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				       build_call_expr_loc (loc, fn, 4,
							    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      /* MAXLEN is strlen-like (no terminator), so require MAXLEN < SIZE
	 to leave room for the trailing NUL.  */
      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}
12827
12828 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12829 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12830 length passed as third argument. IGNORE is true if return value can be
12831 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12832
12833 tree
12834 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12835 tree len, tree size, tree maxlen, bool ignore,
12836 enum built_in_function fcode)
12837 {
12838 tree fn;
12839
12840 if (!validate_arg (dest, POINTER_TYPE)
12841 || !validate_arg (src, POINTER_TYPE)
12842 || !validate_arg (len, INTEGER_TYPE)
12843 || !validate_arg (size, INTEGER_TYPE))
12844 return NULL_TREE;
12845
12846 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12847 {
12848 /* If return value of __stpncpy_chk is ignored,
12849 optimize into __strncpy_chk. */
12850 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12851 if (fn)
12852 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12853 }
12854
12855 if (! host_integerp (size, 1))
12856 return NULL_TREE;
12857
12858 if (! integer_all_onesp (size))
12859 {
12860 if (! host_integerp (len, 1))
12861 {
12862 /* If LEN is not constant, try MAXLEN too.
12863 For MAXLEN only allow optimizing into non-_ocs function
12864 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12865 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12866 return NULL_TREE;
12867 }
12868 else
12869 maxlen = len;
12870
12871 if (tree_int_cst_lt (size, maxlen))
12872 return NULL_TREE;
12873 }
12874
12875 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12876 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12877 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12878 if (!fn)
12879 return NULL_TREE;
12880
12881 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12882 }
12883
12884 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12885 are the arguments to the call. */
12886
12887 static tree
12888 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12889 tree src, tree size)
12890 {
12891 tree fn;
12892 const char *p;
12893
12894 if (!validate_arg (dest, POINTER_TYPE)
12895 || !validate_arg (src, POINTER_TYPE)
12896 || !validate_arg (size, INTEGER_TYPE))
12897 return NULL_TREE;
12898
12899 p = c_getstr (src);
12900 /* If the SRC parameter is "", return DEST. */
12901 if (p && *p == '\0')
12902 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12903
12904 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12905 return NULL_TREE;
12906
12907 /* If __builtin_strcat_chk is used, assume strcat is available. */
12908 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12909 if (!fn)
12910 return NULL_TREE;
12911
12912 return build_call_expr_loc (loc, fn, 2, dest, src);
12913 }
12914
12915 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12916 LEN, and SIZE. */
12917
12918 static tree
12919 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12920 tree dest, tree src, tree len, tree size)
12921 {
12922 tree fn;
12923 const char *p;
12924
12925 if (!validate_arg (dest, POINTER_TYPE)
12926 || !validate_arg (src, POINTER_TYPE)
12927 || !validate_arg (size, INTEGER_TYPE)
12928 || !validate_arg (size, INTEGER_TYPE))
12929 return NULL_TREE;
12930
12931 p = c_getstr (src);
12932 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12933 if (p && *p == '\0')
12934 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12935 else if (integer_zerop (len))
12936 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12937
12938 if (! host_integerp (size, 1))
12939 return NULL_TREE;
12940
12941 if (! integer_all_onesp (size))
12942 {
12943 tree src_len = c_strlen (src, 1);
12944 if (src_len
12945 && host_integerp (src_len, 1)
12946 && host_integerp (len, 1)
12947 && ! tree_int_cst_lt (len, src_len))
12948 {
12949 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12950 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
12951 if (!fn)
12952 return NULL_TREE;
12953
12954 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12955 }
12956 return NULL_TREE;
12957 }
12958
12959 /* If __builtin_strncat_chk is used, assume strncat is available. */
12960 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
12961 if (!fn)
12962 return NULL_TREE;
12963
12964 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12965 }
12966
/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
   Return NULL_TREE if a normal call should be emitted rather than
   expanding the function inline.  FCODE is either BUILT_IN_SPRINTF_CHK
   or BUILT_IN_VSPRINTF_CHK.
   The argument layout is (dest, flag, size, fmt, ...).  */

static tree
fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
			    enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return NULL_TREE;
  dest = args[0];
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = args[1];
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = args[2];
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = args[3];
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* LEN, when determinable, is the number of characters the call will
     write (excluding the terminating NUL).  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* For sprintf_chk only accept this when no varargs follow.  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = args[4];
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless SIZE is "unknown" (all-ones), require a known LEN strictly
     smaller than SIZE (leaving room for the NUL terminator).  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments: keep dest, fmt and any varargs.  */
  return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
}
13058
13059 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13060 a normal call should be emitted rather than expanding the function
13061 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13062
13063 static tree
13064 fold_builtin_sprintf_chk (location_t loc, tree exp,
13065 enum built_in_function fcode)
13066 {
13067 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13068 CALL_EXPR_ARGP (exp), fcode);
13069 }
13070
/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS.  Return
   NULL_TREE if a normal call should be emitted rather than expanding
   the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.
   The argument layout is (dest, len, flag, size, fmt, ...).  */

static tree
fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
			     tree maxlen, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (nargs < 5)
    return NULL_TREE;
  dest = args[0];
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = args[1];
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = args[2];
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = args[3];
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = args[4];
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* Unless SIZE is "unknown" (all-ones), prove LEN (or MAXLEN as an
     upper bound) fits in SIZE before dropping the check.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments: keep dest, len, fmt and varargs.  */
  return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
}
13147
13148 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13149 a normal call should be emitted rather than expanding the function
13150 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13151 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13152 passed as second argument. */
13153
13154 tree
13155 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13156 enum built_in_function fcode)
13157 {
13158 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13159 CALL_EXPR_ARGP (exp), maxlen, fcode);
13160 }
13161
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
		     tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation:
     putchar/puts do not return the printf character count.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Case 1: format is exactly "%s", or contains no '%' at all.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* vprintf ("%s", ap) can't be folded; the string argument is
	     hidden inside the va_list.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		call = build_call_expr_loc (loc, fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  /* CALL stays NULL when the needed replacement builtin is unavailable.  */
  if (!call)
    return NULL_TREE;

  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
13310
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
		      tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation:
     fputs/fputc do not return the fprintf character count.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* With no conversions in the format, a trailing ARG is only
	 acceptable for the va_list variants (it's the va_list).  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  /* CALL stays NULL when the needed replacement builtin is unavailable.  */
  if (!call)
    return NULL_TREE;
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
13409
13410 /* Initialize format string characters in the target charset. */
13411
13412 static bool
13413 init_target_chars (void)
13414 {
13415 static bool init;
13416 if (!init)
13417 {
13418 target_newline = lang_hooks.to_target_charset ('\n');
13419 target_percent = lang_hooks.to_target_charset ('%');
13420 target_c = lang_hooks.to_target_charset ('c');
13421 target_s = lang_hooks.to_target_charset ('s');
13422 if (target_newline == 0 || target_percent == 0 || target_c == 0
13423 || target_s == 0)
13424 return false;
13425
13426 target_percent_c[0] = target_percent;
13427 target_percent_c[1] = target_c;
13428 target_percent_c[2] = '\0';
13429
13430 target_percent_s[0] = target_percent;
13431 target_percent_s[1] = target_s;
13432 target_percent_s[2] = '\0';
13433
13434 target_percent_s_newline[0] = target_percent;
13435 target_percent_s_newline[1] = target_s;
13436 target_percent_s_newline[2] = target_newline;
13437 target_percent_s_newline[3] = '\0';
13438
13439 init = true;
13440 }
13441 return true;
13442 }
13443
13444 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13445 and no overflow/underflow occurred. INEXACT is true if M was not
13446 exactly calculated. TYPE is the tree type for the result. This
13447 function assumes that you cleared the MPFR flags and then
13448 calculated M to see if anything subsequently set a flag prior to
13449 entering this function. Return NULL_TREE if any checks fail. */
13450
13451 static tree
13452 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13453 {
13454 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13455 overflow/underflow occurred. If -frounding-math, proceed iff the
13456 result of calling FUNC was exact. */
13457 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13458 && (!flag_rounding_math || !inexact))
13459 {
13460 REAL_VALUE_TYPE rr;
13461
13462 real_from_mpfr (&rr, m, type, GMP_RNDN);
13463 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13464 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13465 but the mpft_t is not, then we underflowed in the
13466 conversion. */
13467 if (real_isfinite (&rr)
13468 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13469 {
13470 REAL_VALUE_TYPE rmode;
13471
13472 real_convert (&rmode, TYPE_MODE (type), &rr);
13473 /* Proceed iff the specified mode can hold the value. */
13474 if (real_identical (&rmode, &rr))
13475 return build_real (type, rmode);
13476 }
13477 }
13478 return NULL_TREE;
13479 }
13480
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* TYPE is a complex type; its element type drives the conversion.  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
13527
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  Returns NULL_TREE when folding is not possible.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* Require a finite argument inside the [MIN, MAX] domain
	 (bounds open or closed depending on INCLUSIVE).  */
      if (real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  /* Evaluate FUNC at TYPE's precision; do_mpfr_ckconv rejects
	     results the target format cannot represent exactly.  */
	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
13574
13575 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13576 FUNC on it and return the resulting value as a tree with type TYPE.
13577 The mpfr precision is set to the precision of TYPE. We assume that
13578 function FUNC returns zero if the result could be calculated
13579 exactly within the requested precision. */
13580
13581 static tree
13582 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13583 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13584 {
13585 tree result = NULL_TREE;
13586
13587 STRIP_NOPS (arg1);
13588 STRIP_NOPS (arg2);
13589
13590 /* To proceed, MPFR must exactly represent the target floating point
13591 format, which only happens when the target base equals two. */
13592 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13593 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13594 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13595 {
13596 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13597 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13598
13599 if (real_isfinite (ra1) && real_isfinite (ra2))
13600 {
13601 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13602 const int prec = fmt->p;
13603 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13604 int inexact;
13605 mpfr_t m1, m2;
13606
13607 mpfr_inits2 (prec, m1, m2, NULL);
13608 mpfr_from_real (m1, ra1, GMP_RNDN);
13609 mpfr_from_real (m2, ra2, GMP_RNDN);
13610 mpfr_clear_flags ();
13611 inexact = func (m1, m1, m2, rnd);
13612 result = do_mpfr_ckconv (m1, type, inexact);
13613 mpfr_clears (m1, m2, NULL);
13614 }
13615 }
13616
13617 return result;
13618 }
13619
/* If argument ARG is a REAL_CST, call the three-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  Returns NULL_TREE when
   folding is not possible.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      /* All three operands must be finite (no NaN/Inf).  */
      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  /* Evaluate FUNC at TYPE's precision; do_mpfr_ckconv rejects
	     results the target format cannot represent exactly.  */
	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_from_real (m3, ra3, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
13668
13669 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13670 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13671 If ARG_SINP and ARG_COSP are NULL then the result is returned
13672 as a complex value.
13673 The type is taken from the type of ARG and is used for setting the
13674 precision of the calculation and results. */
13675
static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  /* Look through no-op conversions so a constant operand is visible.  */
  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* Only fold a finite operand.  */
      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  /* Mirror the target's rounding behavior.  */
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Reset sticky flags so do_mpfr_ckconv sees only exceptions
	     raised by mpfr_sin_cos.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  /* Both halves must convert exactly for the fold to be valid.  */
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  Note the
		 real part is the cosine and the imaginary part the sine
		 (the cexpi convention).  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
13738
13739 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13740 two-argument mpfr order N Bessel function FUNC on them and return
13741 the resulting value as a tree with type TYPE. The mpfr precision
13742 is set to the precision of TYPE. We assume that function FUNC
13743 returns zero if the result could be calculated exactly within the
13744 requested precision. */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  /* Look through no-op conversions so constant operands are visible.  */
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* The order must fit in a host 'long' (FUNC's parameter type),
	 the real argument must be finite, and if a MIN bound was
	 given, the argument must satisfy it (>= when INCLUSIVE,
	 otherwise strictly >).  */
      if (n == (long)n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  /* Mirror the target's rounding behavior.  */
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Reset sticky flags so do_mpfr_ckconv sees only exceptions
	     raised by FUNC.  */
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
13785
13786 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13787 the pointer *(ARG_QUO) and return the result. The type is taken
13788 from the type of ARG0 and is used for setting the precision of the
13789 calculation and results. */
13790
static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  /* Look through no-op conversions so constant operands are visible.  */
  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      /* Only fold finite operands.  */
      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  /* Mirror the target's rounding behavior.  */
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
	        {
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
13859
13860 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13861 resulting value as a tree with type TYPE. The mpfr precision is
13862 set to the precision of TYPE. We assume that this mpfr function
13863 returns zero if the result could be calculated exactly within the
13864 requested precision. In addition, the integer pointer represented
13865 by ARG_SG will be dereferenced and set to the appropriate signgam
13866 (-1,1) value. */
13867
static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  /* Look through no-op conversions so a constant operand is visible.  */
  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  /* Mirror the target's rounding behavior.  */
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Reset sticky flags so do_mpfr_ckconv sees only exceptions
	     raised by mpfr_lgamma.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
13924
13925 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13926 function FUNC on it and return the resulting value as a tree with
13927 type TYPE. The mpfr precision is set to the precision of TYPE. We
13928 assume that function FUNC returns zero if the result could be
13929 calculated exactly within the requested precision. */
13930
static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  /* Look through no-op conversions so a constant operand is visible.  */
  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* Only fold when both the real and imaginary parts are finite.  */
      if (real_isfinite (re) && real_isfinite (im))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  /* Mirror the target's rounding behavior; CRND applies the
	     same mode to both the real and imaginary components.  */
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref (m), re, rnd);
	  mpfr_from_real (mpc_imagref (m), im, rnd);
	  /* Reset sticky flags so do_mpc_ckconv sees only exceptions
	     raised by FUNC.  */
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
13969
13970 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13971 mpc function FUNC on it and return the resulting value as a tree
13972 with type TYPE. The mpfr precision is set to the precision of
13973 TYPE. We assume that function FUNC returns zero if the result
13974 could be calculated exactly within the requested precision. If
13975 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13976 in the arguments and/or results. */
13977
tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  /* Look through no-op conversions so constant operands are visible.  */
  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      /* DO_NONFINITE allows folding even when some component is NaN
	 or Inf; otherwise require all four components finite.  */
      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  /* Mirror the target's rounding behavior; CRND applies the
	     same mode to both components.  */
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  /* Reset sticky flags so do_mpc_ckconv sees only exceptions
	     raised by FUNC.  */
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
14028
14029 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14030 a normal call should be emitted rather than expanding the function
14031 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14032
14033 static tree
14034 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14035 {
14036 int nargs = gimple_call_num_args (stmt);
14037
14038 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14039 (nargs > 0
14040 ? gimple_call_arg_ptr (stmt, 0)
14041 : &error_mark_node), fcode);
14042 }
14043
14044 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14045 a normal call should be emitted rather than expanding the function
14046 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14047 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14048 passed as second argument. */
14049
14050 tree
14051 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14052 enum built_in_function fcode)
14053 {
14054 int nargs = gimple_call_num_args (stmt);
14055
14056 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14057 (nargs > 0
14058 ? gimple_call_arg_ptr (stmt, 0)
14059 : &error_mark_node), maxlen, fcode);
14060 }
14061
14062 /* Builtins with folding operations that operate on "..." arguments
14063 need special handling; we need to store the arguments in a convenient
14064 data structure before attempting any folding. Fortunately there are
14065 only a few builtins that fall into this category. FNDECL is the
14066 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14067 result of the function call is ignored. */
14068
14069 static tree
14070 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14071 bool ignore ATTRIBUTE_UNUSED)
14072 {
14073 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14074 tree ret = NULL_TREE;
14075
14076 switch (fcode)
14077 {
14078 case BUILT_IN_SPRINTF_CHK:
14079 case BUILT_IN_VSPRINTF_CHK:
14080 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14081 break;
14082
14083 case BUILT_IN_SNPRINTF_CHK:
14084 case BUILT_IN_VSNPRINTF_CHK:
14085 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14086
14087 default:
14088 break;
14089 }
14090 if (ret)
14091 {
14092 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14093 TREE_NO_WARNING (ret) = 1;
14094 return ret;
14095 }
14096 return NULL_TREE;
14097 }
14098
14099 /* A wrapper function for builtin folding that prevents warnings for
14100 "statement without effect" and the like, caused by removing the
14101 call node earlier than the warning is generated. */
14102
tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Only direct calls to builtins are foldable; calls containing
     __builtin_va_arg_pack must be kept for later expansion.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      /* For a zero-argument call, pass a pointer to error_mark_node as
	 a degenerate argument array.  */
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-dependent builtins are folded by the target hook.  */
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
	{
	  /* Try the fixed-arity folder first, then the varargs one.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (!ret)
	    ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the no-warning NOP_EXPR wrapper added
		     by the varargs folder.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
                  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
14153
14154 /* Look up the function in builtin_decl that corresponds to DECL
14155 and set ASMSPEC as its user assembler name. DECL must be a
14156 function decl that declares a builtin. */
14157
14158 void
14159 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14160 {
14161 tree builtin;
14162 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14163 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14164 && asmspec != 0);
14165
14166 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14167 set_user_assembler_name (builtin, asmspec);
14168 switch (DECL_FUNCTION_CODE (decl))
14169 {
14170 case BUILT_IN_MEMCPY:
14171 init_block_move_fn (asmspec);
14172 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14173 break;
14174 case BUILT_IN_MEMSET:
14175 init_block_clear_fn (asmspec);
14176 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14177 break;
14178 case BUILT_IN_MEMMOVE:
14179 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14180 break;
14181 case BUILT_IN_MEMCMP:
14182 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14183 break;
14184 case BUILT_IN_ABORT:
14185 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14186 break;
14187 case BUILT_IN_FFS:
14188 if (INT_TYPE_SIZE < BITS_PER_WORD)
14189 {
14190 set_user_assembler_libfunc ("ffs", asmspec);
14191 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14192 MODE_INT, 0), "ffs");
14193 }
14194 break;
14195 default:
14196 break;
14197 }
14198 }
14199
14200 /* Return true if DECL is a builtin that expands to a constant or similarly
14201 simple code. */
14202 bool
14203 is_simple_builtin (tree decl)
14204 {
14205 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14206 switch (DECL_FUNCTION_CODE (decl))
14207 {
14208 /* Builtins that expand to constants. */
14209 case BUILT_IN_CONSTANT_P:
14210 case BUILT_IN_EXPECT:
14211 case BUILT_IN_OBJECT_SIZE:
14212 case BUILT_IN_UNREACHABLE:
14213 /* Simple register moves or loads from stack. */
14214 case BUILT_IN_ASSUME_ALIGNED:
14215 case BUILT_IN_RETURN_ADDRESS:
14216 case BUILT_IN_EXTRACT_RETURN_ADDR:
14217 case BUILT_IN_FROB_RETURN_ADDR:
14218 case BUILT_IN_RETURN:
14219 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14220 case BUILT_IN_FRAME_ADDRESS:
14221 case BUILT_IN_VA_END:
14222 case BUILT_IN_STACK_SAVE:
14223 case BUILT_IN_STACK_RESTORE:
14224 /* Exception state returns or moves registers around. */
14225 case BUILT_IN_EH_FILTER:
14226 case BUILT_IN_EH_POINTER:
14227 case BUILT_IN_EH_COPY_VALUES:
14228 return true;
14229
14230 default:
14231 return false;
14232 }
14233
14234 return false;
14235 }
14236
14237 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14238 most probably expanded inline into reasonably simple code. This is a
14239 superset of is_simple_builtin. */
14240 bool
14241 is_inexpensive_builtin (tree decl)
14242 {
14243 if (!decl)
14244 return false;
14245 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14246 return true;
14247 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14248 switch (DECL_FUNCTION_CODE (decl))
14249 {
14250 case BUILT_IN_ABS:
14251 case BUILT_IN_ALLOCA:
14252 case BUILT_IN_ALLOCA_WITH_ALIGN:
14253 case BUILT_IN_BSWAP16:
14254 case BUILT_IN_BSWAP32:
14255 case BUILT_IN_BSWAP64:
14256 case BUILT_IN_CLZ:
14257 case BUILT_IN_CLZIMAX:
14258 case BUILT_IN_CLZL:
14259 case BUILT_IN_CLZLL:
14260 case BUILT_IN_CTZ:
14261 case BUILT_IN_CTZIMAX:
14262 case BUILT_IN_CTZL:
14263 case BUILT_IN_CTZLL:
14264 case BUILT_IN_FFS:
14265 case BUILT_IN_FFSIMAX:
14266 case BUILT_IN_FFSL:
14267 case BUILT_IN_FFSLL:
14268 case BUILT_IN_IMAXABS:
14269 case BUILT_IN_FINITE:
14270 case BUILT_IN_FINITEF:
14271 case BUILT_IN_FINITEL:
14272 case BUILT_IN_FINITED32:
14273 case BUILT_IN_FINITED64:
14274 case BUILT_IN_FINITED128:
14275 case BUILT_IN_FPCLASSIFY:
14276 case BUILT_IN_ISFINITE:
14277 case BUILT_IN_ISINF_SIGN:
14278 case BUILT_IN_ISINF:
14279 case BUILT_IN_ISINFF:
14280 case BUILT_IN_ISINFL:
14281 case BUILT_IN_ISINFD32:
14282 case BUILT_IN_ISINFD64:
14283 case BUILT_IN_ISINFD128:
14284 case BUILT_IN_ISNAN:
14285 case BUILT_IN_ISNANF:
14286 case BUILT_IN_ISNANL:
14287 case BUILT_IN_ISNAND32:
14288 case BUILT_IN_ISNAND64:
14289 case BUILT_IN_ISNAND128:
14290 case BUILT_IN_ISNORMAL:
14291 case BUILT_IN_ISGREATER:
14292 case BUILT_IN_ISGREATEREQUAL:
14293 case BUILT_IN_ISLESS:
14294 case BUILT_IN_ISLESSEQUAL:
14295 case BUILT_IN_ISLESSGREATER:
14296 case BUILT_IN_ISUNORDERED:
14297 case BUILT_IN_VA_ARG_PACK:
14298 case BUILT_IN_VA_ARG_PACK_LEN:
14299 case BUILT_IN_VA_COPY:
14300 case BUILT_IN_TRAP:
14301 case BUILT_IN_SAVEREGS:
14302 case BUILT_IN_POPCOUNTL:
14303 case BUILT_IN_POPCOUNTLL:
14304 case BUILT_IN_POPCOUNTIMAX:
14305 case BUILT_IN_POPCOUNT:
14306 case BUILT_IN_PARITYL:
14307 case BUILT_IN_PARITYLL:
14308 case BUILT_IN_PARITYIMAX:
14309 case BUILT_IN_PARITY:
14310 case BUILT_IN_LABS:
14311 case BUILT_IN_LLABS:
14312 case BUILT_IN_PREFETCH:
14313 return true;
14314
14315 default:
14316 return is_simple_builtin (decl);
14317 }
14318
14319 return false;
14320 }