/* Expand builtin functions.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "calls.h"
30 #include "varasm.h"
31 #include "tree-object-size.h"
32 #include "realmpfr.h"
33 #include "predict.h"
34 #include "vec.h"
35 #include "hashtab.h"
36 #include "hash-set.h"
37 #include "hard-reg-set.h"
38 #include "input.h"
39 #include "function.h"
40 #include "cfgrtl.h"
41 #include "basic-block.h"
42 #include "tree-ssa-alias.h"
43 #include "internal-fn.h"
44 #include "gimple-expr.h"
45 #include "is-a.h"
46 #include "gimple.h"
47 #include "flags.h"
48 #include "regs.h"
49 #include "except.h"
50 #include "insn-config.h"
51 #include "expr.h"
52 #include "optabs.h"
53 #include "libfuncs.h"
54 #include "recog.h"
55 #include "output.h"
56 #include "typeclass.h"
57 #include "tm_p.h"
58 #include "target.h"
59 #include "langhooks.h"
60 #include "tree-ssanames.h"
61 #include "tree-dfa.h"
62 #include "value-prof.h"
63 #include "diagnostic-core.h"
64 #include "builtins.h"
65 #include "asan.h"
66 #include "ubsan.h"
67 #include "cilk.h"
68
69
70 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
71
72 struct target_builtins default_target_builtins;
73 #if SWITCHABLE_TARGET
74 struct target_builtins *this_target_builtins = &default_target_builtins;
75 #endif
76
77 /* Define the names of the builtin function types and codes. */
78 const char *const built_in_class_names[BUILT_IN_LAST]
79 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
80
81 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
82 const char * built_in_names[(int) END_BUILTINS] =
83 {
84 #include "builtins.def"
85 };
86 #undef DEF_BUILTIN
87
/* Set up an array of _DECL trees, making sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
                                  enum built_in_function);

static unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   or, when Cilk Plus is enabled, if it is one of the Cilk runtime
   entry points __cilkrts_detach or __cilkrts_pop_frame.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
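
/* For example, is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__atomic_load_n") are true, while
   is_builtin_name ("memcpy") is false.  */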


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT in *ALIGNP and any bit-offset in *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT in *ALIGNP and any bit-offset in *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
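
/* For example, if the call above computes align == 32 and bitpos == 8
   (both in bits), the address is known to be one byte past a 4-byte
   boundary, so the strongest alignment that can be claimed for the
   object itself is bitpos & -bitpos == 8 bits, i.e. one byte.  */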

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
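
/* For example, for an expression like "hello" + 2, string_constant yields
   the STRING_CST "hello" with a constant offset of 2, so c_getstr returns
   a pointer to the substring "llo"; an offset past the end of the string
   constant yields 0.  */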

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
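
/* For example, on a little-endian target (BYTES_BIG_ENDIAN and
   WORDS_BIG_ENDIAN both false), c_readstr ("hi", SImode) yields the
   constant 0x6968: 'h' (0x68) lands in the low byte, 'i' (0x69) in the
   next one, and the bytes at and beyond the terminating NUL read as
   zero.  */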

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
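
/* For example, an SSA_NAME or a non-addressable PARM_DECL cannot change
   between the evaluations we care about here, so it is returned as-is;
   anything else gets wrapped in a SAVE_EXPR by save_expr.  */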

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
         previously set to the start of the virtual area corresponding to
         the stacked variables when we branched here and now needs to be
         adjusted to the actual hardware fp value.

         Assignments to virtual registers are converted by
         instantiate_virtual_regs into the corresponding assignment
         to the underlying register (fp in this case) that makes
         the original assignment true.
         So the following insn will actually be decrementing fp by
         STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
         Mark it used (so that the previous assignment remains live once
         the frame pointer is eliminated) and clobbered (to represent the
         implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one va_end in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
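
/* For example, the expansion of __builtin_prefetch below checks its call
   with validate_arglist (exp, POINTER_TYPE, 0), i.e. a pointer followed
   by anything, while __builtin_nonlocal_goto uses
   validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE) to
   require exactly two pointer arguments.  */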

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
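
/* So a source-level call such as __builtin_prefetch (p, 0, 3), or simply
   __builtin_prefetch (p), requests a read prefetch of *p with maximal
   temporal locality; it expands to the target's prefetch pattern when one
   exists and otherwise only evaluates P for its side effects.  */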

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
\f
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

1573 /* Perform an untyped call and save the state required to perform an
1574 untyped return of whatever value was returned by the given function. */
1575
1576 static rtx
1577 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1578 {
1579 int size, align, regno;
1580 machine_mode mode;
1581 rtx incoming_args, result, reg, dest, src;
1582 rtx_call_insn *call_insn;
1583 rtx old_stack_level = 0;
1584 rtx call_fusage = 0;
1585 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1586
1587 arguments = convert_memory_address (Pmode, arguments);
1588
1589 /* Create a block where the return registers can be saved. */
1590 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1591
1592 /* Fetch the arg pointer from the ARGUMENTS block. */
1593 incoming_args = gen_reg_rtx (Pmode);
1594 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1595 #ifndef STACK_GROWS_DOWNWARD
1596 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1597 incoming_args, 0, OPTAB_LIB_WIDEN);
1598 #endif
1599
1600 /* Push a new argument block and copy the arguments. Do not allow
1601 the (potential) memcpy call below to interfere with our stack
1602 manipulations. */
1603 do_pending_stack_adjust ();
1604 NO_DEFER_POP;
1605
1606 /* Save the stack with nonlocal if available. */
1607 #ifdef HAVE_save_stack_nonlocal
1608 if (HAVE_save_stack_nonlocal)
1609 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1610 else
1611 #endif
1612 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1613
1614 /* Allocate a block of memory onto the stack and copy the memory
1615 arguments to the outgoing arguments address. We can pass TRUE
1616 as the 4th argument because we just saved the stack pointer
1617 and will restore it right after the call. */
1618 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1619
1620 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1621 may have already set current_function_calls_alloca to true.
1622 current_function_calls_alloca won't be set if argsize is zero,
1623 so we have to guarantee need_drap is true here. */
1624 if (SUPPORTS_STACK_ALIGNMENT)
1625 crtl->need_drap = true;
1626
1627 dest = virtual_outgoing_args_rtx;
1628 #ifndef STACK_GROWS_DOWNWARD
1629 if (CONST_INT_P (argsize))
1630 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1631 else
1632 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1633 #endif
1634 dest = gen_rtx_MEM (BLKmode, dest);
1635 set_mem_align (dest, PARM_BOUNDARY);
1636 src = gen_rtx_MEM (BLKmode, incoming_args);
1637 set_mem_align (src, PARM_BOUNDARY);
1638 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1639
1640 /* Refer to the argument block. */
1641 apply_args_size ();
1642 arguments = gen_rtx_MEM (BLKmode, arguments);
1643 set_mem_align (arguments, PARM_BOUNDARY);
1644
1645 /* Walk past the arg-pointer and structure value address. */
1646 size = GET_MODE_SIZE (Pmode);
1647 if (struct_value)
1648 size += GET_MODE_SIZE (Pmode);
1649
1650 /* Restore each of the registers previously saved. Make USE insns
1651 for each of these registers for use in making the call. */
1652 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1653 if ((mode = apply_args_mode[regno]) != VOIDmode)
1654 {
1655 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1656 if (size % align != 0)
1657 size = CEIL (size, align) * align;
1658 reg = gen_rtx_REG (mode, regno);
1659 emit_move_insn (reg, adjust_address (arguments, mode, size));
1660 use_reg (&call_fusage, reg);
1661 size += GET_MODE_SIZE (mode);
1662 }
1663
1664 /* Restore the structure value address unless this is passed as an
1665 "invisible" first argument. */
1666 size = GET_MODE_SIZE (Pmode);
1667 if (struct_value)
1668 {
1669 rtx value = gen_reg_rtx (Pmode);
1670 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1671 emit_move_insn (struct_value, value);
1672 if (REG_P (struct_value))
1673 use_reg (&call_fusage, struct_value);
1674 size += GET_MODE_SIZE (Pmode);
1675 }
1676
1677 /* All arguments and registers used for the call are set up by now! */
1678 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1679
1680 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no
1681 work is needed, and we don't want to load it into a register as an
1682 optimization, because prepare_call_address already did that if needed. */
1683 if (GET_CODE (function) != SYMBOL_REF)
1684 function = memory_address (FUNCTION_MODE, function);
1685
1686 /* Generate the actual call instruction and save the return value. */
1687 #ifdef HAVE_untyped_call
1688 if (HAVE_untyped_call)
1689 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1690 result, result_vector (1, result)));
1691 else
1692 #endif
1693 #ifdef HAVE_call_value
1694 if (HAVE_call_value)
1695 {
1696 rtx valreg = 0;
1697
1698 /* Locate the unique return register. It is not possible to
1699 express a call that sets more than one return register using
1700 call_value; use untyped_call for that. In fact, untyped_call
1701 only needs to save the return registers in the given block. */
1702 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1703 if ((mode = apply_result_mode[regno]) != VOIDmode)
1704 {
1705 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1706
1707 valreg = gen_rtx_REG (mode, regno);
1708 }
1709
1710 emit_call_insn (GEN_CALL_VALUE (valreg,
1711 gen_rtx_MEM (FUNCTION_MODE, function),
1712 const0_rtx, NULL_RTX, const0_rtx));
1713
1714 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1715 }
1716 else
1717 #endif
1718 gcc_unreachable ();
1719
1720 /* Find the CALL insn we just emitted, and attach the register usage
1721 information. */
1722 call_insn = last_call_insn ();
1723 add_function_usage_to (call_insn, call_fusage);
1724
1725 /* Restore the stack. */
1726 #ifdef HAVE_save_stack_nonlocal
1727 if (HAVE_save_stack_nonlocal)
1728 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1729 else
1730 #endif
1731 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1732 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1733
1734 OK_DEFER_POP;
1735
1736 /* Return the address of the result block. */
1737 result = copy_addr_to_reg (XEXP (result, 0));
1738 return convert_memory_address (ptr_mode, result);
1739 }
1740
1741 /* Perform an untyped return. */
1742
1743 static void
1744 expand_builtin_return (rtx result)
1745 {
1746 int size, align, regno;
1747 machine_mode mode;
1748 rtx reg;
1749 rtx_insn *call_fusage = 0;
1750
1751 result = convert_memory_address (Pmode, result);
1752
1753 apply_result_size ();
1754 result = gen_rtx_MEM (BLKmode, result);
1755
1756 #ifdef HAVE_untyped_return
1757 if (HAVE_untyped_return)
1758 {
1759 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1760 emit_barrier ();
1761 return;
1762 }
1763 #endif
1764
1765 /* Restore the return value and note that each value is used. */
1766 size = 0;
1767 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1768 if ((mode = apply_result_mode[regno]) != VOIDmode)
1769 {
1770 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1771 if (size % align != 0)
1772 size = CEIL (size, align) * align;
1773 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1774 emit_move_insn (reg, adjust_address (result, mode, size));
1775
1776 push_to_sequence (call_fusage);
1777 emit_use (reg);
1778 call_fusage = get_insns ();
1779 end_sequence ();
1780 size += GET_MODE_SIZE (mode);
1781 }
1782
1783 /* Put the USE insns before the return. */
1784 emit_insn (call_fusage);
1785
1786 /* Return whatever value was restored by jumping directly to the end
1787 of the function. */
1788 expand_naked_return ();
1789 }
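
/* Editor's illustrative sketch, not part of builtins.c: how the three
   untyped-call builtins expanded above fit together in user code. The
   argument-block size of 64 is an arbitrary assumption; a real wrapper
   must pass a size large enough for the actual arguments. */

double target_fn (double, double);

double
wrapper (double a, double b)
{
  /* Snapshot the arg pointer, structure value address and registers. */
  void *args = __builtin_apply_args ();
  /* Re-issue the call with the same arguments. */
  void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
  /* Hand the saved return value back to our own caller. */
  __builtin_return (result);
}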
1790
1791 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1792
1793 static enum type_class
1794 type_to_class (tree type)
1795 {
1796 switch (TREE_CODE (type))
1797 {
1798 case VOID_TYPE: return void_type_class;
1799 case INTEGER_TYPE: return integer_type_class;
1800 case ENUMERAL_TYPE: return enumeral_type_class;
1801 case BOOLEAN_TYPE: return boolean_type_class;
1802 case POINTER_TYPE: return pointer_type_class;
1803 case REFERENCE_TYPE: return reference_type_class;
1804 case OFFSET_TYPE: return offset_type_class;
1805 case REAL_TYPE: return real_type_class;
1806 case COMPLEX_TYPE: return complex_type_class;
1807 case FUNCTION_TYPE: return function_type_class;
1808 case METHOD_TYPE: return method_type_class;
1809 case RECORD_TYPE: return record_type_class;
1810 case UNION_TYPE:
1811 case QUAL_UNION_TYPE: return union_type_class;
1812 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1813 ? string_type_class : array_type_class);
1814 case LANG_TYPE: return lang_type_class;
1815 default: return no_type_class;
1816 }
1817 }
1818
1819 /* Expand a call EXP to __builtin_classify_type. */
1820
1821 static rtx
1822 expand_builtin_classify_type (tree exp)
1823 {
1824 if (call_expr_nargs (exp))
1825 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1826 return GEN_INT (no_type_class);
1827 }
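
/* Editor's illustrative sketch, not part of builtins.c: since only the
   argument's type is inspected, each call below folds to a compile-time
   constant from enum type_class (typeclass.h). */

struct point { int x, y; };

void
classify_examples (void)
{
  struct point pt = { 0, 0 };
  int ic = __builtin_classify_type (1);    /* integer_type_class */
  int rc = __builtin_classify_type (1.0);  /* real_type_class */
  int pc = __builtin_classify_type (&pt);  /* pointer_type_class */
  int sc = __builtin_classify_type (pt);   /* record_type_class */
  (void) ic; (void) rc; (void) pc; (void) sc;
}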
1828
1829 /* This helper macro, meant to be used in mathfn_built_in below,
1830 determines which among a set of three builtin math functions is
1831 appropriate for a given type mode. The `F' and `L' cases are
1832 automatically generated from the `double' case. */
1833 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1834 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1835 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1836 fcodel = BUILT_IN_MATHFN##L ; break;
1837 /* Similar to above, but appends _R after any F/L suffix. */
1838 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1839 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1840 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1841 fcodel = BUILT_IN_MATHFN##L_R ; break;
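
/* For reference, CASE_MATHFN (BUILT_IN_SIN) expands, via token pasting,
   to:

   case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
     fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
     fcodel = BUILT_IN_SINL; break;  */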
1842
1843 /* Return the mathematical function equivalent to FN but operating directly on
1844 TYPE, if available. If IMPLICIT is true, use the implicit builtin declaration;
1845 otherwise use the explicit declaration. If we can't do the conversion,
1846 return zero. */
1847
1848 static tree
1849 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1850 {
1851 enum built_in_function fcode, fcodef, fcodel, fcode2;
1852
1853 switch (fn)
1854 {
1855 CASE_MATHFN (BUILT_IN_ACOS)
1856 CASE_MATHFN (BUILT_IN_ACOSH)
1857 CASE_MATHFN (BUILT_IN_ASIN)
1858 CASE_MATHFN (BUILT_IN_ASINH)
1859 CASE_MATHFN (BUILT_IN_ATAN)
1860 CASE_MATHFN (BUILT_IN_ATAN2)
1861 CASE_MATHFN (BUILT_IN_ATANH)
1862 CASE_MATHFN (BUILT_IN_CBRT)
1863 CASE_MATHFN (BUILT_IN_CEIL)
1864 CASE_MATHFN (BUILT_IN_CEXPI)
1865 CASE_MATHFN (BUILT_IN_COPYSIGN)
1866 CASE_MATHFN (BUILT_IN_COS)
1867 CASE_MATHFN (BUILT_IN_COSH)
1868 CASE_MATHFN (BUILT_IN_DREM)
1869 CASE_MATHFN (BUILT_IN_ERF)
1870 CASE_MATHFN (BUILT_IN_ERFC)
1871 CASE_MATHFN (BUILT_IN_EXP)
1872 CASE_MATHFN (BUILT_IN_EXP10)
1873 CASE_MATHFN (BUILT_IN_EXP2)
1874 CASE_MATHFN (BUILT_IN_EXPM1)
1875 CASE_MATHFN (BUILT_IN_FABS)
1876 CASE_MATHFN (BUILT_IN_FDIM)
1877 CASE_MATHFN (BUILT_IN_FLOOR)
1878 CASE_MATHFN (BUILT_IN_FMA)
1879 CASE_MATHFN (BUILT_IN_FMAX)
1880 CASE_MATHFN (BUILT_IN_FMIN)
1881 CASE_MATHFN (BUILT_IN_FMOD)
1882 CASE_MATHFN (BUILT_IN_FREXP)
1883 CASE_MATHFN (BUILT_IN_GAMMA)
1884 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1885 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1886 CASE_MATHFN (BUILT_IN_HYPOT)
1887 CASE_MATHFN (BUILT_IN_ILOGB)
1888 CASE_MATHFN (BUILT_IN_ICEIL)
1889 CASE_MATHFN (BUILT_IN_IFLOOR)
1890 CASE_MATHFN (BUILT_IN_INF)
1891 CASE_MATHFN (BUILT_IN_IRINT)
1892 CASE_MATHFN (BUILT_IN_IROUND)
1893 CASE_MATHFN (BUILT_IN_ISINF)
1894 CASE_MATHFN (BUILT_IN_J0)
1895 CASE_MATHFN (BUILT_IN_J1)
1896 CASE_MATHFN (BUILT_IN_JN)
1897 CASE_MATHFN (BUILT_IN_LCEIL)
1898 CASE_MATHFN (BUILT_IN_LDEXP)
1899 CASE_MATHFN (BUILT_IN_LFLOOR)
1900 CASE_MATHFN (BUILT_IN_LGAMMA)
1901 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1902 CASE_MATHFN (BUILT_IN_LLCEIL)
1903 CASE_MATHFN (BUILT_IN_LLFLOOR)
1904 CASE_MATHFN (BUILT_IN_LLRINT)
1905 CASE_MATHFN (BUILT_IN_LLROUND)
1906 CASE_MATHFN (BUILT_IN_LOG)
1907 CASE_MATHFN (BUILT_IN_LOG10)
1908 CASE_MATHFN (BUILT_IN_LOG1P)
1909 CASE_MATHFN (BUILT_IN_LOG2)
1910 CASE_MATHFN (BUILT_IN_LOGB)
1911 CASE_MATHFN (BUILT_IN_LRINT)
1912 CASE_MATHFN (BUILT_IN_LROUND)
1913 CASE_MATHFN (BUILT_IN_MODF)
1914 CASE_MATHFN (BUILT_IN_NAN)
1915 CASE_MATHFN (BUILT_IN_NANS)
1916 CASE_MATHFN (BUILT_IN_NEARBYINT)
1917 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1918 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1919 CASE_MATHFN (BUILT_IN_POW)
1920 CASE_MATHFN (BUILT_IN_POWI)
1921 CASE_MATHFN (BUILT_IN_POW10)
1922 CASE_MATHFN (BUILT_IN_REMAINDER)
1923 CASE_MATHFN (BUILT_IN_REMQUO)
1924 CASE_MATHFN (BUILT_IN_RINT)
1925 CASE_MATHFN (BUILT_IN_ROUND)
1926 CASE_MATHFN (BUILT_IN_SCALB)
1927 CASE_MATHFN (BUILT_IN_SCALBLN)
1928 CASE_MATHFN (BUILT_IN_SCALBN)
1929 CASE_MATHFN (BUILT_IN_SIGNBIT)
1930 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1931 CASE_MATHFN (BUILT_IN_SIN)
1932 CASE_MATHFN (BUILT_IN_SINCOS)
1933 CASE_MATHFN (BUILT_IN_SINH)
1934 CASE_MATHFN (BUILT_IN_SQRT)
1935 CASE_MATHFN (BUILT_IN_TAN)
1936 CASE_MATHFN (BUILT_IN_TANH)
1937 CASE_MATHFN (BUILT_IN_TGAMMA)
1938 CASE_MATHFN (BUILT_IN_TRUNC)
1939 CASE_MATHFN (BUILT_IN_Y0)
1940 CASE_MATHFN (BUILT_IN_Y1)
1941 CASE_MATHFN (BUILT_IN_YN)
1942
1943 default:
1944 return NULL_TREE;
1945 }
1946
1947 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1948 fcode2 = fcode;
1949 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1950 fcode2 = fcodef;
1951 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1952 fcode2 = fcodel;
1953 else
1954 return NULL_TREE;
1955
1956 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1957 return NULL_TREE;
1958
1959 return builtin_decl_explicit (fcode2);
1960 }
1961
1962 /* Like mathfn_built_in_1(), but always use the implicit builtin declaration. */
1963
1964 tree
1965 mathfn_built_in (tree type, enum built_in_function fn)
1966 {
1967 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1968 }
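
/* Editor's illustrative sketch, not part of builtins.c: mapping the
   double builtin onto its float counterpart. The result is the decl
   for BUILT_IN_SINF, or NULL_TREE if sinf may not be used implicitly
   on the target. */

tree sinf_decl = mathfn_built_in (float_type_node, BUILT_IN_SIN);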
1969
1970 /* If errno must be maintained, expand the RTL to check if the result,
1971 TARGET, of a built-in function call, EXP, is NaN, and if so set
1972 errno to EDOM. */
1973
1974 static void
1975 expand_errno_check (tree exp, rtx target)
1976 {
1977 rtx_code_label *lab = gen_label_rtx ();
1978
1979 /* Test the result; if it is NaN, set errno=EDOM because
1980 the argument was not in the domain. */
1981 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1982 NULL_RTX, NULL_RTX, lab,
1983 /* The jump is very likely. */
1984 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1985
1986 #ifdef TARGET_EDOM
1987 /* If this built-in doesn't throw an exception, set errno directly. */
1988 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1989 {
1990 #ifdef GEN_ERRNO_RTX
1991 rtx errno_rtx = GEN_ERRNO_RTX;
1992 #else
1993 rtx errno_rtx
1994 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1995 #endif
1996 emit_move_insn (errno_rtx,
1997 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1998 emit_label (lab);
1999 return;
2000 }
2001 #endif
2002
2003 /* Make sure the library call isn't expanded as a tail call. */
2004 CALL_EXPR_TAILCALL (exp) = 0;
2005
2006 /* We can't set errno=EDOM directly; let the library call do it.
2007 Pop the arguments right away in case the call gets deleted. */
2008 NO_DEFER_POP;
2009 expand_call (exp, target, 0);
2010 OK_DEFER_POP;
2011 emit_label (lab);
2012 }
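
/* Editor's note: in C terms, the check emitted above behaves roughly
   like the sketch below when TARGET_EDOM is defined and the builtin
   cannot throw; otherwise the library call is simply re-issued so that
   it sets errno itself.

     if (result != result)      (true only when RESULT is NaN)
       errno = EDOM;  */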
2013
2014 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2015 Return NULL_RTX if a normal call should be emitted rather than expanding
2016 the function in-line. EXP is the expression that is a call to the builtin
2017 function; if convenient, the result should be placed in TARGET.
2018 SUBTARGET may be used as the target for computing one of EXP's operands. */
2019
2020 static rtx
2021 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2022 {
2023 optab builtin_optab;
2024 rtx op0;
2025 rtx_insn *insns;
2026 tree fndecl = get_callee_fndecl (exp);
2027 machine_mode mode;
2028 bool errno_set = false;
2029 bool try_widening = false;
2030 tree arg;
2031
2032 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2033 return NULL_RTX;
2034
2035 arg = CALL_EXPR_ARG (exp, 0);
2036
2037 switch (DECL_FUNCTION_CODE (fndecl))
2038 {
2039 CASE_FLT_FN (BUILT_IN_SQRT):
2040 errno_set = ! tree_expr_nonnegative_p (arg);
2041 try_widening = true;
2042 builtin_optab = sqrt_optab;
2043 break;
2044 CASE_FLT_FN (BUILT_IN_EXP):
2045 errno_set = true; builtin_optab = exp_optab; break;
2046 CASE_FLT_FN (BUILT_IN_EXP10):
2047 CASE_FLT_FN (BUILT_IN_POW10):
2048 errno_set = true; builtin_optab = exp10_optab; break;
2049 CASE_FLT_FN (BUILT_IN_EXP2):
2050 errno_set = true; builtin_optab = exp2_optab; break;
2051 CASE_FLT_FN (BUILT_IN_EXPM1):
2052 errno_set = true; builtin_optab = expm1_optab; break;
2053 CASE_FLT_FN (BUILT_IN_LOGB):
2054 errno_set = true; builtin_optab = logb_optab; break;
2055 CASE_FLT_FN (BUILT_IN_LOG):
2056 errno_set = true; builtin_optab = log_optab; break;
2057 CASE_FLT_FN (BUILT_IN_LOG10):
2058 errno_set = true; builtin_optab = log10_optab; break;
2059 CASE_FLT_FN (BUILT_IN_LOG2):
2060 errno_set = true; builtin_optab = log2_optab; break;
2061 CASE_FLT_FN (BUILT_IN_LOG1P):
2062 errno_set = true; builtin_optab = log1p_optab; break;
2063 CASE_FLT_FN (BUILT_IN_ASIN):
2064 builtin_optab = asin_optab; break;
2065 CASE_FLT_FN (BUILT_IN_ACOS):
2066 builtin_optab = acos_optab; break;
2067 CASE_FLT_FN (BUILT_IN_TAN):
2068 builtin_optab = tan_optab; break;
2069 CASE_FLT_FN (BUILT_IN_ATAN):
2070 builtin_optab = atan_optab; break;
2071 CASE_FLT_FN (BUILT_IN_FLOOR):
2072 builtin_optab = floor_optab; break;
2073 CASE_FLT_FN (BUILT_IN_CEIL):
2074 builtin_optab = ceil_optab; break;
2075 CASE_FLT_FN (BUILT_IN_TRUNC):
2076 builtin_optab = btrunc_optab; break;
2077 CASE_FLT_FN (BUILT_IN_ROUND):
2078 builtin_optab = round_optab; break;
2079 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2080 builtin_optab = nearbyint_optab;
2081 if (flag_trapping_math)
2082 break;
2083 /* Else fall through and expand as rint. */
2084 CASE_FLT_FN (BUILT_IN_RINT):
2085 builtin_optab = rint_optab; break;
2086 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2087 builtin_optab = significand_optab; break;
2088 default:
2089 gcc_unreachable ();
2090 }
2091
2092 /* Make a suitable register to place result in. */
2093 mode = TYPE_MODE (TREE_TYPE (exp));
2094
2095 if (! flag_errno_math || ! HONOR_NANS (mode))
2096 errno_set = false;
2097
2098 /* Before working hard, check whether the instruction is available, but try
2099 to widen the mode for specific operations. */
2100 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2101 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2102 && (!errno_set || !optimize_insn_for_size_p ()))
2103 {
2104 rtx result = gen_reg_rtx (mode);
2105
2106 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2107 need to expand the argument again. This way, we will not perform
2108 side-effects more than once. */
2109 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2110
2111 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2112
2113 start_sequence ();
2114
2115 /* Compute into RESULT.
2116 Set RESULT to wherever the result comes back. */
2117 result = expand_unop (mode, builtin_optab, op0, result, 0);
2118
2119 if (result != 0)
2120 {
2121 if (errno_set)
2122 expand_errno_check (exp, result);
2123
2124 /* Output the entire sequence. */
2125 insns = get_insns ();
2126 end_sequence ();
2127 emit_insn (insns);
2128 return result;
2129 }
2130
2131 /* If we were unable to expand via the builtin, stop the sequence
2132 (without outputting the insns) and call the library function
2133 with the stabilized argument list. */
2134 end_sequence ();
2135 }
2136
2137 return expand_call (exp, target, target == const0_rtx);
2138 }
2139
2140 /* Expand a call to the builtin binary math functions (pow and atan2).
2141 Return NULL_RTX if a normal call should be emitted rather than expanding the
2142 function in-line. EXP is the expression that is a call to the builtin
2143 function; if convenient, the result should be placed in TARGET.
2144 SUBTARGET may be used as the target for computing one of EXP's
2145 operands. */
2146
2147 static rtx
2148 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2149 {
2150 optab builtin_optab;
2151 rtx op0, op1, result;
2152 rtx_insn *insns;
2153 int op1_type = REAL_TYPE;
2154 tree fndecl = get_callee_fndecl (exp);
2155 tree arg0, arg1;
2156 machine_mode mode;
2157 bool errno_set = true;
2158
2159 switch (DECL_FUNCTION_CODE (fndecl))
2160 {
2161 CASE_FLT_FN (BUILT_IN_SCALBN):
2162 CASE_FLT_FN (BUILT_IN_SCALBLN):
2163 CASE_FLT_FN (BUILT_IN_LDEXP):
2164 op1_type = INTEGER_TYPE;
2165 default:
2166 break;
2167 }
2168
2169 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2170 return NULL_RTX;
2171
2172 arg0 = CALL_EXPR_ARG (exp, 0);
2173 arg1 = CALL_EXPR_ARG (exp, 1);
2174
2175 switch (DECL_FUNCTION_CODE (fndecl))
2176 {
2177 CASE_FLT_FN (BUILT_IN_POW):
2178 builtin_optab = pow_optab; break;
2179 CASE_FLT_FN (BUILT_IN_ATAN2):
2180 builtin_optab = atan2_optab; break;
2181 CASE_FLT_FN (BUILT_IN_SCALB):
2182 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2183 return 0;
2184 builtin_optab = scalb_optab; break;
2185 CASE_FLT_FN (BUILT_IN_SCALBN):
2186 CASE_FLT_FN (BUILT_IN_SCALBLN):
2187 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2188 return 0;
2189 /* Fall through... */
2190 CASE_FLT_FN (BUILT_IN_LDEXP):
2191 builtin_optab = ldexp_optab; break;
2192 CASE_FLT_FN (BUILT_IN_FMOD):
2193 builtin_optab = fmod_optab; break;
2194 CASE_FLT_FN (BUILT_IN_REMAINDER):
2195 CASE_FLT_FN (BUILT_IN_DREM):
2196 builtin_optab = remainder_optab; break;
2197 default:
2198 gcc_unreachable ();
2199 }
2200
2201 /* Make a suitable register to place result in. */
2202 mode = TYPE_MODE (TREE_TYPE (exp));
2203
2204 /* Before working hard, check whether the instruction is available. */
2205 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2206 return NULL_RTX;
2207
2208 result = gen_reg_rtx (mode);
2209
2210 if (! flag_errno_math || ! HONOR_NANS (mode))
2211 errno_set = false;
2212
2213 if (errno_set && optimize_insn_for_size_p ())
2214 return 0;
2215
2216 /* Always stabilize the argument list. */
2217 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2218 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2219
2220 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2221 op1 = expand_normal (arg1);
2222
2223 start_sequence ();
2224
2225 /* Compute into RESULT.
2226 Set RESULT to wherever the result comes back. */
2227 result = expand_binop (mode, builtin_optab, op0, op1,
2228 result, 0, OPTAB_DIRECT);
2229
2230 /* If we were unable to expand via the builtin, stop the sequence
2231 (without outputting the insns) and call the library function
2232 with the stabilized argument list. */
2233 if (result == 0)
2234 {
2235 end_sequence ();
2236 return expand_call (exp, target, target == const0_rtx);
2237 }
2238
2239 if (errno_set)
2240 expand_errno_check (exp, result);
2241
2242 /* Output the entire sequence. */
2243 insns = get_insns ();
2244 end_sequence ();
2245 emit_insn (insns);
2246
2247 return result;
2248 }
2249
2250 /* Expand a call to the builtin ternary math functions (fma).
2251 Return NULL_RTX if a normal call should be emitted rather than expanding the
2252 function in-line. EXP is the expression that is a call to the builtin
2253 function; if convenient, the result should be placed in TARGET.
2254 SUBTARGET may be used as the target for computing one of EXP's
2255 operands. */
2256
2257 static rtx
2258 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2259 {
2260 optab builtin_optab;
2261 rtx op0, op1, op2, result;
2262 rtx_insn *insns;
2263 tree fndecl = get_callee_fndecl (exp);
2264 tree arg0, arg1, arg2;
2265 machine_mode mode;
2266
2267 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2268 return NULL_RTX;
2269
2270 arg0 = CALL_EXPR_ARG (exp, 0);
2271 arg1 = CALL_EXPR_ARG (exp, 1);
2272 arg2 = CALL_EXPR_ARG (exp, 2);
2273
2274 switch (DECL_FUNCTION_CODE (fndecl))
2275 {
2276 CASE_FLT_FN (BUILT_IN_FMA):
2277 builtin_optab = fma_optab; break;
2278 default:
2279 gcc_unreachable ();
2280 }
2281
2282 /* Make a suitable register to place result in. */
2283 mode = TYPE_MODE (TREE_TYPE (exp));
2284
2285 /* Before working hard, check whether the instruction is available. */
2286 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2287 return NULL_RTX;
2288
2289 result = gen_reg_rtx (mode);
2290
2291 /* Always stabilize the argument list. */
2292 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2293 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2294 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2295
2296 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2297 op1 = expand_normal (arg1);
2298 op2 = expand_normal (arg2);
2299
2300 start_sequence ();
2301
2302 /* Compute into RESULT.
2303 Set RESULT to wherever the result comes back. */
2304 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2305 result, 0);
2306
2307 /* If we were unable to expand via the builtin, stop the sequence
2308 (without outputting the insns) and call the library function
2309 with the stabilized argument list. */
2310 if (result == 0)
2311 {
2312 end_sequence ();
2313 return expand_call (exp, target, target == const0_rtx);
2314 }
2315
2316 /* Output the entire sequence. */
2317 insns = get_insns ();
2318 end_sequence ();
2319 emit_insn (insns);
2320
2321 return result;
2322 }
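
/* Editor's illustrative sketch, not part of builtins.c: when fma_optab
   has a handler for the mode, the call below becomes a single fused
   multiply-add insn, computing x * y + z with one rounding step
   instead of two. */

double
fused_mul_add (double x, double y, double z)
{
  return __builtin_fma (x, y, z);
}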
2323
2324 /* Expand a call to the builtin sin and cos math functions.
2325 Return NULL_RTX if a normal call should be emitted rather than expanding the
2326 function in-line. EXP is the expression that is a call to the builtin
2327 function; if convenient, the result should be placed in TARGET.
2328 SUBTARGET may be used as the target for computing one of EXP's
2329 operands. */
2330
2331 static rtx
2332 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2333 {
2334 optab builtin_optab;
2335 rtx op0;
2336 rtx_insn *insns;
2337 tree fndecl = get_callee_fndecl (exp);
2338 machine_mode mode;
2339 tree arg;
2340
2341 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2342 return NULL_RTX;
2343
2344 arg = CALL_EXPR_ARG (exp, 0);
2345
2346 switch (DECL_FUNCTION_CODE (fndecl))
2347 {
2348 CASE_FLT_FN (BUILT_IN_SIN):
2349 CASE_FLT_FN (BUILT_IN_COS):
2350 builtin_optab = sincos_optab; break;
2351 default:
2352 gcc_unreachable ();
2353 }
2354
2355 /* Make a suitable register to place result in. */
2356 mode = TYPE_MODE (TREE_TYPE (exp));
2357
2358 /* Check if the sincos insn is available; otherwise fall back
2359 to the sin or cos insn. */
2360 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2361 switch (DECL_FUNCTION_CODE (fndecl))
2362 {
2363 CASE_FLT_FN (BUILT_IN_SIN):
2364 builtin_optab = sin_optab; break;
2365 CASE_FLT_FN (BUILT_IN_COS):
2366 builtin_optab = cos_optab; break;
2367 default:
2368 gcc_unreachable ();
2369 }
2370
2371 /* Before working hard, check whether the instruction is available. */
2372 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2373 {
2374 rtx result = gen_reg_rtx (mode);
2375
2376 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2377 need to expand the argument again. This way, we will not perform
2378 side-effects more than once. */
2379 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2380
2381 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2382
2383 start_sequence ();
2384
2385 /* Compute into RESULT.
2386 Set RESULT to wherever the result comes back. */
2387 if (builtin_optab == sincos_optab)
2388 {
2389 int ok;
2390
2391 switch (DECL_FUNCTION_CODE (fndecl))
2392 {
2393 CASE_FLT_FN (BUILT_IN_SIN):
2394 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2395 break;
2396 CASE_FLT_FN (BUILT_IN_COS):
2397 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2398 break;
2399 default:
2400 gcc_unreachable ();
2401 }
2402 gcc_assert (ok);
2403 }
2404 else
2405 result = expand_unop (mode, builtin_optab, op0, result, 0);
2406
2407 if (result != 0)
2408 {
2409 /* Output the entire sequence. */
2410 insns = get_insns ();
2411 end_sequence ();
2412 emit_insn (insns);
2413 return result;
2414 }
2415
2416 /* If we were unable to expand via the builtin, stop the sequence
2417 (without outputting the insns) and call the library function
2418 with the stabilized argument list. */
2419 end_sequence ();
2420 }
2421
2422 return expand_call (exp, target, target == const0_rtx);
2423 }
2424
2425 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2426 return an RTL instruction code that implements the functionality.
2427 If that isn't possible or available, return CODE_FOR_nothing. */
2428
2429 static enum insn_code
2430 interclass_mathfn_icode (tree arg, tree fndecl)
2431 {
2432 bool errno_set = false;
2433 optab builtin_optab = unknown_optab;
2434 machine_mode mode;
2435
2436 switch (DECL_FUNCTION_CODE (fndecl))
2437 {
2438 CASE_FLT_FN (BUILT_IN_ILOGB):
2439 errno_set = true; builtin_optab = ilogb_optab; break;
2440 CASE_FLT_FN (BUILT_IN_ISINF):
2441 builtin_optab = isinf_optab; break;
2442 case BUILT_IN_ISNORMAL:
2443 case BUILT_IN_ISFINITE:
2444 CASE_FLT_FN (BUILT_IN_FINITE):
2445 case BUILT_IN_FINITED32:
2446 case BUILT_IN_FINITED64:
2447 case BUILT_IN_FINITED128:
2448 case BUILT_IN_ISINFD32:
2449 case BUILT_IN_ISINFD64:
2450 case BUILT_IN_ISINFD128:
2451 /* These builtins have no optabs (yet). */
2452 break;
2453 default:
2454 gcc_unreachable ();
2455 }
2456
2457 /* There's no easy way to detect the case we need to set EDOM. */
2458 if (flag_errno_math && errno_set)
2459 return CODE_FOR_nothing;
2460
2461 /* Optab mode depends on the mode of the input argument. */
2462 mode = TYPE_MODE (TREE_TYPE (arg));
2463
2464 if (builtin_optab)
2465 return optab_handler (builtin_optab, mode);
2466 return CODE_FOR_nothing;
2467 }
2468
2469 /* Expand a call to one of the builtin math functions that operate on
2470 a floating-point argument and output an integer result (ilogb, isinf,
2471 isnan, etc).
2472 Return 0 if a normal call should be emitted rather than expanding the
2473 function in-line. EXP is the expression that is a call to the builtin
2474 function; if convenient, the result should be placed in TARGET. */
2475
2476 static rtx
2477 expand_builtin_interclass_mathfn (tree exp, rtx target)
2478 {
2479 enum insn_code icode = CODE_FOR_nothing;
2480 rtx op0;
2481 tree fndecl = get_callee_fndecl (exp);
2482 machine_mode mode;
2483 tree arg;
2484
2485 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2486 return NULL_RTX;
2487
2488 arg = CALL_EXPR_ARG (exp, 0);
2489 icode = interclass_mathfn_icode (arg, fndecl);
2490 mode = TYPE_MODE (TREE_TYPE (arg));
2491
2492 if (icode != CODE_FOR_nothing)
2493 {
2494 struct expand_operand ops[1];
2495 rtx_insn *last = get_last_insn ();
2496 tree orig_arg = arg;
2497
2498 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2499 need to expand the argument again. This way, we will not perform
2500 side-effects more than once. */
2501 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2502
2503 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2504
2505 if (mode != GET_MODE (op0))
2506 op0 = convert_to_mode (mode, op0, 0);
2507
2508 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2509 if (maybe_legitimize_operands (icode, 0, 1, ops)
2510 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2511 return ops[0].value;
2512
2513 delete_insns_since (last);
2514 CALL_EXPR_ARG (exp, 0) = orig_arg;
2515 }
2516
2517 return NULL_RTX;
2518 }
2519
2520 /* Expand a call to the builtin sincos math function.
2521 Return NULL_RTX if a normal call should be emitted rather than expanding the
2522 function in-line. EXP is the expression that is a call to the builtin
2523 function. */
2524
2525 static rtx
2526 expand_builtin_sincos (tree exp)
2527 {
2528 rtx op0, op1, op2, target1, target2;
2529 machine_mode mode;
2530 tree arg, sinp, cosp;
2531 int result;
2532 location_t loc = EXPR_LOCATION (exp);
2533 tree alias_type, alias_off;
2534
2535 if (!validate_arglist (exp, REAL_TYPE,
2536 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2537 return NULL_RTX;
2538
2539 arg = CALL_EXPR_ARG (exp, 0);
2540 sinp = CALL_EXPR_ARG (exp, 1);
2541 cosp = CALL_EXPR_ARG (exp, 2);
2542
2543 /* Make a suitable register to place result in. */
2544 mode = TYPE_MODE (TREE_TYPE (arg));
2545
2546 /* Check if the sincos insn is available; otherwise emit the call. */
2547 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2548 return NULL_RTX;
2549
2550 target1 = gen_reg_rtx (mode);
2551 target2 = gen_reg_rtx (mode);
2552
2553 op0 = expand_normal (arg);
2554 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2555 alias_off = build_int_cst (alias_type, 0);
2556 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2557 sinp, alias_off));
2558 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2559 cosp, alias_off));
2560
2561 /* Compute into target1 and target2.
2562 Set TARGET to wherever the result comes back. */
2563 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2564 gcc_assert (result);
2565
2566 /* Move target1 and target2 to the memory locations indicated
2567 by op1 and op2. */
2568 emit_move_insn (op1, target1);
2569 emit_move_insn (op2, target2);
2570
2571 return const0_rtx;
2572 }
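
/* Editor's note: a source-level call such as

     double s, c;
     sincos (x, &s, &c);

   is expanded here into a single sincos_optab insn computing both
   values, which are then stored through the two pointer arguments. */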
2573
2574 /* Expand a call to the internal cexpi builtin via the sincos math function.
2575 EXP is the expression that is a call to the builtin function; if convenient,
2576 the result should be placed in TARGET. */
2577
2578 static rtx
2579 expand_builtin_cexpi (tree exp, rtx target)
2580 {
2581 tree fndecl = get_callee_fndecl (exp);
2582 tree arg, type;
2583 machine_mode mode;
2584 rtx op0, op1, op2;
2585 location_t loc = EXPR_LOCATION (exp);
2586
2587 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2588 return NULL_RTX;
2589
2590 arg = CALL_EXPR_ARG (exp, 0);
2591 type = TREE_TYPE (arg);
2592 mode = TYPE_MODE (TREE_TYPE (arg));
2593
2594 /* Try expanding via a sincos optab, and fall back to emitting a libcall
2595 to sincos or cexp. We are sure one of them exists, because cexpi
2596 is only generated from sincos or cexp, or when either is available. */
2597 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2598 {
2599 op1 = gen_reg_rtx (mode);
2600 op2 = gen_reg_rtx (mode);
2601
2602 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2603
2604 /* Compute into op1 and op2. */
2605 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2606 }
2607 else if (targetm.libc_has_function (function_sincos))
2608 {
2609 tree call, fn = NULL_TREE;
2610 tree top1, top2;
2611 rtx op1a, op2a;
2612
2613 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2614 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2615 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2616 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2617 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2618 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2619 else
2620 gcc_unreachable ();
2621
2622 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2623 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2624 op1a = copy_addr_to_reg (XEXP (op1, 0));
2625 op2a = copy_addr_to_reg (XEXP (op2, 0));
2626 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2627 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2628
2629 /* Make sure not to fold the sincos call again. */
2630 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2631 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2632 call, 3, arg, top1, top2));
2633 }
2634 else
2635 {
2636 tree call, fn = NULL_TREE, narg;
2637 tree ctype = build_complex_type (type);
2638
2639 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2640 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2641 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2642 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2643 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2644 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2645 else
2646 gcc_unreachable ();
2647
2648 /* If we don't have a decl for cexp, create one. This is the
2649 friendliest fallback if the user calls __builtin_cexpi on a
2650 target without full C99 function support. */
2651 if (fn == NULL_TREE)
2652 {
2653 tree fntype;
2654 const char *name = NULL;
2655
2656 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2657 name = "cexpf";
2658 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2659 name = "cexp";
2660 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2661 name = "cexpl";
2662
2663 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2664 fn = build_fn_decl (name, fntype);
2665 }
2666
2667 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2668 build_real (type, dconst0), arg);
2669
2670 /* Make sure not to fold the cexp call again. */
2671 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2672 return expand_expr (build_call_nary (ctype, call, 1, narg),
2673 target, VOIDmode, EXPAND_NORMAL);
2674 }
2675
2676 /* Now build the proper return type. */
2677 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2678 make_tree (TREE_TYPE (arg), op2),
2679 make_tree (TREE_TYPE (arg), op1)),
2680 target, VOIDmode, EXPAND_NORMAL);
2681 }
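
/* Editor's illustrative sketch, not part of builtins.c: the sincos
   fallback path above behaves like this hypothetical helper, building
   cos (x) + i*sin (x) as a COMPLEX_EXPR of the two results. */

extern void sincos (double, double *, double *);  /* GNU extension */

_Complex double
cexpi_via_sincos (double x)
{
  double s, c;
  sincos (x, &s, &c);
  return __builtin_complex (c, s);  /* real part cos, imaginary sin */
}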
2682
2683 /* Conveniently construct a function call expression. FNDECL names the
2684 function to be called, N is the number of arguments, and the "..."
2685 parameters are the argument expressions. Unlike build_call_expr
2686 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2687
2688 static tree
2689 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2690 {
2691 va_list ap;
2692 tree fntype = TREE_TYPE (fndecl);
2693 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2694
2695 va_start (ap, n);
2696 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2697 va_end (ap);
2698 SET_EXPR_LOCATION (fn, loc);
2699 return fn;
2700 }
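
/* Editor's illustrative sketch, not part of builtins.c: this is how
   expand_builtin_mempcpy_args below builds an unfolded call to memcpy
   (DEST, SRC and LEN are assumed to be trees already in scope). */

tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
tree call = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                   dest, src, len);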
2701
2702 /* Expand a call to one of the builtin rounding functions gcc defines
2703 as an extension (lfloor and lceil). As these are gcc extensions we
2704 do not need to worry about setting errno to EDOM.
2705 If expanding via optab fails, lower expression to (int)(floor(x)).
2706 EXP is the expression that is a call to the builtin function;
2707 if convenient, the result should be placed in TARGET. */
2708
2709 static rtx
2710 expand_builtin_int_roundingfn (tree exp, rtx target)
2711 {
2712 convert_optab builtin_optab;
2713 rtx op0, tmp;
2714 rtx_insn *insns;
2715 tree fndecl = get_callee_fndecl (exp);
2716 enum built_in_function fallback_fn;
2717 tree fallback_fndecl;
2718 machine_mode mode;
2719 tree arg;
2720
2721 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2722 gcc_unreachable ();
2723
2724 arg = CALL_EXPR_ARG (exp, 0);
2725
2726 switch (DECL_FUNCTION_CODE (fndecl))
2727 {
2728 CASE_FLT_FN (BUILT_IN_ICEIL):
2729 CASE_FLT_FN (BUILT_IN_LCEIL):
2730 CASE_FLT_FN (BUILT_IN_LLCEIL):
2731 builtin_optab = lceil_optab;
2732 fallback_fn = BUILT_IN_CEIL;
2733 break;
2734
2735 CASE_FLT_FN (BUILT_IN_IFLOOR):
2736 CASE_FLT_FN (BUILT_IN_LFLOOR):
2737 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2738 builtin_optab = lfloor_optab;
2739 fallback_fn = BUILT_IN_FLOOR;
2740 break;
2741
2742 default:
2743 gcc_unreachable ();
2744 }
2745
2746 /* Make a suitable register to place result in. */
2747 mode = TYPE_MODE (TREE_TYPE (exp));
2748
2749 target = gen_reg_rtx (mode);
2750
2751 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2752 need to expand the argument again. This way, we will not perform
2753 side-effects more than once. */
2754 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2755
2756 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2757
2758 start_sequence ();
2759
2760 /* Compute into TARGET. */
2761 if (expand_sfix_optab (target, op0, builtin_optab))
2762 {
2763 /* Output the entire sequence. */
2764 insns = get_insns ();
2765 end_sequence ();
2766 emit_insn (insns);
2767 return target;
2768 }
2769
2770 /* If we were unable to expand via the builtin, stop the sequence
2771 (without outputting the insns). */
2772 end_sequence ();
2773
2774 /* Fall back to floating point rounding optab. */
2775 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2776
2777 /* For non-C99 targets we may end up without a fallback fndecl here
2778 if the user called __builtin_lfloor directly. In this case emit
2779 a call to the floor/ceil variants nevertheless. This should give
2780 the best user experience on targets without full C99 support. */
2781 if (fallback_fndecl == NULL_TREE)
2782 {
2783 tree fntype;
2784 const char *name = NULL;
2785
2786 switch (DECL_FUNCTION_CODE (fndecl))
2787 {
2788 case BUILT_IN_ICEIL:
2789 case BUILT_IN_LCEIL:
2790 case BUILT_IN_LLCEIL:
2791 name = "ceil";
2792 break;
2793 case BUILT_IN_ICEILF:
2794 case BUILT_IN_LCEILF:
2795 case BUILT_IN_LLCEILF:
2796 name = "ceilf";
2797 break;
2798 case BUILT_IN_ICEILL:
2799 case BUILT_IN_LCEILL:
2800 case BUILT_IN_LLCEILL:
2801 name = "ceill";
2802 break;
2803 case BUILT_IN_IFLOOR:
2804 case BUILT_IN_LFLOOR:
2805 case BUILT_IN_LLFLOOR:
2806 name = "floor";
2807 break;
2808 case BUILT_IN_IFLOORF:
2809 case BUILT_IN_LFLOORF:
2810 case BUILT_IN_LLFLOORF:
2811 name = "floorf";
2812 break;
2813 case BUILT_IN_IFLOORL:
2814 case BUILT_IN_LFLOORL:
2815 case BUILT_IN_LLFLOORL:
2816 name = "floorl";
2817 break;
2818 default:
2819 gcc_unreachable ();
2820 }
2821
2822 fntype = build_function_type_list (TREE_TYPE (arg),
2823 TREE_TYPE (arg), NULL_TREE);
2824 fallback_fndecl = build_fn_decl (name, fntype);
2825 }
2826
2827 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2828
2829 tmp = expand_normal (exp);
2830 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2831
2832 /* Truncate the result of the floating point optab to integer
2833 via expand_fix (). */
2834 target = gen_reg_rtx (mode);
2835 expand_fix (target, tmp, 0);
2836
2837 return target;
2838 }
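
/* Editor's note: in effect, when lceil_optab/lfloor_optab have no
   handler, a call such as __builtin_lfloor (x) is lowered here as if
   the user had written

     (long) floor (x)

   with the final integer conversion performed by expand_fix. */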
2839
2840 /* Expand a call to one of the builtin math functions doing integer
2841 conversion (lrint).
2842 Return 0 if a normal call should be emitted rather than expanding the
2843 function in-line. EXP is the expression that is a call to the builtin
2844 function; if convenient, the result should be placed in TARGET. */
2845
2846 static rtx
2847 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2848 {
2849 convert_optab builtin_optab;
2850 rtx op0;
2851 rtx_insn *insns;
2852 tree fndecl = get_callee_fndecl (exp);
2853 tree arg;
2854 machine_mode mode;
2855 enum built_in_function fallback_fn = BUILT_IN_NONE;
2856
2857 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2858 gcc_unreachable ();
2859
2860 arg = CALL_EXPR_ARG (exp, 0);
2861
2862 switch (DECL_FUNCTION_CODE (fndecl))
2863 {
2864 CASE_FLT_FN (BUILT_IN_IRINT):
2865 fallback_fn = BUILT_IN_LRINT;
2866 /* FALLTHRU */
2867 CASE_FLT_FN (BUILT_IN_LRINT):
2868 CASE_FLT_FN (BUILT_IN_LLRINT):
2869 builtin_optab = lrint_optab;
2870 break;
2871
2872 CASE_FLT_FN (BUILT_IN_IROUND):
2873 fallback_fn = BUILT_IN_LROUND;
2874 /* FALLTHRU */
2875 CASE_FLT_FN (BUILT_IN_LROUND):
2876 CASE_FLT_FN (BUILT_IN_LLROUND):
2877 builtin_optab = lround_optab;
2878 break;
2879
2880 default:
2881 gcc_unreachable ();
2882 }
2883
2884 /* There's no easy way to detect the case we need to set EDOM. */
2885 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2886 return NULL_RTX;
2887
2888 /* Make a suitable register to place result in. */
2889 mode = TYPE_MODE (TREE_TYPE (exp));
2890
2891 /* When errno handling is not required, try expanding in-line. */
2892 if (!flag_errno_math)
2893 {
2894 rtx result = gen_reg_rtx (mode);
2895
2896 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2897 need to expand the argument again. This way, we will not perform
2898 side-effects more than once. */
2899 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2900
2901 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2902
2903 start_sequence ();
2904
2905 if (expand_sfix_optab (result, op0, builtin_optab))
2906 {
2907 /* Output the entire sequence. */
2908 insns = get_insns ();
2909 end_sequence ();
2910 emit_insn (insns);
2911 return result;
2912 }
2913
2914 /* If we were unable to expand via the builtin, stop the sequence
2915 (without outputting the insns) and call the library function
2916 with the stabilized argument list. */
2917 end_sequence ();
2918 }
2919
2920 if (fallback_fn != BUILT_IN_NONE)
2921 {
2922 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2923 targets, (int) round (x) should never be transformed into
2924 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2925 a call to lround in the hope that the target provides at least some
2926 C99 functions. This should give the best user experience on
2927 targets without full C99 support. */
2928 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2929 fallback_fn, 0);
2930
2931 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2932 fallback_fndecl, 1, arg);
2933
2934 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2935 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2936 return convert_to_mode (mode, target, 0);
2937 }
2938
2939 return expand_call (exp, target, target == const0_rtx);
2940 }
2941
2942 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2943 a normal call should be emitted rather than expanding the function
2944 in-line. EXP is the expression that is a call to the builtin
2945 function; if convenient, the result should be placed in TARGET. */
2946
2947 static rtx
2948 expand_builtin_powi (tree exp, rtx target)
2949 {
2950 tree arg0, arg1;
2951 rtx op0, op1;
2952 machine_mode mode;
2953 machine_mode mode2;
2954
2955 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2956 return NULL_RTX;
2957
2958 arg0 = CALL_EXPR_ARG (exp, 0);
2959 arg1 = CALL_EXPR_ARG (exp, 1);
2960 mode = TYPE_MODE (TREE_TYPE (exp));
2961
2962 /* Emit a libcall to libgcc. */
2963
2964 /* Mode of the 2nd argument must match that of an int. */
2965 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2966
2967 if (target == NULL_RTX)
2968 target = gen_reg_rtx (mode);
2969
2970 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2971 if (GET_MODE (op0) != mode)
2972 op0 = convert_to_mode (mode, op0, 0);
2973 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2974 if (GET_MODE (op1) != mode2)
2975 op1 = convert_to_mode (mode2, op1, 0);
2976
2977 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2978 target, LCT_CONST, mode, 2,
2979 op0, mode, op1, mode2);
2980
2981 return target;
2982 }
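
/* Editor's illustrative sketch, not part of builtins.c: __builtin_powi
   computes x ** n for an integer n and, as expanded above, always ends
   up as a libgcc call (e.g. __powidf2 for double on typical targets;
   the exact libfunc comes from powi_optab). */

double
eighth_power (double x)
{
  return __builtin_powi (x, 8);
}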
2983
2984 /* Expand expression EXP, which is a call to the strlen builtin. Return
2985 NULL_RTX if we failed and the caller should emit a normal call; otherwise
2986 try to get the result in TARGET, if convenient. */
2987
2988 static rtx
2989 expand_builtin_strlen (tree exp, rtx target,
2990 machine_mode target_mode)
2991 {
2992 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2993 return NULL_RTX;
2994 else
2995 {
2996 struct expand_operand ops[4];
2997 rtx pat;
2998 tree len;
2999 tree src = CALL_EXPR_ARG (exp, 0);
3000 rtx src_reg;
3001 rtx_insn *before_strlen;
3002 machine_mode insn_mode = target_mode;
3003 enum insn_code icode = CODE_FOR_nothing;
3004 unsigned int align;
3005
3006 /* If the length can be computed at compile-time, return it. */
3007 len = c_strlen (src, 0);
3008 if (len)
3009 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3010
3011 /* If the length can be computed at compile-time and is a constant
3012 integer, but there are side-effects in src, evaluate
3013 src for its side-effects, then return len.
3014 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3015 can be optimized into: i++; x = 3; */
3016 len = c_strlen (src, 1);
3017 if (len && TREE_CODE (len) == INTEGER_CST)
3018 {
3019 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3020 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3021 }
3022
3023 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3024
3025 /* If SRC is not a pointer type, don't do this operation inline. */
3026 if (align == 0)
3027 return NULL_RTX;
3028
3029 /* Bail out if we can't compute strlen in the right mode. */
3030 while (insn_mode != VOIDmode)
3031 {
3032 icode = optab_handler (strlen_optab, insn_mode);
3033 if (icode != CODE_FOR_nothing)
3034 break;
3035
3036 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3037 }
3038 if (insn_mode == VOIDmode)
3039 return NULL_RTX;
3040
3041 /* Make a place to hold the source address. We will not expand
3042 the actual source until we are sure that the expansion will
3043 not fail -- there are trees that cannot be expanded twice. */
3044 src_reg = gen_reg_rtx (Pmode);
3045
3046 /* Mark the beginning of the strlen sequence so we can emit the
3047 source operand later. */
3048 before_strlen = get_last_insn ();
3049
3050 create_output_operand (&ops[0], target, insn_mode);
3051 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3052 create_integer_operand (&ops[2], 0);
3053 create_integer_operand (&ops[3], align);
3054 if (!maybe_expand_insn (icode, 4, ops))
3055 return NULL_RTX;
3056
3057 /* Now that we are assured of success, expand the source. */
3058 start_sequence ();
3059 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3060 if (pat != src_reg)
3061 {
3062 #ifdef POINTERS_EXTEND_UNSIGNED
3063 if (GET_MODE (pat) != Pmode)
3064 pat = convert_to_mode (Pmode, pat,
3065 POINTERS_EXTEND_UNSIGNED);
3066 #endif
3067 emit_move_insn (src_reg, pat);
3068 }
3069 pat = get_insns ();
3070 end_sequence ();
3071
3072 if (before_strlen)
3073 emit_insn_after (pat, before_strlen);
3074 else
3075 emit_insn_before (pat, get_insns ());
3076
3077 /* Return the value in the proper mode for this function. */
3078 if (GET_MODE (ops[0].value) == target_mode)
3079 target = ops[0].value;
3080 else if (target != 0)
3081 convert_move (target, ops[0].value, 0);
3082 else
3083 target = convert_to_mode (target_mode, ops[0].value, 0);
3084
3085 return target;
3086 }
3087 }
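
/* Editor's note: both c_strlen paths above fold at compile time, e.g.

     size_t n = strlen ("abc");                       n = 3, no call
     size_t m = strlen (i++ ? "xfoo" + 1 : "bar");    i++; m = 3

   Only when the length is unknown is a strlen_optab insn (or, failing
   that, a library call) emitted. */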
3088
3089 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3090 bytes from constant string DATA + OFFSET and return it as a target
3091 constant. */
3092
3093 static rtx
3094 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3095 machine_mode mode)
3096 {
3097 const char *str = (const char *) data;
3098
3099 gcc_assert (offset >= 0
3100 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3101 <= strlen (str) + 1));
3102
3103 return c_readstr (str + offset, mode);
3104 }
3105
3106 /* LEN specifies the length of the block for a memcpy/memset operation.
3107 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3108 In some cases we can make a very likely guess on the maximum size,
3109 which we then store into PROBABLE_MAX_SIZE. */
3110
3111 static void
3112 determine_block_size (tree len, rtx len_rtx,
3113 unsigned HOST_WIDE_INT *min_size,
3114 unsigned HOST_WIDE_INT *max_size,
3115 unsigned HOST_WIDE_INT *probable_max_size)
3116 {
3117 if (CONST_INT_P (len_rtx))
3118 {
3119 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3120 return;
3121 }
3122 else
3123 {
3124 wide_int min, max;
3125 enum value_range_type range_type = VR_UNDEFINED;
3126
3127 /* Determine bounds from the type. */
3128 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3129 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3130 else
3131 *min_size = 0;
3132 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3133 *probable_max_size = *max_size
3134 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3135 else
3136 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3137
3138 if (TREE_CODE (len) == SSA_NAME)
3139 range_type = get_range_info (len, &min, &max);
3140 if (range_type == VR_RANGE)
3141 {
3142 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3143 *min_size = min.to_uhwi ();
3144 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3145 *probable_max_size = *max_size = max.to_uhwi ();
3146 }
3147 else if (range_type == VR_ANTI_RANGE)
3148 {
3149 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3150 if (min == 0)
3151 {
3152 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3153 *min_size = max.to_uhwi () + 1;
3154 }
3155 /* Code like
3156
3157 int n;
3158 if (n < 100)
3159 memcpy (a, b, n)
3160
3161 produces an anti-range allowing negative values of N. We can
3162 still use that information and guess that N is not negative.
3163 */
3164 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3165 *probable_max_size = min.to_uhwi () - 1;
3166 }
3167 }
3168 gcc_checking_assert (*max_size <=
3169 (unsigned HOST_WIDE_INT)
3170 GET_MODE_MASK (GET_MODE (len_rtx)));
3171 }
3172
3173 /* Expand a call EXP to the memcpy builtin.
3174 Return NULL_RTX if we failed; the caller should emit a normal call,
3175 otherwise try to get the result in TARGET, if convenient (and in
3176 mode MODE if that's convenient). */
3177
3178 static rtx
3179 expand_builtin_memcpy (tree exp, rtx target)
3180 {
3181 if (!validate_arglist (exp,
3182 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3183 return NULL_RTX;
3184 else
3185 {
3186 tree dest = CALL_EXPR_ARG (exp, 0);
3187 tree src = CALL_EXPR_ARG (exp, 1);
3188 tree len = CALL_EXPR_ARG (exp, 2);
3189 const char *src_str;
3190 unsigned int src_align = get_pointer_alignment (src);
3191 unsigned int dest_align = get_pointer_alignment (dest);
3192 rtx dest_mem, src_mem, dest_addr, len_rtx;
3193 HOST_WIDE_INT expected_size = -1;
3194 unsigned int expected_align = 0;
3195 unsigned HOST_WIDE_INT min_size;
3196 unsigned HOST_WIDE_INT max_size;
3197 unsigned HOST_WIDE_INT probable_max_size;
3198
3199 /* If DEST is not a pointer type, call the normal function. */
3200 if (dest_align == 0)
3201 return NULL_RTX;
3202
3203 /* If SRC is not a pointer type, don't do this
3204 operation in-line. */
3205 if (src_align == 0)
3206 return NULL_RTX;
3207
3208 if (currently_expanding_gimple_stmt)
3209 stringop_block_profile (currently_expanding_gimple_stmt,
3210 &expected_align, &expected_size);
3211
3212 if (expected_align < dest_align)
3213 expected_align = dest_align;
3214 dest_mem = get_memory_rtx (dest, len);
3215 set_mem_align (dest_mem, dest_align);
3216 len_rtx = expand_normal (len);
3217 determine_block_size (len, len_rtx, &min_size, &max_size,
3218 &probable_max_size);
3219 src_str = c_getstr (src);
3220
3221 /* If SRC is a string constant and block move would be done
3222 by pieces, we can avoid loading the string from memory
3223 and only store the computed constants. */
3224 if (src_str
3225 && CONST_INT_P (len_rtx)
3226 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3227 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3228 CONST_CAST (char *, src_str),
3229 dest_align, false))
3230 {
3231 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3232 builtin_memcpy_read_str,
3233 CONST_CAST (char *, src_str),
3234 dest_align, false, 0);
3235 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3236 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3237 return dest_mem;
3238 }
3239
3240 src_mem = get_memory_rtx (src, len);
3241 set_mem_align (src_mem, src_align);
3242
3243 /* Copy word part most expediently. */
3244 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3245 CALL_EXPR_TAILCALL (exp)
3246 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3247 expected_align, expected_size,
3248 min_size, max_size, probable_max_size);
3249
3250 if (dest_addr == 0)
3251 {
3252 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3253 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3254 }
3255 return dest_addr;
3256 }
3257 }
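
/* Editor's note: with a constant string source and a small constant
   length, the store-by-pieces path above turns

     memcpy (buf, "abcd", 4);

   into a single 4-byte immediate store (subject to alignment and the
   target's by-pieces cost limits), never loading the string from its
   read-only data copy. */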
3258
3259 /* Expand a call EXP to the mempcpy builtin.
3260 Return NULL_RTX if we failed; the caller should emit a normal call,
3261 otherwise try to get the result in TARGET, if convenient (and in
3262 mode MODE if that's convenient). If ENDP is 0 return the
3263 destination pointer, if ENDP is 1 return the end pointer ala
3264 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3265 stpcpy. */
3266
3267 static rtx
3268 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3269 {
3270 if (!validate_arglist (exp,
3271 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3272 return NULL_RTX;
3273 else
3274 {
3275 tree dest = CALL_EXPR_ARG (exp, 0);
3276 tree src = CALL_EXPR_ARG (exp, 1);
3277 tree len = CALL_EXPR_ARG (exp, 2);
3278 return expand_builtin_mempcpy_args (dest, src, len,
3279 target, mode, /*endp=*/ 1);
3280 }
3281 }
3282
3283 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3284 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3285 so that this can also be called without constructing an actual CALL_EXPR.
3286 The other arguments and return value are the same as for
3287 expand_builtin_mempcpy. */
3288
3289 static rtx
3290 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3291 rtx target, machine_mode mode, int endp)
3292 {
3293 /* If the return value is ignored, transform mempcpy into memcpy. */
3294 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3295 {
3296 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3297 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3298 dest, src, len);
3299 return expand_expr (result, target, mode, EXPAND_NORMAL);
3300 }
3301 else
3302 {
3303 const char *src_str;
3304 unsigned int src_align = get_pointer_alignment (src);
3305 unsigned int dest_align = get_pointer_alignment (dest);
3306 rtx dest_mem, src_mem, len_rtx;
3307
3308 /* If either SRC or DEST is not a pointer type, don't do this
3309 operation in-line. */
3310 if (dest_align == 0 || src_align == 0)
3311 return NULL_RTX;
3312
3313 /* If LEN is not constant, call the normal function. */
3314 if (! tree_fits_uhwi_p (len))
3315 return NULL_RTX;
3316
3317 len_rtx = expand_normal (len);
3318 src_str = c_getstr (src);
3319
3320 /* If SRC is a string constant and block move would be done
3321 by pieces, we can avoid loading the string from memory
3322 and only store the computed constants. */
3323 if (src_str
3324 && CONST_INT_P (len_rtx)
3325 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3326 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3327 CONST_CAST (char *, src_str),
3328 dest_align, false))
3329 {
3330 dest_mem = get_memory_rtx (dest, len);
3331 set_mem_align (dest_mem, dest_align);
3332 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3333 builtin_memcpy_read_str,
3334 CONST_CAST (char *, src_str),
3335 dest_align, false, endp);
3336 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3337 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3338 return dest_mem;
3339 }
3340
3341 if (CONST_INT_P (len_rtx)
3342 && can_move_by_pieces (INTVAL (len_rtx),
3343 MIN (dest_align, src_align)))
3344 {
3345 dest_mem = get_memory_rtx (dest, len);
3346 set_mem_align (dest_mem, dest_align);
3347 src_mem = get_memory_rtx (src, len);
3348 set_mem_align (src_mem, src_align);
3349 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3350 MIN (dest_align, src_align), endp);
3351 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3352 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3353 return dest_mem;
3354 }
3355
3356 return NULL_RTX;
3357 }
3358 }
3359
3360 #ifndef HAVE_movstr
3361 # define HAVE_movstr 0
3362 # define CODE_FOR_movstr CODE_FOR_nothing
3363 #endif
3364
3365 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3366 we failed; the caller should emit a normal call, otherwise try to
3367 get the result in TARGET, if convenient. If ENDP is 0 return the
3368 destination pointer, if ENDP is 1 return the end pointer ala
3369 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3370 stpcpy. */
3371
3372 static rtx
3373 expand_movstr (tree dest, tree src, rtx target, int endp)
3374 {
3375 struct expand_operand ops[3];
3376 rtx dest_mem;
3377 rtx src_mem;
3378
3379 if (!HAVE_movstr)
3380 return NULL_RTX;
3381
3382 dest_mem = get_memory_rtx (dest, NULL);
3383 src_mem = get_memory_rtx (src, NULL);
3384 if (!endp)
3385 {
3386 target = force_reg (Pmode, XEXP (dest_mem, 0));
3387 dest_mem = replace_equiv_address (dest_mem, target);
3388 }
3389
3390 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3391 create_fixed_operand (&ops[1], dest_mem);
3392 create_fixed_operand (&ops[2], src_mem);
3393 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3394 return NULL_RTX;
3395
3396 if (endp && target != const0_rtx)
3397 {
3398 target = ops[0].value;
3399 /* movstr is supposed to set end to the address of the NUL
3400 terminator. If the caller requested a mempcpy-like return value,
3401 adjust it. */
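/* E.g., with hypothetical values: copying "ab" to D leaves END at
   D + 2 (the NUL), which is already the stpcpy-style result; a
   mempcpy-style caller expects D + 3, hence the increment below. */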
3402 if (endp == 1)
3403 {
3404 rtx tem = plus_constant (GET_MODE (target),
3405 gen_lowpart (GET_MODE (target), target), 1);
3406 emit_move_insn (target, force_operand (tem, NULL_RTX));
3407 }
3408 }
3409 return target;
3410 }
3411
3412 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3413 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3414 try to get the result in TARGET, if convenient (and in mode MODE if that's
3415 convenient). */
3416
3417 static rtx
3418 expand_builtin_strcpy (tree exp, rtx target)
3419 {
3420 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3421 {
3422 tree dest = CALL_EXPR_ARG (exp, 0);
3423 tree src = CALL_EXPR_ARG (exp, 1);
3424 return expand_builtin_strcpy_args (dest, src, target);
3425 }
3426 return NULL_RTX;
3427 }
3428
3429 /* Helper function to do the actual work for expand_builtin_strcpy. The
3430 arguments to the builtin_strcpy call DEST and SRC are broken out
3431 so that this can also be called without constructing an actual CALL_EXPR.
3432 The other arguments and return value are the same as for
3433 expand_builtin_strcpy. */
3434
3435 static rtx
3436 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3437 {
3438 return expand_movstr (dest, src, target, /*endp=*/0);
3439 }
3440
3441 /* Expand a call EXP to the stpcpy builtin.
3442 Return NULL_RTX if we failed; the caller should emit a normal call,
3443 otherwise try to get the result in TARGET, if convenient (and in
3444 mode MODE if that's convenient). */
3445
3446 static rtx
3447 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3448 {
3449 tree dst, src;
3450 location_t loc = EXPR_LOCATION (exp);
3451
3452 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3453 return NULL_RTX;
3454
3455 dst = CALL_EXPR_ARG (exp, 0);
3456 src = CALL_EXPR_ARG (exp, 1);
3457
3458 /* If return value is ignored, transform stpcpy into strcpy. */
3459 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3460 {
3461 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3462 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3463 return expand_expr (result, target, mode, EXPAND_NORMAL);
3464 }
3465 else
3466 {
3467 tree len, lenp1;
3468 rtx ret;
3469
3470 /* Ensure we get an actual string whose length can be evaluated at
3471 compile-time, not an expression containing a string. This is
3472 because the latter will potentially produce pessimized code
3473 when used to produce the return value. */
3474 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3475 return expand_movstr (dst, src, target, /*endp=*/2);
3476
3477 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3478 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3479 target, mode, /*endp=*/2);
3480
3481 if (ret)
3482 return ret;
3483
3484 if (TREE_CODE (len) == INTEGER_CST)
3485 {
3486 rtx len_rtx = expand_normal (len);
3487
3488 if (CONST_INT_P (len_rtx))
3489 {
3490 ret = expand_builtin_strcpy_args (dst, src, target);
3491
3492 if (ret)
3493 {
3494 if (! target)
3495 {
3496 if (mode != VOIDmode)
3497 target = gen_reg_rtx (mode);
3498 else
3499 target = gen_reg_rtx (GET_MODE (ret));
3500 }
3501 if (GET_MODE (target) != GET_MODE (ret))
3502 ret = gen_lowpart (GET_MODE (target), ret);
3503
3504 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3505 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3506 gcc_assert (ret);
3507
3508 return target;
3509 }
3510 }
3511 }
3512
3513 return expand_movstr (dst, src, target, /*endp=*/2);
3514 }
3515 }
3516
3517 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3518 bytes from constant string DATA + OFFSET and return it as target
3519 constant. */
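/* Purely illustrative: with DATA == "ab" and a 4-byte MODE, offset 0
   yields the target constant for the bytes { 'a', 'b', 0, 0 }, and
   any offset beyond the terminating NUL yields const0_rtx. */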
3520
3521 rtx
3522 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3523 machine_mode mode)
3524 {
3525 const char *str = (const char *) data;
3526
3527 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3528 return const0_rtx;
3529
3530 return c_readstr (str + offset, mode);
3531 }
3532
3533 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3534 NULL_RTX if we failed; the caller should emit a normal call. */
3535
3536 static rtx
3537 expand_builtin_strncpy (tree exp, rtx target)
3538 {
3539 location_t loc = EXPR_LOCATION (exp);
3540
3541 if (validate_arglist (exp,
3542 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3543 {
3544 tree dest = CALL_EXPR_ARG (exp, 0);
3545 tree src = CALL_EXPR_ARG (exp, 1);
3546 tree len = CALL_EXPR_ARG (exp, 2);
3547 tree slen = c_strlen (src, 1);
3548
3549 /* We must be passed a constant LEN, and SRC must have a known constant length. */
3550 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3551 return NULL_RTX;
3552
3553 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3554
3555 /* We're required to pad with trailing zeros if the requested
3556 len is greater than strlen(s2)+1. In that case try to
3557 use store_by_pieces; if it fails, punt. */
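/* For instance, strncpy (d, "ab", 5) must store 'a', 'b' and three
   NUL bytes; with constant SRC and LEN those five bytes can be
   emitted directly by store_by_pieces. */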
3558 if (tree_int_cst_lt (slen, len))
3559 {
3560 unsigned int dest_align = get_pointer_alignment (dest);
3561 const char *p = c_getstr (src);
3562 rtx dest_mem;
3563
3564 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3565 || !can_store_by_pieces (tree_to_uhwi (len),
3566 builtin_strncpy_read_str,
3567 CONST_CAST (char *, p),
3568 dest_align, false))
3569 return NULL_RTX;
3570
3571 dest_mem = get_memory_rtx (dest, len);
3572 store_by_pieces (dest_mem, tree_to_uhwi (len),
3573 builtin_strncpy_read_str,
3574 CONST_CAST (char *, p), dest_align, false, 0);
3575 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3576 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3577 return dest_mem;
3578 }
3579 }
3580 return NULL_RTX;
3581 }
3582
3583 /* Callback routine for store_by_pieces. Return a target constant
3584 made up of GET_MODE_SIZE (MODE) copies of the single byte
3585 referenced by DATA; OFFSET is not used. */
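/* Illustrative example: with *(const char *) DATA == 0x2a and a
   4-byte MODE this yields the constant 0x2a2a2a2a, whatever the
   target byte order. */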
3586
3587 rtx
3588 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3589 machine_mode mode)
3590 {
3591 const char *c = (const char *) data;
3592 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3593
3594 memset (p, *c, GET_MODE_SIZE (mode));
3595
3596 return c_readstr (p, mode);
3597 }
3598
3599 /* Callback routine for store_by_pieces. Return the RTL of a register
3600 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3601 char value given in the RTL register data. For example, if mode is
3602 4 bytes wide, return the RTL for 0x01010101*data. */
3603
3604 static rtx
3605 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3606 machine_mode mode)
3607 {
3608 rtx target, coeff;
3609 size_t size;
3610 char *p;
3611
3612 size = GET_MODE_SIZE (mode);
3613 if (size == 1)
3614 return (rtx) data;
3615
3616 p = XALLOCAVEC (char, size);
3617 memset (p, 1, size);
3618 coeff = c_readstr (p, mode);
3619
3620 target = convert_to_mode (mode, (rtx) data, 1);
3621 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3622 return force_reg (mode, target);
3623 }
3624
3625 /* Expand expression EXP, which is a call to the memset builtin. Return
3626 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3627 try to get the result in TARGET, if convenient (and in mode MODE if that's
3628 convenient). */
3629
3630 static rtx
3631 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3632 {
3633 if (!validate_arglist (exp,
3634 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3635 return NULL_RTX;
3636 else
3637 {
3638 tree dest = CALL_EXPR_ARG (exp, 0);
3639 tree val = CALL_EXPR_ARG (exp, 1);
3640 tree len = CALL_EXPR_ARG (exp, 2);
3641 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3642 }
3643 }
3644
3645 /* Helper function to do the actual work for expand_builtin_memset. The
3646 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3647 so that this can also be called without constructing an actual CALL_EXPR.
3648 The other arguments and return value are the same as for
3649 expand_builtin_memset. */
3650
3651 static rtx
3652 expand_builtin_memset_args (tree dest, tree val, tree len,
3653 rtx target, machine_mode mode, tree orig_exp)
3654 {
3655 tree fndecl, fn;
3656 enum built_in_function fcode;
3657 machine_mode val_mode;
3658 char c;
3659 unsigned int dest_align;
3660 rtx dest_mem, dest_addr, len_rtx;
3661 HOST_WIDE_INT expected_size = -1;
3662 unsigned int expected_align = 0;
3663 unsigned HOST_WIDE_INT min_size;
3664 unsigned HOST_WIDE_INT max_size;
3665 unsigned HOST_WIDE_INT probable_max_size;
3666
3667 dest_align = get_pointer_alignment (dest);
3668
3669 /* If DEST is not a pointer type, don't do this operation in-line. */
3670 if (dest_align == 0)
3671 return NULL_RTX;
3672
3673 if (currently_expanding_gimple_stmt)
3674 stringop_block_profile (currently_expanding_gimple_stmt,
3675 &expected_align, &expected_size);
3676
3677 if (expected_align < dest_align)
3678 expected_align = dest_align;
3679
3680 /* If the LEN parameter is zero, return DEST. */
3681 if (integer_zerop (len))
3682 {
3683 /* Evaluate and ignore VAL in case it has side-effects. */
3684 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3685 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3686 }
3687
3688 /* Stabilize the arguments in case we fail. */
3689 dest = builtin_save_expr (dest);
3690 val = builtin_save_expr (val);
3691 len = builtin_save_expr (len);
3692
3693 len_rtx = expand_normal (len);
3694 determine_block_size (len, len_rtx, &min_size, &max_size,
3695 &probable_max_size);
3696 dest_mem = get_memory_rtx (dest, len);
3697 val_mode = TYPE_MODE (unsigned_char_type_node);
3698
3699 if (TREE_CODE (val) != INTEGER_CST)
3700 {
3701 rtx val_rtx;
3702
3703 val_rtx = expand_normal (val);
3704 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3705
3706 /* Assume that we can memset by pieces if we can store
3707 the coefficients by pieces (in the required modes); we can't
3708 probe with builtin_memset_gen_str itself, as it emits RTL. */
3709 c = 1;
3710 if (tree_fits_uhwi_p (len)
3711 && can_store_by_pieces (tree_to_uhwi (len),
3712 builtin_memset_read_str, &c, dest_align,
3713 true))
3714 {
3715 val_rtx = force_reg (val_mode, val_rtx);
3716 store_by_pieces (dest_mem, tree_to_uhwi (len),
3717 builtin_memset_gen_str, val_rtx, dest_align,
3718 true, 0);
3719 }
3720 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3721 dest_align, expected_align,
3722 expected_size, min_size, max_size,
3723 probable_max_size))
3724 goto do_libcall;
3725
3726 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3727 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3728 return dest_mem;
3729 }
3730
3731 if (target_char_cast (val, &c))
3732 goto do_libcall;
3733
3734 if (c)
3735 {
3736 if (tree_fits_uhwi_p (len)
3737 && can_store_by_pieces (tree_to_uhwi (len),
3738 builtin_memset_read_str, &c, dest_align,
3739 true))
3740 store_by_pieces (dest_mem, tree_to_uhwi (len),
3741 builtin_memset_read_str, &c, dest_align, true, 0);
3742 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3743 gen_int_mode (c, val_mode),
3744 dest_align, expected_align,
3745 expected_size, min_size, max_size,
3746 probable_max_size))
3747 goto do_libcall;
3748
3749 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3750 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3751 return dest_mem;
3752 }
3753
3754 set_mem_align (dest_mem, dest_align);
3755 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3756 CALL_EXPR_TAILCALL (orig_exp)
3757 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3758 expected_align, expected_size,
3759 min_size, max_size,
3760 probable_max_size);
3761
3762 if (dest_addr == 0)
3763 {
3764 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3765 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3766 }
3767
3768 return dest_addr;
3769
3770 do_libcall:
3771 fndecl = get_callee_fndecl (orig_exp);
3772 fcode = DECL_FUNCTION_CODE (fndecl);
3773 if (fcode == BUILT_IN_MEMSET)
3774 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3775 dest, val, len);
3776 else if (fcode == BUILT_IN_BZERO)
3777 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3778 dest, len);
3779 else
3780 gcc_unreachable ();
3781 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3782 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3783 return expand_call (fn, target, target == const0_rtx);
3784 }
3785
3786 /* Expand expression EXP, which is a call to the bzero builtin. Return
3787 NULL_RTX if we failed; the caller should emit a normal call. */
3788
3789 static rtx
3790 expand_builtin_bzero (tree exp)
3791 {
3792 tree dest, size;
3793 location_t loc = EXPR_LOCATION (exp);
3794
3795 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3796 return NULL_RTX;
3797
3798 dest = CALL_EXPR_ARG (exp, 0);
3799 size = CALL_EXPR_ARG (exp, 1);
3800
3801 /* New argument list transforming bzero(ptr x, int y) to
3802 memset(ptr x, int 0, size_t y). This is done this way
3803 so that if it isn't expanded inline, we fall back to
3804 calling bzero instead of memset. */
3805
3806 return expand_builtin_memset_args (dest, integer_zero_node,
3807 fold_convert_loc (loc,
3808 size_type_node, size),
3809 const0_rtx, VOIDmode, exp);
3810 }
3811
3812 /* Expand expression EXP, which is a call to the memcmp built-in function.
3813 Return NULL_RTX if we failed and the caller should emit a normal call,
3814 otherwise try to get the result in TARGET, if convenient (and in mode
3815 MODE, if that's convenient). */
3816
3817 static rtx
3818 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3819 ATTRIBUTE_UNUSED machine_mode mode)
3820 {
3821 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3822
3823 if (!validate_arglist (exp,
3824 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3825 return NULL_RTX;
3826
3827 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3828 implementing memcmp because it stops at a matching pair of
3829 zero bytes, whereas memcmp must compare all LEN bytes. */
3830 #if defined HAVE_cmpmemsi
3831 {
3832 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3833 rtx result;
3834 rtx insn;
3835 tree arg1 = CALL_EXPR_ARG (exp, 0);
3836 tree arg2 = CALL_EXPR_ARG (exp, 1);
3837 tree len = CALL_EXPR_ARG (exp, 2);
3838
3839 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3840 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3841 machine_mode insn_mode;
3842
3843 if (HAVE_cmpmemsi)
3844 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3845 else
3846 return NULL_RTX;
3847
3848 /* If we don't know the alignment of either argument, call the function. */
3849 if (arg1_align == 0 || arg2_align == 0)
3850 return NULL_RTX;
3851
3852 /* Make a place to write the result of the instruction. */
3853 result = target;
3854 if (! (result != 0
3855 && REG_P (result) && GET_MODE (result) == insn_mode
3856 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3857 result = gen_reg_rtx (insn_mode);
3858
3859 arg1_rtx = get_memory_rtx (arg1, len);
3860 arg2_rtx = get_memory_rtx (arg2, len);
3861 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3862
3863 /* Set MEM_SIZE as appropriate. */
3864 if (CONST_INT_P (arg3_rtx))
3865 {
3866 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3867 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3868 }
3869
3870 if (HAVE_cmpmemsi)
3871 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3872 GEN_INT (MIN (arg1_align, arg2_align)));
3873 else
3874 gcc_unreachable ();
3875
3876 if (insn)
3877 emit_insn (insn);
3878 else
3879 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3880 TYPE_MODE (integer_type_node), 3,
3881 XEXP (arg1_rtx, 0), Pmode,
3882 XEXP (arg2_rtx, 0), Pmode,
3883 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3884 TYPE_UNSIGNED (sizetype)),
3885 TYPE_MODE (sizetype));
3886
3887 /* Return the value in the proper mode for this function. */
3888 mode = TYPE_MODE (TREE_TYPE (exp));
3889 if (GET_MODE (result) == mode)
3890 return result;
3891 else if (target != 0)
3892 {
3893 convert_move (target, result, 0);
3894 return target;
3895 }
3896 else
3897 return convert_to_mode (mode, result, 0);
3898 }
3899 #endif /* HAVE_cmpmemsi. */
3900
3901 return NULL_RTX;
3902 }
3903
3904 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3905 if we failed; the caller should emit a normal call, otherwise try to get
3906 the result in TARGET, if convenient. */
3907
3908 static rtx
3909 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3910 {
3911 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3912 return NULL_RTX;
3913
3914 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3915 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3916 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3917 {
3918 rtx arg1_rtx, arg2_rtx;
3919 rtx result, insn = NULL_RTX;
3920 tree fndecl, fn;
3921 tree arg1 = CALL_EXPR_ARG (exp, 0);
3922 tree arg2 = CALL_EXPR_ARG (exp, 1);
3923
3924 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3925 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3926
3927 /* If we don't know the alignment of either argument, call the function. */
3928 if (arg1_align == 0 || arg2_align == 0)
3929 return NULL_RTX;
3930
3931 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3932 arg1 = builtin_save_expr (arg1);
3933 arg2 = builtin_save_expr (arg2);
3934
3935 arg1_rtx = get_memory_rtx (arg1, NULL);
3936 arg2_rtx = get_memory_rtx (arg2, NULL);
3937
3938 #ifdef HAVE_cmpstrsi
3939 /* Try to call cmpstrsi. */
3940 if (HAVE_cmpstrsi)
3941 {
3942 machine_mode insn_mode
3943 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3944
3945 /* Make a place to write the result of the instruction. */
3946 result = target;
3947 if (! (result != 0
3948 && REG_P (result) && GET_MODE (result) == insn_mode
3949 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3950 result = gen_reg_rtx (insn_mode);
3951
3952 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3953 GEN_INT (MIN (arg1_align, arg2_align)));
3954 }
3955 #endif
3956 #ifdef HAVE_cmpstrnsi
3957 /* Try to determine at least one length and call cmpstrnsi. */
3958 if (!insn && HAVE_cmpstrnsi)
3959 {
3960 tree len;
3961 rtx arg3_rtx;
3962
3963 machine_mode insn_mode
3964 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3965 tree len1 = c_strlen (arg1, 1);
3966 tree len2 = c_strlen (arg2, 1);
3967
3968 if (len1)
3969 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3970 if (len2)
3971 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3972
3973 /* If we don't have a constant length for the first, use the length
3974 of the second, if we know it. We don't require a constant for
3975 this case; some cost analysis could be done if both are available
3976 but neither is constant. For now, assume they're equally cheap,
3977 unless one has side effects. If both strings have constant lengths,
3978 use the smaller. */
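/* An illustrative case: for strcmp (s, "abc") only the second
   length is known, folding to the constant 4, so cmpstrnsi can
   compare at most 4 bytes; stopping at the NUL keeps that safe
   whatever the length of S. */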
3979
3980 if (!len1)
3981 len = len2;
3982 else if (!len2)
3983 len = len1;
3984 else if (TREE_SIDE_EFFECTS (len1))
3985 len = len2;
3986 else if (TREE_SIDE_EFFECTS (len2))
3987 len = len1;
3988 else if (TREE_CODE (len1) != INTEGER_CST)
3989 len = len2;
3990 else if (TREE_CODE (len2) != INTEGER_CST)
3991 len = len1;
3992 else if (tree_int_cst_lt (len1, len2))
3993 len = len1;
3994 else
3995 len = len2;
3996
3997 /* If both arguments have side effects, we cannot optimize. */
3998 if (!len || TREE_SIDE_EFFECTS (len))
3999 goto do_libcall;
4000
4001 arg3_rtx = expand_normal (len);
4002
4003 /* Make a place to write the result of the instruction. */
4004 result = target;
4005 if (! (result != 0
4006 && REG_P (result) && GET_MODE (result) == insn_mode
4007 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4008 result = gen_reg_rtx (insn_mode);
4009
4010 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4011 GEN_INT (MIN (arg1_align, arg2_align)));
4012 }
4013 #endif
4014
4015 if (insn)
4016 {
4017 machine_mode mode;
4018 emit_insn (insn);
4019
4020 /* Return the value in the proper mode for this function. */
4021 mode = TYPE_MODE (TREE_TYPE (exp));
4022 if (GET_MODE (result) == mode)
4023 return result;
4024 if (target == 0)
4025 return convert_to_mode (mode, result, 0);
4026 convert_move (target, result, 0);
4027 return target;
4028 }
4029
4030 /* Expand the library call ourselves using a stabilized argument
4031 list to avoid evaluating the function's arguments twice. */
4032 #ifdef HAVE_cmpstrnsi
4033 do_libcall:
4034 #endif
4035 fndecl = get_callee_fndecl (exp);
4036 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4037 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4038 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4039 return expand_call (fn, target, target == const0_rtx);
4040 }
4041 #endif
4042 return NULL_RTX;
4043 }
4044
4045 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4046 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4047 try to get the result in TARGET, if convenient. */
4048
4049 static rtx
4050 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4051 ATTRIBUTE_UNUSED machine_mode mode)
4052 {
4053 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4054
4055 if (!validate_arglist (exp,
4056 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4057 return NULL_RTX;
4058
4059 /* If c_strlen can determine an expression for one of the string
4060 lengths, and it doesn't have side effects, then emit cmpstrnsi
4061 using length MIN(strlen(string)+1, arg3). */
4062 #ifdef HAVE_cmpstrnsi
4063 if (HAVE_cmpstrnsi)
4064 {
4065 tree len, len1, len2;
4066 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4067 rtx result, insn;
4068 tree fndecl, fn;
4069 tree arg1 = CALL_EXPR_ARG (exp, 0);
4070 tree arg2 = CALL_EXPR_ARG (exp, 1);
4071 tree arg3 = CALL_EXPR_ARG (exp, 2);
4072
4073 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4074 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4075 machine_mode insn_mode
4076 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4077
4078 len1 = c_strlen (arg1, 1);
4079 len2 = c_strlen (arg2, 1);
4080
4081 if (len1)
4082 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4083 if (len2)
4084 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4085
4086 /* If we don't have a constant length for the first, use the length
4087 of the second, if we know it. We don't require a constant for
4088 this case; some cost analysis could be done if both are available
4089 but neither is constant. For now, assume they're equally cheap,
4090 unless one has side effects. If both strings have constant lengths,
4091 use the smaller. */
4092
4093 if (!len1)
4094 len = len2;
4095 else if (!len2)
4096 len = len1;
4097 else if (TREE_SIDE_EFFECTS (len1))
4098 len = len2;
4099 else if (TREE_SIDE_EFFECTS (len2))
4100 len = len1;
4101 else if (TREE_CODE (len1) != INTEGER_CST)
4102 len = len2;
4103 else if (TREE_CODE (len2) != INTEGER_CST)
4104 len = len1;
4105 else if (tree_int_cst_lt (len1, len2))
4106 len = len1;
4107 else
4108 len = len2;
4109
4110 /* If both arguments have side effects, we cannot optimize. */
4111 if (!len || TREE_SIDE_EFFECTS (len))
4112 return NULL_RTX;
4113
4114 /* The actual new length parameter is MIN(len,arg3). */
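/* Illustrative: for strncmp (s, "abc", n), LEN is the constant 4 at
   this point, and the fold below yields MIN (4, n). */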
4115 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4116 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4117
4118 /* If we don't know the alignment of either argument, call the function. */
4119 if (arg1_align == 0 || arg2_align == 0)
4120 return NULL_RTX;
4121
4122 /* Make a place to write the result of the instruction. */
4123 result = target;
4124 if (! (result != 0
4125 && REG_P (result) && GET_MODE (result) == insn_mode
4126 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4127 result = gen_reg_rtx (insn_mode);
4128
4129 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4130 arg1 = builtin_save_expr (arg1);
4131 arg2 = builtin_save_expr (arg2);
4132 len = builtin_save_expr (len);
4133
4134 arg1_rtx = get_memory_rtx (arg1, len);
4135 arg2_rtx = get_memory_rtx (arg2, len);
4136 arg3_rtx = expand_normal (len);
4137 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4138 GEN_INT (MIN (arg1_align, arg2_align)));
4139 if (insn)
4140 {
4141 emit_insn (insn);
4142
4143 /* Return the value in the proper mode for this function. */
4144 mode = TYPE_MODE (TREE_TYPE (exp));
4145 if (GET_MODE (result) == mode)
4146 return result;
4147 if (target == 0)
4148 return convert_to_mode (mode, result, 0);
4149 convert_move (target, result, 0);
4150 return target;
4151 }
4152
4153 /* Expand the library call ourselves using a stabilized argument
4154 list to avoid evaluating the function's arguments twice. */
4155 fndecl = get_callee_fndecl (exp);
4156 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4157 arg1, arg2, len);
4158 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4159 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4160 return expand_call (fn, target, target == const0_rtx);
4161 }
4162 #endif
4163 return NULL_RTX;
4164 }
4165
4166 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4167 if that's convenient. */
4168
4169 rtx
4170 expand_builtin_saveregs (void)
4171 {
4172 rtx val;
4173 rtx_insn *seq;
4174
4175 /* Don't do __builtin_saveregs more than once in a function.
4176 Save the result of the first call and reuse it. */
4177 if (saveregs_value != 0)
4178 return saveregs_value;
4179
4180 /* When this function is called, it means that registers must be
4181 saved on entry to this function. So we migrate the call to the
4182 first insn of this function. */
4183
4184 start_sequence ();
4185
4186 /* Do whatever the machine needs done in this case. */
4187 val = targetm.calls.expand_builtin_saveregs ();
4188
4189 seq = get_insns ();
4190 end_sequence ();
4191
4192 saveregs_value = val;
4193
4194 /* Put the insns after the NOTE that starts the function. If this
4195 is inside a start_sequence, make the outer-level insn chain current, so
4196 the code is placed at the start of the function. */
4197 push_topmost_sequence ();
4198 emit_insn_after (seq, entry_of_function ());
4199 pop_topmost_sequence ();
4200
4201 return val;
4202 }
4203
4204 /* Expand a call to __builtin_next_arg. */
4205
4206 static rtx
4207 expand_builtin_next_arg (void)
4208 {
4209 /* Checking arguments is already done in fold_builtin_next_arg
4210 that must be called before this function. */
4211 return expand_binop (ptr_mode, add_optab,
4212 crtl->args.internal_arg_pointer,
4213 crtl->args.arg_offset_rtx,
4214 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4215 }
4216
4217 /* Make it easier for the backends by protecting the valist argument
4218 from multiple evaluations. */
4219
4220 static tree
4221 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4222 {
4223 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4224
4225 /* The current way of determining the type of valist is completely
4226 bogus. We should have the information on the va builtin instead. */
4227 if (!vatype)
4228 vatype = targetm.fn_abi_va_list (cfun->decl);
4229
4230 if (TREE_CODE (vatype) == ARRAY_TYPE)
4231 {
4232 if (TREE_SIDE_EFFECTS (valist))
4233 valist = save_expr (valist);
4234
4235 /* For this case, the backends will be expecting a pointer to
4236 vatype, but it's possible we've actually been given an array
4237 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4238 So fix it. */
4239 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4240 {
4241 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4242 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4243 }
4244 }
4245 else
4246 {
4247 tree pt = build_pointer_type (vatype);
4248
4249 if (! needs_lvalue)
4250 {
4251 if (! TREE_SIDE_EFFECTS (valist))
4252 return valist;
4253
4254 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4255 TREE_SIDE_EFFECTS (valist) = 1;
4256 }
4257
4258 if (TREE_SIDE_EFFECTS (valist))
4259 valist = save_expr (valist);
4260 valist = fold_build2_loc (loc, MEM_REF,
4261 vatype, valist, build_int_cst (pt, 0));
4262 }
4263
4264 return valist;
4265 }
4266
4267 /* The "standard" definition of va_list is void*. */
4268
4269 tree
4270 std_build_builtin_va_list (void)
4271 {
4272 return ptr_type_node;
4273 }
4274
4275 /* The "standard" abi va_list is va_list_type_node. */
4276
4277 tree
4278 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4279 {
4280 return va_list_type_node;
4281 }
4282
4283 /* The "standard" type of va_list is va_list_type_node. */
4284
4285 tree
4286 std_canonical_va_list_type (tree type)
4287 {
4288 tree wtype, htype;
4289
4290 if (INDIRECT_REF_P (type))
4291 type = TREE_TYPE (type);
4292 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4293 type = TREE_TYPE (type);
4294 wtype = va_list_type_node;
4295 htype = type;
4296 /* Treat structure va_list types. */
4297 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4298 htype = TREE_TYPE (htype);
4299 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4300 {
4301 /* If va_list is an array type, the argument may have decayed
4302 to a pointer type, e.g. by being passed to another function.
4303 In that case, unwrap both types so that we can compare the
4304 underlying records. */
4305 if (TREE_CODE (htype) == ARRAY_TYPE
4306 || POINTER_TYPE_P (htype))
4307 {
4308 wtype = TREE_TYPE (wtype);
4309 htype = TREE_TYPE (htype);
4310 }
4311 }
4312 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4313 return va_list_type_node;
4314
4315 return NULL_TREE;
4316 }
4317
4318 /* The "standard" implementation of va_start: just assign `nextarg' to
4319 the variable. */
4320
4321 void
4322 std_expand_builtin_va_start (tree valist, rtx nextarg)
4323 {
4324 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4325 convert_move (va_r, nextarg, 0);
4326 }
4327
4328 /* Expand EXP, a call to __builtin_va_start. */
4329
4330 static rtx
4331 expand_builtin_va_start (tree exp)
4332 {
4333 rtx nextarg;
4334 tree valist;
4335 location_t loc = EXPR_LOCATION (exp);
4336
4337 if (call_expr_nargs (exp) < 2)
4338 {
4339 error_at (loc, "too few arguments to function %<va_start%>");
4340 return const0_rtx;
4341 }
4342
4343 if (fold_builtin_next_arg (exp, true))
4344 return const0_rtx;
4345
4346 nextarg = expand_builtin_next_arg ();
4347 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4348
4349 if (targetm.expand_builtin_va_start)
4350 targetm.expand_builtin_va_start (valist, nextarg);
4351 else
4352 std_expand_builtin_va_start (valist, nextarg);
4353
4354 return const0_rtx;
4355 }
4356
4357 /* Expand EXP, a call to __builtin_va_end. */
4358
4359 static rtx
4360 expand_builtin_va_end (tree exp)
4361 {
4362 tree valist = CALL_EXPR_ARG (exp, 0);
4363
4364 /* Evaluate for side effects, if needed. I hate macros that don't
4365 do that. */
4366 if (TREE_SIDE_EFFECTS (valist))
4367 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4368
4369 return const0_rtx;
4370 }
4371
4372 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4373 builtin rather than just as an assignment in stdarg.h because of the
4374 nastiness of array-type va_list types. */
4375
4376 static rtx
4377 expand_builtin_va_copy (tree exp)
4378 {
4379 tree dst, src, t;
4380 location_t loc = EXPR_LOCATION (exp);
4381
4382 dst = CALL_EXPR_ARG (exp, 0);
4383 src = CALL_EXPR_ARG (exp, 1);
4384
4385 dst = stabilize_va_list_loc (loc, dst, 1);
4386 src = stabilize_va_list_loc (loc, src, 0);
4387
4388 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4389
4390 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4391 {
4392 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4393 TREE_SIDE_EFFECTS (t) = 1;
4394 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4395 }
4396 else
4397 {
4398 rtx dstb, srcb, size;
4399
4400 /* Evaluate to pointers. */
4401 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4402 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4403 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4404 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4405
4406 dstb = convert_memory_address (Pmode, dstb);
4407 srcb = convert_memory_address (Pmode, srcb);
4408
4409 /* "Dereference" to BLKmode memories. */
4410 dstb = gen_rtx_MEM (BLKmode, dstb);
4411 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4412 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4413 srcb = gen_rtx_MEM (BLKmode, srcb);
4414 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4415 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4416
4417 /* Copy. */
4418 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4419 }
4420
4421 return const0_rtx;
4422 }
4423
4424 /* Expand a call to one of the builtin functions __builtin_frame_address or
4425 __builtin_return_address. */
4426
4427 static rtx
4428 expand_builtin_frame_address (tree fndecl, tree exp)
4429 {
4430 /* The argument must be a nonnegative integer constant.
4431 It counts the number of frames to scan up the stack.
4432 The value is the frame address or return address of that frame. */
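/* For example, __builtin_return_address (0) yields the return
   address of the current function, and __builtin_frame_address (1)
   yields the caller's frame address, where the port supports
   walking that far up the stack. */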
4433 if (call_expr_nargs (exp) == 0)
4434 /* Warning about missing arg was already issued. */
4435 return const0_rtx;
4436 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4437 {
4438 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4439 error ("invalid argument to %<__builtin_frame_address%>");
4440 else
4441 error ("invalid argument to %<__builtin_return_address%>");
4442 return const0_rtx;
4443 }
4444 else
4445 {
4446 rtx tem
4447 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4448 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4449
4450 /* Some ports cannot access arbitrary stack frames. */
4451 if (tem == NULL)
4452 {
4453 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4454 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4455 else
4456 warning (0, "unsupported argument to %<__builtin_return_address%>");
4457 return const0_rtx;
4458 }
4459
4460 /* For __builtin_frame_address, return what we've got. */
4461 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4462 return tem;
4463
4464 if (!REG_P (tem)
4465 && ! CONSTANT_P (tem))
4466 tem = copy_addr_to_reg (tem);
4467 return tem;
4468 }
4469 }
4470
4471 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4472 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4473 is the same as for allocate_dynamic_stack_space. */
4474
4475 static rtx
4476 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4477 {
4478 rtx op0;
4479 rtx result;
4480 bool valid_arglist;
4481 unsigned int align;
4482 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4483 == BUILT_IN_ALLOCA_WITH_ALIGN);
4484
4485 valid_arglist
4486 = (alloca_with_align
4487 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4488 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4489
4490 if (!valid_arglist)
4491 return NULL_RTX;
4492
4493 /* Compute the argument. */
4494 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4495
4496 /* Compute the alignment. */
4497 align = (alloca_with_align
4498 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4499 : BIGGEST_ALIGNMENT);
4500
4501 /* Allocate the desired space. */
4502 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4503 result = convert_memory_address (ptr_mode, result);
4504
4505 return result;
4506 }
4507
4508 /* Expand a call to bswap builtin in EXP.
4509 Return NULL_RTX if a normal call should be emitted rather than expanding the
4510 function in-line. If convenient, the result should be placed in TARGET.
4511 SUBTARGET may be used as the target for computing one of EXP's operands. */
4512
4513 static rtx
4514 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4515 rtx subtarget)
4516 {
4517 tree arg;
4518 rtx op0;
4519
4520 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4521 return NULL_RTX;
4522
4523 arg = CALL_EXPR_ARG (exp, 0);
4524 op0 = expand_expr (arg,
4525 subtarget && GET_MODE (subtarget) == target_mode
4526 ? subtarget : NULL_RTX,
4527 target_mode, EXPAND_NORMAL);
4528 if (GET_MODE (op0) != target_mode)
4529 op0 = convert_to_mode (target_mode, op0, 1);
4530
4531 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4532
4533 gcc_assert (target);
4534
4535 return convert_to_mode (target_mode, target, 1);
4536 }
4537
4538 /* Expand a call to a unary builtin in EXP.
4539 Return NULL_RTX if a normal call should be emitted rather than expanding the
4540 function in-line. If convenient, the result should be placed in TARGET.
4541 SUBTARGET may be used as the target for computing one of EXP's operands. */
4542
4543 static rtx
4544 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4545 rtx subtarget, optab op_optab)
4546 {
4547 rtx op0;
4548
4549 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4550 return NULL_RTX;
4551
4552 /* Compute the argument. */
4553 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4554 (subtarget
4555 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4556 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4557 VOIDmode, EXPAND_NORMAL);
4558 /* Compute op, into TARGET if possible.
4559 Set TARGET to wherever the result comes back. */
4560 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4561 op_optab, op0, target, op_optab != clrsb_optab);
4562 gcc_assert (target);
4563
4564 return convert_to_mode (target_mode, target, 0);
4565 }
4566
4567 /* Expand a call to __builtin_expect. We just return our argument
4568 as the builtin_expect semantics should already have been applied
4569 by the tree branch prediction pass. */
4570
4571 static rtx
4572 expand_builtin_expect (tree exp, rtx target)
4573 {
4574 tree arg;
4575
4576 if (call_expr_nargs (exp) < 2)
4577 return const0_rtx;
4578 arg = CALL_EXPR_ARG (exp, 0);
4579
4580 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4581 /* When guessing was done, the hints should be already stripped away. */
4582 gcc_assert (!flag_guess_branch_prob
4583 || optimize == 0 || seen_error ());
4584 return target;
4585 }
4586
4587 /* Expand a call to __builtin_assume_aligned. We just return our first
4588 argument, as the builtin_assume_aligned semantics should already
4589 have been applied by CCP. */
4590
4591 static rtx
4592 expand_builtin_assume_aligned (tree exp, rtx target)
4593 {
4594 if (call_expr_nargs (exp) < 2)
4595 return const0_rtx;
4596 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4597 EXPAND_NORMAL);
4598 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4599 && (call_expr_nargs (exp) < 3
4600 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4601 return target;
4602 }
4603
4604 void
4605 expand_builtin_trap (void)
4606 {
4607 #ifdef HAVE_trap
4608 if (HAVE_trap)
4609 {
4610 rtx insn = emit_insn (gen_trap ());
4611 /* For trap insns when not accumulating outgoing args force
4612 REG_ARGS_SIZE note to prevent crossjumping of calls with
4613 different args sizes. */
4614 if (!ACCUMULATE_OUTGOING_ARGS)
4615 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4616 }
4617 else
4618 #endif
4619 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4620 emit_barrier ();
4621 }
4622
4623 /* Expand a call to __builtin_unreachable. We do nothing except emit
4624 a barrier saying that control flow will not pass here.
4625
4626 It is the responsibility of the program being compiled to ensure
4627 that control flow never reaches __builtin_unreachable. */
4628 static void
4629 expand_builtin_unreachable (void)
4630 {
4631 emit_barrier ();
4632 }
4633
4634 /* Expand EXP, a call to fabs, fabsf or fabsl.
4635 Return NULL_RTX if a normal call should be emitted rather than expanding
4636 the function inline. If convenient, the result should be placed
4637 in TARGET. SUBTARGET may be used as the target for computing
4638 the operand. */
4639
4640 static rtx
4641 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4642 {
4643 machine_mode mode;
4644 tree arg;
4645 rtx op0;
4646
4647 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4648 return NULL_RTX;
4649
4650 arg = CALL_EXPR_ARG (exp, 0);
4651 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4652 mode = TYPE_MODE (TREE_TYPE (arg));
4653 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4654 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4655 }
4656
4657 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4658 Return NULL if a normal call should be emitted rather than expanding the
4659 function inline. If convenient, the result should be placed in TARGET.
4660 SUBTARGET may be used as the target for computing the operand. */
4661
4662 static rtx
4663 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4664 {
4665 rtx op0, op1;
4666 tree arg;
4667
4668 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4669 return NULL_RTX;
4670
4671 arg = CALL_EXPR_ARG (exp, 0);
4672 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4673
4674 arg = CALL_EXPR_ARG (exp, 1);
4675 op1 = expand_normal (arg);
4676
4677 return expand_copysign (op0, op1, target);
4678 }
4679
4680 /* Expand a call to __builtin___clear_cache. */
4681
4682 static rtx
4683 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4684 {
4685 #ifndef HAVE_clear_cache
4686 #ifdef CLEAR_INSN_CACHE
4687 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4688 does something. Just do the default expansion to a call to
4689 __clear_cache(). */
4690 return NULL_RTX;
4691 #else
4692 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4693 does nothing. There is no need to call it. Do nothing. */
4694 return const0_rtx;
4695 #endif /* CLEAR_INSN_CACHE */
4696 #else
4697 /* We have a "clear_cache" insn, and it will handle everything. */
4698 tree begin, end;
4699 rtx begin_rtx, end_rtx;
4700
4701 /* We must not expand to a library call. If we did, any
4702 fallback library function in libgcc that might contain a call to
4703 __builtin___clear_cache() would recurse infinitely. */
4704 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4705 {
4706 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4707 return const0_rtx;
4708 }
4709
4710 if (HAVE_clear_cache)
4711 {
4712 struct expand_operand ops[2];
4713
4714 begin = CALL_EXPR_ARG (exp, 0);
4715 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4716
4717 end = CALL_EXPR_ARG (exp, 1);
4718 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4719
4720 create_address_operand (&ops[0], begin_rtx);
4721 create_address_operand (&ops[1], end_rtx);
4722 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4723 return const0_rtx;
4724 }
4725 return const0_rtx;
4726 #endif /* HAVE_clear_cache */
4727 }
4728
4729 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4730
4731 static rtx
4732 round_trampoline_addr (rtx tramp)
4733 {
4734 rtx temp, addend, mask;
4735
4736 /* If we don't need too much alignment, we'll have been guaranteed
4737 proper alignment by get_trampoline_type. */
4738 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4739 return tramp;
4740
4741 /* Round address up to desired boundary. */
4742 temp = gen_reg_rtx (Pmode);
4743 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4744 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
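/* E.g., for an 8-byte (64-bit) TRAMPOLINE_ALIGNMENT this computes
   ADDEND == 7 and MASK == -8, so the (TRAMP + 7) & -8 below is the
   next aligned address. */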
4745
4746 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4747 temp, 0, OPTAB_LIB_WIDEN);
4748 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4749 temp, 0, OPTAB_LIB_WIDEN);
4750
4751 return tramp;
4752 }
4753
4754 static rtx
4755 expand_builtin_init_trampoline (tree exp, bool onstack)
4756 {
4757 tree t_tramp, t_func, t_chain;
4758 rtx m_tramp, r_tramp, r_chain, tmp;
4759
4760 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4761 POINTER_TYPE, VOID_TYPE))
4762 return NULL_RTX;
4763
4764 t_tramp = CALL_EXPR_ARG (exp, 0);
4765 t_func = CALL_EXPR_ARG (exp, 1);
4766 t_chain = CALL_EXPR_ARG (exp, 2);
4767
4768 r_tramp = expand_normal (t_tramp);
4769 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4770 MEM_NOTRAP_P (m_tramp) = 1;
4771
4772 /* If ONSTACK, the TRAMP argument should be the address of a field
4773 within the local function's FRAME decl. Either way, let's see if
4774 we can fill in the MEM_ATTRs for this memory. */
4775 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4776 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4777
4778 /* The creator of a heap trampoline is responsible for making sure the
4779 address is aligned to at least STACK_BOUNDARY. Normally malloc
4780 will ensure this anyhow. */
4781 tmp = round_trampoline_addr (r_tramp);
4782 if (tmp != r_tramp)
4783 {
4784 m_tramp = change_address (m_tramp, BLKmode, tmp);
4785 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4786 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4787 }
4788
4789 /* The FUNC argument should be the address of the nested function.
4790 Extract the actual function decl to pass to the hook. */
4791 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4792 t_func = TREE_OPERAND (t_func, 0);
4793 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4794
4795 r_chain = expand_normal (t_chain);
4796
4797 /* Generate insns to initialize the trampoline. */
4798 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4799
4800 if (onstack)
4801 {
4802 trampolines_created = 1;
4803
4804 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4805 "trampoline generated for nested function %qD", t_func);
4806 }
4807
4808 return const0_rtx;
4809 }
4810
4811 static rtx
4812 expand_builtin_adjust_trampoline (tree exp)
4813 {
4814 rtx tramp;
4815
4816 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4817 return NULL_RTX;
4818
4819 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4820 tramp = round_trampoline_addr (tramp);
4821 if (targetm.calls.trampoline_adjust_address)
4822 tramp = targetm.calls.trampoline_adjust_address (tramp);
4823
4824 return tramp;
4825 }
4826
4827 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4828 function. The function first checks whether the back end provides
4829 an insn to implement signbit for the respective mode. If not, it
4830 checks whether the floating point format of the value is such that
4831 the sign bit can be extracted. If that is not the case, the
4832 function returns NULL_RTX to indicate that a normal call should be
4833 emitted rather than expanding the function in-line. EXP is the
4834 expression that is a call to the builtin function; if convenient,
4835 the result should be placed in TARGET. */
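/* Illustrative instance: for IEEE single precision signbit_ro is 31,
   so with a 32-bit integer result the expansion below reduces to
   masking the representation with 0x80000000, giving a nonzero
   value iff the sign bit is set. */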
4836 static rtx
4837 expand_builtin_signbit (tree exp, rtx target)
4838 {
4839 const struct real_format *fmt;
4840 machine_mode fmode, imode, rmode;
4841 tree arg;
4842 int word, bitpos;
4843 enum insn_code icode;
4844 rtx temp;
4845 location_t loc = EXPR_LOCATION (exp);
4846
4847 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4848 return NULL_RTX;
4849
4850 arg = CALL_EXPR_ARG (exp, 0);
4851 fmode = TYPE_MODE (TREE_TYPE (arg));
4852 rmode = TYPE_MODE (TREE_TYPE (exp));
4853 fmt = REAL_MODE_FORMAT (fmode);
4854
4855 arg = builtin_save_expr (arg);
4856
4857 /* Expand the argument yielding a RTX expression. */
4858 temp = expand_normal (arg);
4859
4860 /* Check if the back end provides an insn that handles signbit for the
4861 argument's mode. */
4862 icode = optab_handler (signbit_optab, fmode);
4863 if (icode != CODE_FOR_nothing)
4864 {
4865 rtx_insn *last = get_last_insn ();
4866 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4867 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4868 return target;
4869 delete_insns_since (last);
4870 }
4871
4872 /* For floating point formats without a sign bit, implement signbit
4873 as "ARG < 0.0". */
4874 bitpos = fmt->signbit_ro;
4875 if (bitpos < 0)
4876 {
4877 /* But we can't do this if the format supports signed zero. */
4878 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4879 return NULL_RTX;
4880
4881 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4882 build_real (TREE_TYPE (arg), dconst0));
4883 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4884 }
4885
4886 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4887 {
4888 imode = int_mode_for_mode (fmode);
4889 if (imode == BLKmode)
4890 return NULL_RTX;
4891 temp = gen_lowpart (imode, temp);
4892 }
4893 else
4894 {
4895 imode = word_mode;
4896 /* Handle targets with different FP word orders. */
4897 if (FLOAT_WORDS_BIG_ENDIAN)
4898 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4899 else
4900 word = bitpos / BITS_PER_WORD;
4901 temp = operand_subword_force (temp, word, fmode);
4902 bitpos = bitpos % BITS_PER_WORD;
4903 }
4904
4905 /* Force the intermediate word_mode (or narrower) result into a
4906 register. This avoids attempting to create paradoxical SUBREGs
4907 of floating point modes below. */
4908 temp = force_reg (imode, temp);
4909
4910 /* If the bitpos is within the "result mode" lowpart, the operation
4911 can be implemented with a single bitwise AND. Otherwise, we need
4912 a right shift and an AND. */
4913
4914 if (bitpos < GET_MODE_BITSIZE (rmode))
4915 {
4916 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4917
4918 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4919 temp = gen_lowpart (rmode, temp);
4920 temp = expand_binop (rmode, and_optab, temp,
4921 immed_wide_int_const (mask, rmode),
4922 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4923 }
4924 else
4925 {
4926 /* Perform a logical right shift to place the signbit in the least
4927 significant bit, then truncate the result to the desired mode
4928 and mask just this bit. */
4929 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4930 temp = gen_lowpart (rmode, temp);
4931 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4932 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4933 }
4934
4935 return temp;
4936 }
4937
4938 /* Expand fork or exec calls. TARGET is the desired target of the
4939 call. EXP is the call. FN is the
4940 declaration of the actual function. IGNORE is nonzero if the
4941 value is to be ignored. */
4942
4943 static rtx
4944 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4945 {
4946 tree id, decl;
4947 tree call;
4948
4949 /* If we are not profiling, just call the function. */
4950 if (!profile_arc_flag)
4951 return NULL_RTX;
4952
4953 /* Otherwise call the wrapper. This should be equivalent for the rest of
4954 the compiler, so the code does not diverge, and the wrapper may run the
4955 code necessary for keeping the profiling sane. */
4956
4957 switch (DECL_FUNCTION_CODE (fn))
4958 {
4959 case BUILT_IN_FORK:
4960 id = get_identifier ("__gcov_fork");
4961 break;
4962
4963 case BUILT_IN_EXECL:
4964 id = get_identifier ("__gcov_execl");
4965 break;
4966
4967 case BUILT_IN_EXECV:
4968 id = get_identifier ("__gcov_execv");
4969 break;
4970
4971 case BUILT_IN_EXECLP:
4972 id = get_identifier ("__gcov_execlp");
4973 break;
4974
4975 case BUILT_IN_EXECLE:
4976 id = get_identifier ("__gcov_execle");
4977 break;
4978
4979 case BUILT_IN_EXECVP:
4980 id = get_identifier ("__gcov_execvp");
4981 break;
4982
4983 case BUILT_IN_EXECVE:
4984 id = get_identifier ("__gcov_execve");
4985 break;
4986
4987 default:
4988 gcc_unreachable ();
4989 }
4990
4991 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4992 FUNCTION_DECL, id, TREE_TYPE (fn));
4993 DECL_EXTERNAL (decl) = 1;
4994 TREE_PUBLIC (decl) = 1;
4995 DECL_ARTIFICIAL (decl) = 1;
4996 TREE_NOTHROW (decl) = 1;
4997 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4998 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4999 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5000 return expand_call (call, target, ignore);
5001 }
5002
5003
5004 \f
5005 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5006 the pointer in these functions is void*, the tree optimizers may remove
5007 casts. The mode computed in expand_builtin isn't reliable either, due
5008 to __sync_bool_compare_and_swap.
5009
5010 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5011 group of builtins. This gives us log2 of the mode size. */
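/* For example, BUILT_IN_SYNC_FETCH_AND_ADD_4 is two entries past the
   corresponding FOO_1 code, so FCODE_DIFF == 2 and we request a
   BITS_PER_UNIT << 2 == 32-bit integer mode, SImode on typical
   targets. */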
5012
5013 static inline machine_mode
5014 get_builtin_sync_mode (int fcode_diff)
5015 {
5016 /* The size is not negotiable, so ask not to get BLKmode in return
5017 if the target indicates that a smaller size would be better. */
5018 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5019 }
5020
5021 /* Expand the memory expression LOC and return the appropriate memory operand
5022 for the builtin_sync operations. */
5023
5024 static rtx
5025 get_builtin_sync_mem (tree loc, machine_mode mode)
5026 {
5027 rtx addr, mem;
5028
5029 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5030 addr = convert_memory_address (Pmode, addr);
5031
5032 /* Note that we explicitly do not want any alias information for this
5033 memory, so that we kill all other live memories. Otherwise we don't
5034 satisfy the full barrier semantics of the intrinsic. */
5035 mem = validize_mem (gen_rtx_MEM (mode, addr));
5036
5037 /* The alignment needs to be at least that of the mode. */
5038 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5039 get_pointer_alignment (loc)));
5040 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5041 MEM_VOLATILE_P (mem) = 1;
5042
5043 return mem;
5044 }
5045
5046 /* Make sure an argument is in the right mode.
5047 EXP is the tree argument.
5048 MODE is the mode it should be in. */
5049
5050 static rtx
5051 expand_expr_force_mode (tree exp, machine_mode mode)
5052 {
5053 rtx val;
5054 machine_mode old_mode;
5055
5056 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5057 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5058 of CONST_INTs, where we know the old_mode only from the call argument. */
5059
5060 old_mode = GET_MODE (val);
5061 if (old_mode == VOIDmode)
5062 old_mode = TYPE_MODE (TREE_TYPE (exp));
5063 val = convert_modes (mode, old_mode, val, 1);
5064 return val;
5065 }
5066
5067
5068 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5069 EXP is the CALL_EXPR. CODE is the rtx code
5070 that corresponds to the arithmetic or logical operation from the name;
5071 an exception here is that NOT actually means NAND. TARGET is an optional
5072 place for us to store the results; AFTER is true if this is the
5073 fetch_and_xxx form. */
5074
5075 static rtx
5076 expand_builtin_sync_operation (machine_mode mode, tree exp,
5077 enum rtx_code code, bool after,
5078 rtx target)
5079 {
5080 rtx val, mem;
5081 location_t loc = EXPR_LOCATION (exp);
5082
5083 if (code == NOT && warn_sync_nand)
5084 {
5085 tree fndecl = get_callee_fndecl (exp);
5086 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5087
5088 static bool warned_f_a_n, warned_n_a_f;
5089
5090 switch (fcode)
5091 {
5092 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5093 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5094 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5095 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5096 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5097 if (warned_f_a_n)
5098 break;
5099
5100 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5101 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5102 warned_f_a_n = true;
5103 break;
5104
5105 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5106 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5107 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5108 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5109 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5110 if (warned_n_a_f)
5111 break;
5112
5113 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5114 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5115 warned_n_a_f = true;
5116 break;
5117
5118 default:
5119 gcc_unreachable ();
5120 }
5121 }
5122
5123 /* Expand the operands. */
5124 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5125 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5126
5127 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5128 after);
5129 }
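/* Editorial example (not part of the original source) of the GCC 4.4
   semantic change warned about above, expressed in C terms:

     GCC <= 4.3:  __sync_fetch_and_nand stored  ~old & val
     GCC >= 4.4:  __sync_fetch_and_nand stores  ~(old & val)

   which is why each group of NAND builtins gets a one-time inform (). */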
5130
5131 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5132 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5133 true if this is the boolean form. TARGET is a place for us to store the
5134 results; this is NOT optional if IS_BOOL is true. */
5135
5136 static rtx
5137 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5138 bool is_bool, rtx target)
5139 {
5140 rtx old_val, new_val, mem;
5141 rtx *pbool, *poval;
5142
5143 /* Expand the operands. */
5144 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5145 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5146 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5147
5148 pbool = poval = NULL;
5149 if (target != const0_rtx)
5150 {
5151 if (is_bool)
5152 pbool = &target;
5153 else
5154 poval = &target;
5155 }
5156 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5157 false, MEMMODEL_SEQ_CST,
5158 MEMMODEL_SEQ_CST))
5159 return NULL_RTX;
5160
5161 return target;
5162 }
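/* Editorial note (not part of the original source) on the two forms
   dispatched above:

     bool __sync_bool_compare_and_swap (T *p, T oldv, T newv);
       PBOOL is set; the caller only learns whether the swap happened.
     T __sync_val_compare_and_swap (T *p, T oldv, T newv);
       POVAL is set; the caller gets the prior contents of *P.

   When the result is unused (TARGET == const0_rtx) both pointers stay
   NULL and only the memory side effect is emitted. */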
5163
5164 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5165 general form is actually an atomic exchange, and some targets only
5166 support a reduced form with the second argument being a constant 1.
5167 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5168 the results. */
5169
5170 static rtx
5171 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5172 rtx target)
5173 {
5174 rtx val, mem;
5175
5176 /* Expand the operands. */
5177 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5178 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5179
5180 return expand_sync_lock_test_and_set (target, mem, val);
5181 }
5182
5183 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5184
5185 static void
5186 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5187 {
5188 rtx mem;
5189
5190 /* Expand the operands. */
5191 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5192
5193 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5194 }
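/* Editorial sketch (not part of the original source): the expansion above
   makes __sync_lock_release (ptr) behave like

     __atomic_store_n (ptr, 0, __ATOMIC_RELEASE);

   i.e. a release-ordered store of zero, which is all the __sync lock
   protocol requires. */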
5195
5196 /* Given an integer representing an ``enum memmodel'', verify its
5197 correctness and return the memory model enum. */
5198
5199 static enum memmodel
5200 get_memmodel (tree exp)
5201 {
5202 rtx op;
5203 unsigned HOST_WIDE_INT val;
5204
5205 /* If the parameter is not a constant, it's a run time value so we'll just
5206 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5207 if (TREE_CODE (exp) != INTEGER_CST)
5208 return MEMMODEL_SEQ_CST;
5209
5210 op = expand_normal (exp);
5211
5212 val = INTVAL (op);
5213 if (targetm.memmodel_check)
5214 val = targetm.memmodel_check (val);
5215 else if (val & ~MEMMODEL_MASK)
5216 {
5217 warning (OPT_Winvalid_memory_model,
5218 "Unknown architecture specifier in memory model to builtin.");
5219 return MEMMODEL_SEQ_CST;
5220 }
5221
5222 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5223 {
5224 warning (OPT_Winvalid_memory_model,
5225 "invalid memory model argument to builtin");
5226 return MEMMODEL_SEQ_CST;
5227 }
5228
5229 return (enum memmodel) val;
5230 }
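/* Editorial sketch (not part of the original source), assuming the
   documented __ATOMIC_* encoding: the low bits of the argument hold one of

     __ATOMIC_RELAXED (0), __ATOMIC_CONSUME (1), __ATOMIC_ACQUIRE (2),
     __ATOMIC_RELEASE (3), __ATOMIC_ACQ_REL (4), __ATOMIC_SEQ_CST (5)

   while bits outside MEMMODEL_MASK are target extensions vetted by
   targetm.memmodel_check, e.g. the x86 HLE acquire/release hints. */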
5231
5232 /* Expand the __atomic_exchange intrinsic:
5233 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5234 EXP is the CALL_EXPR.
5235 TARGET is an optional place for us to store the results. */
5236
5237 static rtx
5238 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5239 {
5240 rtx val, mem;
5241 enum memmodel model;
5242
5243 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5244 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5245 {
5246 error ("invalid memory model for %<__atomic_exchange%>");
5247 return NULL_RTX;
5248 }
5249
5250 if (!flag_inline_atomics)
5251 return NULL_RTX;
5252
5253 /* Expand the operands. */
5254 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5255 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5256
5257 return expand_atomic_exchange (target, mem, val, model);
5258 }
5259
5260 /* Expand the __atomic_compare_exchange intrinsic:
5261 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5262 TYPE desired, BOOL weak,
5263 enum memmodel success,
5264 enum memmodel failure)
5265 EXP is the CALL_EXPR.
5266 TARGET is an optional place for us to store the results. */
5267
5268 static rtx
5269 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5270 rtx target)
5271 {
5272 rtx expect, desired, mem, oldval;
5273 rtx_code_label *label;
5274 enum memmodel success, failure;
5275 tree weak;
5276 bool is_weak;
5277
5278 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5279 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5280
5281 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5282 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5283 {
5284 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5285 return NULL_RTX;
5286 }
5287
5288 if (failure > success)
5289 {
5290 error ("failure memory model cannot be stronger than success "
5291 "memory model for %<__atomic_compare_exchange%>");
5292 return NULL_RTX;
5293 }
5294
5295 if (!flag_inline_atomics)
5296 return NULL_RTX;
5297
5298 /* Expand the operands. */
5299 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5300
5301 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5302 expect = convert_memory_address (Pmode, expect);
5303 expect = gen_rtx_MEM (mode, expect);
5304 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5305
5306 weak = CALL_EXPR_ARG (exp, 3);
5307 is_weak = false;
5308 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5309 is_weak = true;
5310
5311 if (target == const0_rtx)
5312 target = NULL;
5313
5314 /* Lest the rtl backend create a race condition with an improper store
5315 to memory, always create a new pseudo for OLDVAL. */
5316 oldval = NULL;
5317
5318 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5319 is_weak, success, failure))
5320 return NULL_RTX;
5321
5322 /* Conditionally store back to EXPECT, lest we create a race condition
5323 with an improper store to memory. */
5324 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5325 the normal case where EXPECT is totally private, i.e. a register. At
5326 which point the store can be unconditional. */
5327 label = gen_label_rtx ();
5328 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5329 emit_move_insn (expect, oldval);
5330 emit_label (label);
5331
5332 return target;
5333 }
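/* Editorial sketch (not part of the original source) of the semantics
   implemented above, with the atomicity elided:

     bool
     compare_exchange (TYPE *object, TYPE *expect, TYPE desired)
     {
       TYPE old = *object;      // OLDVAL, always a fresh pseudo
       bool ok = (old == *expect);
       if (ok)
         *object = desired;     // the compare-and-swap proper
       else
         *expect = old;         // the conditional store-back
       return ok;
     }

   Only the failure path may write to *EXPECT, hence the jump around the
   emit_move_insn above. */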
5334
5335 /* Expand the __atomic_load intrinsic:
5336 TYPE __atomic_load (TYPE *object, enum memmodel)
5337 EXP is the CALL_EXPR.
5338 TARGET is an optional place for us to store the results. */
5339
5340 static rtx
5341 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5342 {
5343 rtx mem;
5344 enum memmodel model;
5345
5346 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5347 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5348 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5349 {
5350 error ("invalid memory model for %<__atomic_load%>");
5351 return NULL_RTX;
5352 }
5353
5354 if (!flag_inline_atomics)
5355 return NULL_RTX;
5356
5357 /* Expand the operand. */
5358 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5359
5360 return expand_atomic_load (target, mem, model);
5361 }
5362
5363
5364 /* Expand the __atomic_store intrinsic:
5365 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5366 EXP is the CALL_EXPR.
5367 TARGET is an optional place for us to store the results. */
5368
5369 static rtx
5370 expand_builtin_atomic_store (machine_mode mode, tree exp)
5371 {
5372 rtx mem, val;
5373 enum memmodel model;
5374
5375 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5376 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5377 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5378 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5379 {
5380 error ("invalid memory model for %<__atomic_store%>");
5381 return NULL_RTX;
5382 }
5383
5384 if (!flag_inline_atomics)
5385 return NULL_RTX;
5386
5387 /* Expand the operands. */
5388 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5389 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5390
5391 return expand_atomic_store (mem, val, model, false);
5392 }
5393
5394 /* Expand the __atomic_fetch_XXX intrinsic:
5395 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5396 EXP is the CALL_EXPR.
5397 TARGET is an optional place for us to store the results.
5398 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR; as above, NOT means NAND.
5399 FETCH_AFTER is true if returning the result of the operation.
5400 FETCH_AFTER is false if returning the value before the operation.
5401 IGNORE is true if the result is not used.
5402 EXT_CALL is the correct builtin for an external call if this cannot be
5403 resolved to an instruction sequence. */
5404
5405 static rtx
5406 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5407 enum rtx_code code, bool fetch_after,
5408 bool ignore, enum built_in_function ext_call)
5409 {
5410 rtx val, mem, ret;
5411 enum memmodel model;
5412 tree fndecl;
5413 tree addr;
5414
5415 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5416
5417 /* Expand the operands. */
5418 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5419 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5420
5421 /* Only try generating instructions if inlining is turned on. */
5422 if (flag_inline_atomics)
5423 {
5424 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5425 if (ret)
5426 return ret;
5427 }
5428
5429 /* Return if a different routine isn't needed for the library call. */
5430 if (ext_call == BUILT_IN_NONE)
5431 return NULL_RTX;
5432
5433 /* Change the call to the specified function. */
5434 fndecl = get_callee_fndecl (exp);
5435 addr = CALL_EXPR_FN (exp);
5436 STRIP_NOPS (addr);
5437
5438 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5439 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5440
5441 /* Expand the call here so we can emit trailing code. */
5442 ret = expand_call (exp, target, ignore);
5443
5444 /* Restore the original function just in case it matters. */
5445 TREE_OPERAND (addr, 0) = fndecl;
5446
5447 /* Then issue the arithmetic correction to return the right result. */
5448 if (!ignore)
5449 {
5450 if (code == NOT)
5451 {
5452 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5453 OPTAB_LIB_WIDEN);
5454 ret = expand_simple_unop (mode, NOT, ret, target, true);
5455 }
5456 else
5457 ret = expand_simple_binop (mode, code, ret, val, target, true,
5458 OPTAB_LIB_WIDEN);
5459 }
5460 return ret;
5461 }
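/* Editorial sketch (not part of the original source) of the trailing
   correction above: the EXT_CALL library routine returns the value the
   memory held before the operation, so when the post-operation
   __atomic_OP_fetch form was requested we recompute

     ret = ret OP val;       // PLUS, MINUS, AND, XOR, IOR
     ret = ~(ret & val);     // the NAND case, spelled with NOT

   on the returned value, matching what an inline sequence would yield. */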
5462
5463
5464 #ifndef HAVE_atomic_clear
5465 # define HAVE_atomic_clear 0
5466 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5467 #endif
5468
5469 /* Expand an atomic clear operation.
5470 void __atomic_clear (BOOL *obj, enum memmodel)
5471 EXP is the call expression. */
5472
5473 static rtx
5474 expand_builtin_atomic_clear (tree exp)
5475 {
5476 machine_mode mode;
5477 rtx mem, ret;
5478 enum memmodel model;
5479
5480 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5481 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5482 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5483
5484 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5485 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5486 {
5487 error ("invalid memory model for %<__atomic_store%>");
5488 return const0_rtx;
5489 }
5490
5491 if (HAVE_atomic_clear)
5492 {
5493 emit_insn (gen_atomic_clear (mem, model));
5494 return const0_rtx;
5495 }
5496
5497 /* Try issuing an __atomic_store, allowing a fallback to __sync_lock_release.
5498 Failing both of those, emit the store as a plain move. The only way this
5499 can fail is if the bool type is larger than a word size. Unlikely, but
5500 handle it anyway for completeness. Assume a single threaded model since
5501 there is no atomic support in this case, and no barriers are required. */
5502 ret = expand_atomic_store (mem, const0_rtx, model, true);
5503 if (!ret)
5504 emit_move_insn (mem, const0_rtx);
5505 return const0_rtx;
5506 }
5507
5508 /* Expand an atomic test_and_set operation.
5509 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5510 EXP is the call expression. */
5511
5512 static rtx
5513 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5514 {
5515 rtx mem;
5516 enum memmodel model;
5517 machine_mode mode;
5518
5519 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5520 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5521 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5522
5523 return expand_atomic_test_and_set (target, mem, model);
5524 }
5525
5526
5527 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5528 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5529
5530 static tree
5531 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5532 {
5533 int size;
5534 machine_mode mode;
5535 unsigned int mode_align, type_align;
5536
5537 if (TREE_CODE (arg0) != INTEGER_CST)
5538 return NULL_TREE;
5539
5540 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5541 mode = mode_for_size (size, MODE_INT, 0);
5542 mode_align = GET_MODE_ALIGNMENT (mode);
5543
5544 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5545 type_align = mode_align;
5546 else
5547 {
5548 tree ttype = TREE_TYPE (arg1);
5549
5550 /* This function is usually invoked and folded immediately by the front
5551 end before anything else has a chance to look at it. The pointer
5552 parameter at this point is usually cast to a void *, so check for that
5553 and look past the cast. */
5554 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5555 && VOID_TYPE_P (TREE_TYPE (ttype)))
5556 arg1 = TREE_OPERAND (arg1, 0);
5557
5558 ttype = TREE_TYPE (arg1);
5559 gcc_assert (POINTER_TYPE_P (ttype));
5560
5561 /* Get the underlying type of the object. */
5562 ttype = TREE_TYPE (ttype);
5563 type_align = TYPE_ALIGN (ttype);
5564 }
5565
5566 /* If the object has smaller alignment, the lock free routines cannot
5567 be used. */
5568 if (type_align < mode_align)
5569 return boolean_false_node;
5570
5571 /* Check if a compare_and_swap pattern exists for the mode which represents
5572 the required size. The pattern is not allowed to fail, so the existence
5573 of the pattern indicates support is present. */
5574 if (can_compare_and_swap_p (mode, true))
5575 return boolean_true_node;
5576 else
5577 return boolean_false_node;
5578 }
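/* Editorial example (not part of the original source), on a typical
   target with a lock-free 4-byte compare-and-swap:

     __atomic_always_lock_free (sizeof (int), 0);   // folds to true
     __atomic_always_lock_free (sizeof (int), p);   // depends on the
                                                    // declared type of *p

   A constant-zero pointer means "assume natural alignment", so only the
   mode's own alignment requirement is checked. */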
5579
5580 /* Return true if the parameters to call EXP represent an object which will
5581 always generate lock free instructions. The first argument represents the
5582 size of the object, and the second parameter is a pointer to the object
5583 itself. If NULL is passed for the object, then the result is based on
5584 typical alignment for an object of the specified size. Otherwise return
5585 false. */
5586
5587 static rtx
5588 expand_builtin_atomic_always_lock_free (tree exp)
5589 {
5590 tree size;
5591 tree arg0 = CALL_EXPR_ARG (exp, 0);
5592 tree arg1 = CALL_EXPR_ARG (exp, 1);
5593
5594 if (TREE_CODE (arg0) != INTEGER_CST)
5595 {
5596 error ("non-constant argument 1 to __atomic_always_lock_free");
5597 return const0_rtx;
5598 }
5599
5600 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5601 if (size == boolean_true_node)
5602 return const1_rtx;
5603 return const0_rtx;
5604 }
5605
5606 /* Return one or zero if it can be determined that the object ARG1 of size
5607 ARG0 is lock free on this architecture. */
5608
5609 static tree
5610 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5611 {
5612 if (!flag_inline_atomics)
5613 return NULL_TREE;
5614
5615 /* If it isn't always lock free, don't generate a result. */
5616 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5617 return boolean_true_node;
5618
5619 return NULL_TREE;
5620 }
5621
5622 /* Return one if the parameters to call EXP represent an object which will
5623 always generate lock free instructions. The first argument represents the
5624 size of the object, and the second parameter is a pointer to the object
5625 itself. If NULL is passed for the object, then the result is based on
5626 typical alignment for an object of the specified size. Otherwise return
5627 NULL_RTX. */
5628
5629 static rtx
5630 expand_builtin_atomic_is_lock_free (tree exp)
5631 {
5632 tree size;
5633 tree arg0 = CALL_EXPR_ARG (exp, 0);
5634 tree arg1 = CALL_EXPR_ARG (exp, 1);
5635
5636 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5637 {
5638 error ("non-integer argument 1 to __atomic_is_lock_free");
5639 return NULL_RTX;
5640 }
5641
5642 if (!flag_inline_atomics)
5643 return NULL_RTX;
5644
5645 /* If the value is known at compile time, return the RTX for it. */
5646 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5647 if (size == boolean_true_node)
5648 return const1_rtx;
5649
5650 return NULL_RTX;
5651 }
5652
5653 /* Expand the __atomic_thread_fence intrinsic:
5654 void __atomic_thread_fence (enum memmodel)
5655 EXP is the CALL_EXPR. */
5656
5657 static void
5658 expand_builtin_atomic_thread_fence (tree exp)
5659 {
5660 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5661 expand_mem_thread_fence (model);
5662 }
5663
5664 /* Expand the __atomic_signal_fence intrinsic:
5665 void __atomic_signal_fence (enum memmodel)
5666 EXP is the CALL_EXPR. */
5667
5668 static void
5669 expand_builtin_atomic_signal_fence (tree exp)
5670 {
5671 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5672 expand_mem_signal_fence (model);
5673 }
5674
5675 /* Expand the __sync_synchronize intrinsic. */
5676
5677 static void
5678 expand_builtin_sync_synchronize (void)
5679 {
5680 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5681 }
5682
5683 static rtx
5684 expand_builtin_thread_pointer (tree exp, rtx target)
5685 {
5686 enum insn_code icode;
5687 if (!validate_arglist (exp, VOID_TYPE))
5688 return const0_rtx;
5689 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5690 if (icode != CODE_FOR_nothing)
5691 {
5692 struct expand_operand op;
5693 /* If the target is not suitable then create a new target. */
5694 if (target == NULL_RTX
5695 || !REG_P (target)
5696 || GET_MODE (target) != Pmode)
5697 target = gen_reg_rtx (Pmode);
5698 create_output_operand (&op, target, Pmode);
5699 expand_insn (icode, 1, &op);
5700 return target;
5701 }
5702 error ("__builtin_thread_pointer is not supported on this target");
5703 return const0_rtx;
5704 }
5705
5706 static void
5707 expand_builtin_set_thread_pointer (tree exp)
5708 {
5709 enum insn_code icode;
5710 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5711 return;
5712 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5713 if (icode != CODE_FOR_nothing)
5714 {
5715 struct expand_operand op;
5716 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5717 Pmode, EXPAND_NORMAL);
5718 create_input_operand (&op, val, Pmode);
5719 expand_insn (icode, 1, &op);
5720 return;
5721 }
5722 error ("__builtin_set_thread_pointer is not supported on this target");
5723 }
5724
5725 \f
5726 /* Emit code to restore the current value of the stack. */
5727
5728 static void
5729 expand_stack_restore (tree var)
5730 {
5731 rtx_insn *prev;
5732 rtx sa = expand_normal (var);
5733
5734 sa = convert_memory_address (Pmode, sa);
5735
5736 prev = get_last_insn ();
5737 emit_stack_restore (SAVE_BLOCK, sa);
5738 fixup_args_size_notes (prev, get_last_insn (), 0);
5739 }
5740
5741
5742 /* Emit code to save the current value of the stack. */
5743
5744 static rtx
5745 expand_stack_save (void)
5746 {
5747 rtx ret = NULL_RTX;
5748
5749 do_pending_stack_adjust ();
5750 emit_stack_save (SAVE_BLOCK, &ret);
5751 return ret;
5752 }
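/* Editorial sketch (not part of the original source): these two expanders
   implement the save/restore pairing emitted around blocks containing
   variable-sized objects, conceptually

     void *sp = __builtin_stack_save ();
     {
       char vla[n];                   // alloca-like allocation
       ...
     }
     __builtin_stack_restore (sp);    // releases the VLA's storage

   with fixup_args_size_notes keeping the pending argument-size notes
   consistent across the restore. */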
5753
5754 /* Expand an expression EXP that calls a built-in function,
5755 with result going to TARGET if that's convenient
5756 (and in mode MODE if that's convenient).
5757 SUBTARGET may be used as the target for computing one of EXP's operands.
5758 IGNORE is nonzero if the value is to be ignored. */
5759
5760 rtx
5761 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5762 int ignore)
5763 {
5764 tree fndecl = get_callee_fndecl (exp);
5765 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5766 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5767 int flags;
5768
5769 /* When ASan is enabled, we don't want to expand some memory/string
5770 builtins; instead we rely on libsanitizer's hooks. This allows us to
5771 avoid redundant checks and be sure that a possible overflow will be
5772 detected by ASan. */
5773
5774 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5775 return expand_call (exp, target, ignore);
5776
5777 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5778 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5779
5780 /* When not optimizing, generate calls to library functions for a certain
5781 set of builtins. */
5782 if (!optimize
5783 && !called_as_built_in (fndecl)
5784 && fcode != BUILT_IN_FORK
5785 && fcode != BUILT_IN_EXECL
5786 && fcode != BUILT_IN_EXECV
5787 && fcode != BUILT_IN_EXECLP
5788 && fcode != BUILT_IN_EXECLE
5789 && fcode != BUILT_IN_EXECVP
5790 && fcode != BUILT_IN_EXECVE
5791 && fcode != BUILT_IN_ALLOCA
5792 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5793 && fcode != BUILT_IN_FREE)
5794 return expand_call (exp, target, ignore);
5795
5796 /* The built-in function expanders test for target == const0_rtx
5797 to determine whether the function's result will be ignored. */
5798 if (ignore)
5799 target = const0_rtx;
5800
5801 /* If the result of a pure or const built-in function is ignored, and
5802 none of its arguments are volatile, we can avoid expanding the
5803 built-in call and just evaluate the arguments for side-effects. */
5804 if (target == const0_rtx
5805 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5806 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5807 {
5808 bool volatilep = false;
5809 tree arg;
5810 call_expr_arg_iterator iter;
5811
5812 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5813 if (TREE_THIS_VOLATILE (arg))
5814 {
5815 volatilep = true;
5816 break;
5817 }
5818
5819 if (! volatilep)
5820 {
5821 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5822 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5823 return const0_rtx;
5824 }
5825 }
5826
5827 switch (fcode)
5828 {
5829 CASE_FLT_FN (BUILT_IN_FABS):
5830 case BUILT_IN_FABSD32:
5831 case BUILT_IN_FABSD64:
5832 case BUILT_IN_FABSD128:
5833 target = expand_builtin_fabs (exp, target, subtarget);
5834 if (target)
5835 return target;
5836 break;
5837
5838 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5839 target = expand_builtin_copysign (exp, target, subtarget);
5840 if (target)
5841 return target;
5842 break;
5843
5844 /* Just do a normal library call if we were unable to fold
5845 the values. */
5846 CASE_FLT_FN (BUILT_IN_CABS):
5847 break;
5848
5849 CASE_FLT_FN (BUILT_IN_EXP):
5850 CASE_FLT_FN (BUILT_IN_EXP10):
5851 CASE_FLT_FN (BUILT_IN_POW10):
5852 CASE_FLT_FN (BUILT_IN_EXP2):
5853 CASE_FLT_FN (BUILT_IN_EXPM1):
5854 CASE_FLT_FN (BUILT_IN_LOGB):
5855 CASE_FLT_FN (BUILT_IN_LOG):
5856 CASE_FLT_FN (BUILT_IN_LOG10):
5857 CASE_FLT_FN (BUILT_IN_LOG2):
5858 CASE_FLT_FN (BUILT_IN_LOG1P):
5859 CASE_FLT_FN (BUILT_IN_TAN):
5860 CASE_FLT_FN (BUILT_IN_ASIN):
5861 CASE_FLT_FN (BUILT_IN_ACOS):
5862 CASE_FLT_FN (BUILT_IN_ATAN):
5863 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5864 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5865 because of possible accuracy problems. */
5866 if (! flag_unsafe_math_optimizations)
5867 break;
5868 CASE_FLT_FN (BUILT_IN_SQRT):
5869 CASE_FLT_FN (BUILT_IN_FLOOR):
5870 CASE_FLT_FN (BUILT_IN_CEIL):
5871 CASE_FLT_FN (BUILT_IN_TRUNC):
5872 CASE_FLT_FN (BUILT_IN_ROUND):
5873 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5874 CASE_FLT_FN (BUILT_IN_RINT):
5875 target = expand_builtin_mathfn (exp, target, subtarget);
5876 if (target)
5877 return target;
5878 break;
5879
5880 CASE_FLT_FN (BUILT_IN_FMA):
5881 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5882 if (target)
5883 return target;
5884 break;
5885
5886 CASE_FLT_FN (BUILT_IN_ILOGB):
5887 if (! flag_unsafe_math_optimizations)
5888 break;
5889 CASE_FLT_FN (BUILT_IN_ISINF):
5890 CASE_FLT_FN (BUILT_IN_FINITE):
5891 case BUILT_IN_ISFINITE:
5892 case BUILT_IN_ISNORMAL:
5893 target = expand_builtin_interclass_mathfn (exp, target);
5894 if (target)
5895 return target;
5896 break;
5897
5898 CASE_FLT_FN (BUILT_IN_ICEIL):
5899 CASE_FLT_FN (BUILT_IN_LCEIL):
5900 CASE_FLT_FN (BUILT_IN_LLCEIL):
5901 CASE_FLT_FN (BUILT_IN_LFLOOR):
5902 CASE_FLT_FN (BUILT_IN_IFLOOR):
5903 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5904 target = expand_builtin_int_roundingfn (exp, target);
5905 if (target)
5906 return target;
5907 break;
5908
5909 CASE_FLT_FN (BUILT_IN_IRINT):
5910 CASE_FLT_FN (BUILT_IN_LRINT):
5911 CASE_FLT_FN (BUILT_IN_LLRINT):
5912 CASE_FLT_FN (BUILT_IN_IROUND):
5913 CASE_FLT_FN (BUILT_IN_LROUND):
5914 CASE_FLT_FN (BUILT_IN_LLROUND):
5915 target = expand_builtin_int_roundingfn_2 (exp, target);
5916 if (target)
5917 return target;
5918 break;
5919
5920 CASE_FLT_FN (BUILT_IN_POWI):
5921 target = expand_builtin_powi (exp, target);
5922 if (target)
5923 return target;
5924 break;
5925
5926 CASE_FLT_FN (BUILT_IN_ATAN2):
5927 CASE_FLT_FN (BUILT_IN_LDEXP):
5928 CASE_FLT_FN (BUILT_IN_SCALB):
5929 CASE_FLT_FN (BUILT_IN_SCALBN):
5930 CASE_FLT_FN (BUILT_IN_SCALBLN):
5931 if (! flag_unsafe_math_optimizations)
5932 break;
5933
5934 CASE_FLT_FN (BUILT_IN_FMOD):
5935 CASE_FLT_FN (BUILT_IN_REMAINDER):
5936 CASE_FLT_FN (BUILT_IN_DREM):
5937 CASE_FLT_FN (BUILT_IN_POW):
5938 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5939 if (target)
5940 return target;
5941 break;
5942
5943 CASE_FLT_FN (BUILT_IN_CEXPI):
5944 target = expand_builtin_cexpi (exp, target);
5945 gcc_assert (target);
5946 return target;
5947
5948 CASE_FLT_FN (BUILT_IN_SIN):
5949 CASE_FLT_FN (BUILT_IN_COS):
5950 if (! flag_unsafe_math_optimizations)
5951 break;
5952 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5953 if (target)
5954 return target;
5955 break;
5956
5957 CASE_FLT_FN (BUILT_IN_SINCOS):
5958 if (! flag_unsafe_math_optimizations)
5959 break;
5960 target = expand_builtin_sincos (exp);
5961 if (target)
5962 return target;
5963 break;
5964
5965 case BUILT_IN_APPLY_ARGS:
5966 return expand_builtin_apply_args ();
5967
5968 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5969 FUNCTION with a copy of the parameters described by
5970 ARGUMENTS, and ARGSIZE. It returns a block of memory
5971 allocated on the stack into which is stored all the registers
5972 that might possibly be used for returning the result of a
5973 function. ARGUMENTS is the value returned by
5974 __builtin_apply_args. ARGSIZE is the number of bytes of
5975 arguments that must be copied. ??? How should this value be
5976 computed? We'll also need a safe worst case value for varargs
5977 functions. */
5978 case BUILT_IN_APPLY:
5979 if (!validate_arglist (exp, POINTER_TYPE,
5980 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5981 && !validate_arglist (exp, REFERENCE_TYPE,
5982 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5983 return const0_rtx;
5984 else
5985 {
5986 rtx ops[3];
5987
5988 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5989 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5990 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5991
5992 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5993 }
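/* Editorial example (not part of the original source) of the
   __builtin_apply family handled above: a forwarding wrapper, assuming
   a fixed worst-case argument size of 64 bytes:

     double target_fn (int, double);

     double
     wrapper (int i, double d)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }
*/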
5994
5995 /* __builtin_return (RESULT) causes the function to return the
5996 value described by RESULT. RESULT is address of the block of
5997 memory returned by __builtin_apply. */
5998 case BUILT_IN_RETURN:
5999 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6000 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6001 return const0_rtx;
6002
6003 case BUILT_IN_SAVEREGS:
6004 return expand_builtin_saveregs ();
6005
6006 case BUILT_IN_VA_ARG_PACK:
6007 /* All valid uses of __builtin_va_arg_pack () are removed during
6008 inlining. */
6009 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6010 return const0_rtx;
6011
6012 case BUILT_IN_VA_ARG_PACK_LEN:
6013 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6014 inlining. */
6015 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6016 return const0_rtx;
6017
6018 /* Return the address of the first anonymous stack arg. */
6019 case BUILT_IN_NEXT_ARG:
6020 if (fold_builtin_next_arg (exp, false))
6021 return const0_rtx;
6022 return expand_builtin_next_arg ();
6023
6024 case BUILT_IN_CLEAR_CACHE:
6025 target = expand_builtin___clear_cache (exp);
6026 if (target)
6027 return target;
6028 break;
6029
6030 case BUILT_IN_CLASSIFY_TYPE:
6031 return expand_builtin_classify_type (exp);
6032
6033 case BUILT_IN_CONSTANT_P:
6034 return const0_rtx;
6035
6036 case BUILT_IN_FRAME_ADDRESS:
6037 case BUILT_IN_RETURN_ADDRESS:
6038 return expand_builtin_frame_address (fndecl, exp);
6039
6040 /* Returns the address of the area where the structure is returned,
6041 or 0 otherwise. */
6042 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6043 if (call_expr_nargs (exp) != 0
6044 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6045 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6046 return const0_rtx;
6047 else
6048 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6049
6050 case BUILT_IN_ALLOCA:
6051 case BUILT_IN_ALLOCA_WITH_ALIGN:
6052 /* If the allocation stems from the declaration of a variable-sized
6053 object, it cannot accumulate. */
6054 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6055 if (target)
6056 return target;
6057 break;
6058
6059 case BUILT_IN_STACK_SAVE:
6060 return expand_stack_save ();
6061
6062 case BUILT_IN_STACK_RESTORE:
6063 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6064 return const0_rtx;
6065
6066 case BUILT_IN_BSWAP16:
6067 case BUILT_IN_BSWAP32:
6068 case BUILT_IN_BSWAP64:
6069 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6070 if (target)
6071 return target;
6072 break;
6073
6074 CASE_INT_FN (BUILT_IN_FFS):
6075 target = expand_builtin_unop (target_mode, exp, target,
6076 subtarget, ffs_optab);
6077 if (target)
6078 return target;
6079 break;
6080
6081 CASE_INT_FN (BUILT_IN_CLZ):
6082 target = expand_builtin_unop (target_mode, exp, target,
6083 subtarget, clz_optab);
6084 if (target)
6085 return target;
6086 break;
6087
6088 CASE_INT_FN (BUILT_IN_CTZ):
6089 target = expand_builtin_unop (target_mode, exp, target,
6090 subtarget, ctz_optab);
6091 if (target)
6092 return target;
6093 break;
6094
6095 CASE_INT_FN (BUILT_IN_CLRSB):
6096 target = expand_builtin_unop (target_mode, exp, target,
6097 subtarget, clrsb_optab);
6098 if (target)
6099 return target;
6100 break;
6101
6102 CASE_INT_FN (BUILT_IN_POPCOUNT):
6103 target = expand_builtin_unop (target_mode, exp, target,
6104 subtarget, popcount_optab);
6105 if (target)
6106 return target;
6107 break;
6108
6109 CASE_INT_FN (BUILT_IN_PARITY):
6110 target = expand_builtin_unop (target_mode, exp, target,
6111 subtarget, parity_optab);
6112 if (target)
6113 return target;
6114 break;
6115
6116 case BUILT_IN_STRLEN:
6117 target = expand_builtin_strlen (exp, target, target_mode);
6118 if (target)
6119 return target;
6120 break;
6121
6122 case BUILT_IN_STRCPY:
6123 target = expand_builtin_strcpy (exp, target);
6124 if (target)
6125 return target;
6126 break;
6127
6128 case BUILT_IN_STRNCPY:
6129 target = expand_builtin_strncpy (exp, target);
6130 if (target)
6131 return target;
6132 break;
6133
6134 case BUILT_IN_STPCPY:
6135 target = expand_builtin_stpcpy (exp, target, mode);
6136 if (target)
6137 return target;
6138 break;
6139
6140 case BUILT_IN_MEMCPY:
6141 target = expand_builtin_memcpy (exp, target);
6142 if (target)
6143 return target;
6144 break;
6145
6146 case BUILT_IN_MEMPCPY:
6147 target = expand_builtin_mempcpy (exp, target, mode);
6148 if (target)
6149 return target;
6150 break;
6151
6152 case BUILT_IN_MEMSET:
6153 target = expand_builtin_memset (exp, target, mode);
6154 if (target)
6155 return target;
6156 break;
6157
6158 case BUILT_IN_BZERO:
6159 target = expand_builtin_bzero (exp);
6160 if (target)
6161 return target;
6162 break;
6163
6164 case BUILT_IN_STRCMP:
6165 target = expand_builtin_strcmp (exp, target);
6166 if (target)
6167 return target;
6168 break;
6169
6170 case BUILT_IN_STRNCMP:
6171 target = expand_builtin_strncmp (exp, target, mode);
6172 if (target)
6173 return target;
6174 break;
6175
6176 case BUILT_IN_BCMP:
6177 case BUILT_IN_MEMCMP:
6178 target = expand_builtin_memcmp (exp, target, mode);
6179 if (target)
6180 return target;
6181 break;
6182
6183 case BUILT_IN_SETJMP:
6184 /* This should have been lowered to the builtins below. */
6185 gcc_unreachable ();
6186
6187 case BUILT_IN_SETJMP_SETUP:
6188 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6189 and the receiver label. */
6190 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6191 {
6192 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6193 VOIDmode, EXPAND_NORMAL);
6194 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6195 rtx label_r = label_rtx (label);
6196
6197 /* This is copied from the handling of non-local gotos. */
6198 expand_builtin_setjmp_setup (buf_addr, label_r);
6199 nonlocal_goto_handler_labels
6200 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6201 nonlocal_goto_handler_labels);
6202 /* ??? Do not let expand_label treat us as such since we would
6203 not want to be both on the list of non-local labels and on
6204 the list of forced labels. */
6205 FORCED_LABEL (label) = 0;
6206 return const0_rtx;
6207 }
6208 break;
6209
6210 case BUILT_IN_SETJMP_RECEIVER:
6211 /* __builtin_setjmp_receiver is passed the receiver label. */
6212 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6213 {
6214 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6215 rtx label_r = label_rtx (label);
6216
6217 expand_builtin_setjmp_receiver (label_r);
6218 return const0_rtx;
6219 }
6220 break;
6221
6222 /* __builtin_longjmp is passed a pointer to an array of five words.
6223 It's similar to the C library longjmp function but works with
6224 __builtin_setjmp above. */
6225 case BUILT_IN_LONGJMP:
6226 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6227 {
6228 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6229 VOIDmode, EXPAND_NORMAL);
6230 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6231
6232 if (value != const1_rtx)
6233 {
6234 error ("%<__builtin_longjmp%> second argument must be 1");
6235 return const0_rtx;
6236 }
6237
6238 expand_builtin_longjmp (buf_addr, value);
6239 return const0_rtx;
6240 }
6241 break;
6242
6243 case BUILT_IN_NONLOCAL_GOTO:
6244 target = expand_builtin_nonlocal_goto (exp);
6245 if (target)
6246 return target;
6247 break;
6248
6249 /* This updates the setjmp buffer that is its argument with the value
6250 of the current stack pointer. */
6251 case BUILT_IN_UPDATE_SETJMP_BUF:
6252 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6253 {
6254 rtx buf_addr
6255 = expand_normal (CALL_EXPR_ARG (exp, 0));
6256
6257 expand_builtin_update_setjmp_buf (buf_addr);
6258 return const0_rtx;
6259 }
6260 break;
6261
6262 case BUILT_IN_TRAP:
6263 expand_builtin_trap ();
6264 return const0_rtx;
6265
6266 case BUILT_IN_UNREACHABLE:
6267 expand_builtin_unreachable ();
6268 return const0_rtx;
6269
6270 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6271 case BUILT_IN_SIGNBITD32:
6272 case BUILT_IN_SIGNBITD64:
6273 case BUILT_IN_SIGNBITD128:
6274 target = expand_builtin_signbit (exp, target);
6275 if (target)
6276 return target;
6277 break;
6278
6279 /* Various hooks for the DWARF 2 __throw routine. */
6280 case BUILT_IN_UNWIND_INIT:
6281 expand_builtin_unwind_init ();
6282 return const0_rtx;
6283 case BUILT_IN_DWARF_CFA:
6284 return virtual_cfa_rtx;
6285 #ifdef DWARF2_UNWIND_INFO
6286 case BUILT_IN_DWARF_SP_COLUMN:
6287 return expand_builtin_dwarf_sp_column ();
6288 case BUILT_IN_INIT_DWARF_REG_SIZES:
6289 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6290 return const0_rtx;
6291 #endif
6292 case BUILT_IN_FROB_RETURN_ADDR:
6293 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6294 case BUILT_IN_EXTRACT_RETURN_ADDR:
6295 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6296 case BUILT_IN_EH_RETURN:
6297 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6298 CALL_EXPR_ARG (exp, 1));
6299 return const0_rtx;
6300 #ifdef EH_RETURN_DATA_REGNO
6301 case BUILT_IN_EH_RETURN_DATA_REGNO:
6302 return expand_builtin_eh_return_data_regno (exp);
6303 #endif
6304 case BUILT_IN_EXTEND_POINTER:
6305 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6306 case BUILT_IN_EH_POINTER:
6307 return expand_builtin_eh_pointer (exp);
6308 case BUILT_IN_EH_FILTER:
6309 return expand_builtin_eh_filter (exp);
6310 case BUILT_IN_EH_COPY_VALUES:
6311 return expand_builtin_eh_copy_values (exp);
6312
6313 case BUILT_IN_VA_START:
6314 return expand_builtin_va_start (exp);
6315 case BUILT_IN_VA_END:
6316 return expand_builtin_va_end (exp);
6317 case BUILT_IN_VA_COPY:
6318 return expand_builtin_va_copy (exp);
6319 case BUILT_IN_EXPECT:
6320 return expand_builtin_expect (exp, target);
6321 case BUILT_IN_ASSUME_ALIGNED:
6322 return expand_builtin_assume_aligned (exp, target);
6323 case BUILT_IN_PREFETCH:
6324 expand_builtin_prefetch (exp);
6325 return const0_rtx;
6326
6327 case BUILT_IN_INIT_TRAMPOLINE:
6328 return expand_builtin_init_trampoline (exp, true);
6329 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6330 return expand_builtin_init_trampoline (exp, false);
6331 case BUILT_IN_ADJUST_TRAMPOLINE:
6332 return expand_builtin_adjust_trampoline (exp);
6333
6334 case BUILT_IN_FORK:
6335 case BUILT_IN_EXECL:
6336 case BUILT_IN_EXECV:
6337 case BUILT_IN_EXECLP:
6338 case BUILT_IN_EXECLE:
6339 case BUILT_IN_EXECVP:
6340 case BUILT_IN_EXECVE:
6341 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6342 if (target)
6343 return target;
6344 break;
6345
6346 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6347 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6348 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6349 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6350 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6351 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6352 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6353 if (target)
6354 return target;
6355 break;
6356
6357 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6358 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6359 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6360 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6361 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6362 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6363 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6364 if (target)
6365 return target;
6366 break;
6367
6368 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6369 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6370 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6371 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6372 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6373 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6374 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6375 if (target)
6376 return target;
6377 break;
6378
6379 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6380 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6381 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6382 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6383 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6384 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6385 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6386 if (target)
6387 return target;
6388 break;
6389
6390 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6391 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6392 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6393 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6394 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6395 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6396 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6397 if (target)
6398 return target;
6399 break;
6400
6401 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6402 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6403 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6404 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6405 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6406 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6407 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6408 if (target)
6409 return target;
6410 break;
6411
6412 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6413 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6414 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6415 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6416 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6417 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6418 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6419 if (target)
6420 return target;
6421 break;
6422
6423 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6424 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6425 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6426 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6427 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6428 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6429 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6430 if (target)
6431 return target;
6432 break;
6433
6434 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6435 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6436 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6437 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6438 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6439 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6440 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6441 if (target)
6442 return target;
6443 break;
6444
6445 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6446 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6447 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6448 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6449 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6450 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6451 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6452 if (target)
6453 return target;
6454 break;
6455
6456 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6457 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6458 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6459 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6460 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6461 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6462 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6463 if (target)
6464 return target;
6465 break;
6466
6467 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6468 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6469 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6470 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6471 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6472 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6473 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6474 if (target)
6475 return target;
6476 break;
6477
6478 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6479 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6480 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6481 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6482 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6483 if (mode == VOIDmode)
6484 mode = TYPE_MODE (boolean_type_node);
6485 if (!target || !register_operand (target, mode))
6486 target = gen_reg_rtx (mode);
6487
6488 mode = get_builtin_sync_mode
6489 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6490 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6491 if (target)
6492 return target;
6493 break;
6494
6495 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6496 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6497 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6498 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6499 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6500 mode = get_builtin_sync_mode
6501 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6502 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6503 if (target)
6504 return target;
6505 break;
6506
6507 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6508 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6509 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6510 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6511 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6512 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6513 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6514 if (target)
6515 return target;
6516 break;
6517
6518 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6519 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6520 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6521 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6522 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6523 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6524 expand_builtin_sync_lock_release (mode, exp);
6525 return const0_rtx;
6526
6527 case BUILT_IN_SYNC_SYNCHRONIZE:
6528 expand_builtin_sync_synchronize ();
6529 return const0_rtx;
6530
6531 case BUILT_IN_ATOMIC_EXCHANGE_1:
6532 case BUILT_IN_ATOMIC_EXCHANGE_2:
6533 case BUILT_IN_ATOMIC_EXCHANGE_4:
6534 case BUILT_IN_ATOMIC_EXCHANGE_8:
6535 case BUILT_IN_ATOMIC_EXCHANGE_16:
6536 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6537 target = expand_builtin_atomic_exchange (mode, exp, target);
6538 if (target)
6539 return target;
6540 break;
6541
6542 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6543 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6544 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6545 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6546 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6547 {
6548 unsigned int nargs, z;
6549 vec<tree, va_gc> *vec;
6550
6551 mode =
6552 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6553 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6554 if (target)
6555 return target;
6556
6557 /* If this is turned into an external library call, the weak parameter
6558 must be dropped to match the expected parameter list. */
6559 nargs = call_expr_nargs (exp);
6560 vec_alloc (vec, nargs - 1);
6561 for (z = 0; z < 3; z++)
6562 vec->quick_push (CALL_EXPR_ARG (exp, z));
6563 /* Skip the boolean weak parameter. */
6564 for (z = 4; z < 6; z++)
6565 vec->quick_push (CALL_EXPR_ARG (exp, z));
6566 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6567 break;
6568 }
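/* Editorial note (not part of the original source) on the parameter
   dropping above: the builtin takes a weak flag but the external library
   routine does not; assuming the libatomic-style prototype

     bool __atomic_compare_exchange_4 (void *ptr, void *expected,
                                       unsigned int desired,
                                       int success, int failure);

   arguments 0-2 and 4-5 are kept and argument 3 (weak) is skipped. */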
6569
6570 case BUILT_IN_ATOMIC_LOAD_1:
6571 case BUILT_IN_ATOMIC_LOAD_2:
6572 case BUILT_IN_ATOMIC_LOAD_4:
6573 case BUILT_IN_ATOMIC_LOAD_8:
6574 case BUILT_IN_ATOMIC_LOAD_16:
6575 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6576 target = expand_builtin_atomic_load (mode, exp, target);
6577 if (target)
6578 return target;
6579 break;
6580
6581 case BUILT_IN_ATOMIC_STORE_1:
6582 case BUILT_IN_ATOMIC_STORE_2:
6583 case BUILT_IN_ATOMIC_STORE_4:
6584 case BUILT_IN_ATOMIC_STORE_8:
6585 case BUILT_IN_ATOMIC_STORE_16:
6586 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6587 target = expand_builtin_atomic_store (mode, exp);
6588 if (target)
6589 return const0_rtx;
6590 break;
6591
6592 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6593 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6594 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6595 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6596 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6597 {
6598 enum built_in_function lib;
6599 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6600 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6601 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6602 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6603 ignore, lib);
6604 if (target)
6605 return target;
6606 break;
6607 }
6608 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6609 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6610 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6611 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6612 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6613 {
6614 enum built_in_function lib;
6615 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6616 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6617 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6618 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6619 ignore, lib);
6620 if (target)
6621 return target;
6622 break;
6623 }
6624 case BUILT_IN_ATOMIC_AND_FETCH_1:
6625 case BUILT_IN_ATOMIC_AND_FETCH_2:
6626 case BUILT_IN_ATOMIC_AND_FETCH_4:
6627 case BUILT_IN_ATOMIC_AND_FETCH_8:
6628 case BUILT_IN_ATOMIC_AND_FETCH_16:
6629 {
6630 enum built_in_function lib;
6631 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6632 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6633 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6634 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6635 ignore, lib);
6636 if (target)
6637 return target;
6638 break;
6639 }
6640 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6641 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6642 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6643 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6644 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6645 {
6646 enum built_in_function lib;
6647 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6648 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6649 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6650 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6651 ignore, lib);
6652 if (target)
6653 return target;
6654 break;
6655 }
6656 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6657 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6658 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6659 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6660 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6661 {
6662 enum built_in_function lib;
6663 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6664 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6665 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6666 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6667 ignore, lib);
6668 if (target)
6669 return target;
6670 break;
6671 }
6672 case BUILT_IN_ATOMIC_OR_FETCH_1:
6673 case BUILT_IN_ATOMIC_OR_FETCH_2:
6674 case BUILT_IN_ATOMIC_OR_FETCH_4:
6675 case BUILT_IN_ATOMIC_OR_FETCH_8:
6676 case BUILT_IN_ATOMIC_OR_FETCH_16:
6677 {
6678 enum built_in_function lib;
6679 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6680 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6681 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6682 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6683 ignore, lib);
6684 if (target)
6685 return target;
6686 break;
6687 }
6688 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6689 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6690 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6691 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6692 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6693 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6694 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6695 ignore, BUILT_IN_NONE);
6696 if (target)
6697 return target;
6698 break;
6699
6700 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6701 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6702 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6703 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6704 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6705 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6706 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6707 ignore, BUILT_IN_NONE);
6708 if (target)
6709 return target;
6710 break;
6711
6712 case BUILT_IN_ATOMIC_FETCH_AND_1:
6713 case BUILT_IN_ATOMIC_FETCH_AND_2:
6714 case BUILT_IN_ATOMIC_FETCH_AND_4:
6715 case BUILT_IN_ATOMIC_FETCH_AND_8:
6716 case BUILT_IN_ATOMIC_FETCH_AND_16:
6717 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6718 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6719 ignore, BUILT_IN_NONE);
6720 if (target)
6721 return target;
6722 break;
6723
6724 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6725 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6726 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6727 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6728 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6729 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6730 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6731 ignore, BUILT_IN_NONE);
6732 if (target)
6733 return target;
6734 break;
6735
6736 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6737 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6738 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6739 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6740 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6741 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6742 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6743 ignore, BUILT_IN_NONE);
6744 if (target)
6745 return target;
6746 break;
6747
6748 case BUILT_IN_ATOMIC_FETCH_OR_1:
6749 case BUILT_IN_ATOMIC_FETCH_OR_2:
6750 case BUILT_IN_ATOMIC_FETCH_OR_4:
6751 case BUILT_IN_ATOMIC_FETCH_OR_8:
6752 case BUILT_IN_ATOMIC_FETCH_OR_16:
6753 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6754 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6755 ignore, BUILT_IN_NONE);
6756 if (target)
6757 return target;
6758 break;
6759
6760 case BUILT_IN_ATOMIC_TEST_AND_SET:
6761 return expand_builtin_atomic_test_and_set (exp, target);
6762
6763 case BUILT_IN_ATOMIC_CLEAR:
6764 return expand_builtin_atomic_clear (exp);
6765
6766 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6767 return expand_builtin_atomic_always_lock_free (exp);
6768
6769 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6770 target = expand_builtin_atomic_is_lock_free (exp);
6771 if (target)
6772 return target;
6773 break;
6774
6775 case BUILT_IN_ATOMIC_THREAD_FENCE:
6776 expand_builtin_atomic_thread_fence (exp);
6777 return const0_rtx;
6778
6779 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6780 expand_builtin_atomic_signal_fence (exp);
6781 return const0_rtx;
6782
6783 case BUILT_IN_OBJECT_SIZE:
6784 return expand_builtin_object_size (exp);
6785
6786 case BUILT_IN_MEMCPY_CHK:
6787 case BUILT_IN_MEMPCPY_CHK:
6788 case BUILT_IN_MEMMOVE_CHK:
6789 case BUILT_IN_MEMSET_CHK:
6790 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6791 if (target)
6792 return target;
6793 break;
6794
6795 case BUILT_IN_STRCPY_CHK:
6796 case BUILT_IN_STPCPY_CHK:
6797 case BUILT_IN_STRNCPY_CHK:
6798 case BUILT_IN_STPNCPY_CHK:
6799 case BUILT_IN_STRCAT_CHK:
6800 case BUILT_IN_STRNCAT_CHK:
6801 case BUILT_IN_SNPRINTF_CHK:
6802 case BUILT_IN_VSNPRINTF_CHK:
6803 maybe_emit_chk_warning (exp, fcode);
6804 break;
6805
6806 case BUILT_IN_SPRINTF_CHK:
6807 case BUILT_IN_VSPRINTF_CHK:
6808 maybe_emit_sprintf_chk_warning (exp, fcode);
6809 break;
6810
6811 case BUILT_IN_FREE:
6812 if (warn_free_nonheap_object)
6813 maybe_emit_free_warning (exp);
6814 break;
6815
6816 case BUILT_IN_THREAD_POINTER:
6817 return expand_builtin_thread_pointer (exp, target);
6818
6819 case BUILT_IN_SET_THREAD_POINTER:
6820 expand_builtin_set_thread_pointer (exp);
6821 return const0_rtx;
6822
6823 case BUILT_IN_CILK_DETACH:
6824 expand_builtin_cilk_detach (exp);
6825 return const0_rtx;
6826
6827 case BUILT_IN_CILK_POP_FRAME:
6828 expand_builtin_cilk_pop_frame (exp);
6829 return const0_rtx;
6830
6831 	default:	/* Just do a library call for any unknown builtin.  */
6832 break;
6833 }
6834
6835 /* The switch statement above can drop through to cause the function
6836 to be called normally. */
6837 return expand_call (exp, target, ignore);
6838 }
6839
6840 /* Determine whether a tree node represents a call to a built-in
6841 function. If the tree T is a call to a built-in function with
6842 the right number of arguments of the appropriate types, return
6843 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6844 Otherwise the return value is END_BUILTINS. */
6845
6846 enum built_in_function
6847 builtin_mathfn_code (const_tree t)
6848 {
6849 const_tree fndecl, arg, parmlist;
6850 const_tree argtype, parmtype;
6851 const_call_expr_arg_iterator iter;
6852
6853 if (TREE_CODE (t) != CALL_EXPR
6854 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6855 return END_BUILTINS;
6856
6857 fndecl = get_callee_fndecl (t);
6858 if (fndecl == NULL_TREE
6859 || TREE_CODE (fndecl) != FUNCTION_DECL
6860 || ! DECL_BUILT_IN (fndecl)
6861 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6862 return END_BUILTINS;
6863
6864 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6865 init_const_call_expr_arg_iterator (t, &iter);
6866 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6867 {
6868 /* If a function doesn't take a variable number of arguments,
6869 the last element in the list will have type `void'. */
6870 parmtype = TREE_VALUE (parmlist);
6871 if (VOID_TYPE_P (parmtype))
6872 {
6873 if (more_const_call_expr_args_p (&iter))
6874 return END_BUILTINS;
6875 return DECL_FUNCTION_CODE (fndecl);
6876 }
6877
6878 if (! more_const_call_expr_args_p (&iter))
6879 return END_BUILTINS;
6880
6881 arg = next_const_call_expr_arg (&iter);
6882 argtype = TREE_TYPE (arg);
6883
6884 if (SCALAR_FLOAT_TYPE_P (parmtype))
6885 {
6886 if (! SCALAR_FLOAT_TYPE_P (argtype))
6887 return END_BUILTINS;
6888 }
6889 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6890 {
6891 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6892 return END_BUILTINS;
6893 }
6894 else if (POINTER_TYPE_P (parmtype))
6895 {
6896 if (! POINTER_TYPE_P (argtype))
6897 return END_BUILTINS;
6898 }
6899 else if (INTEGRAL_TYPE_P (parmtype))
6900 {
6901 if (! INTEGRAL_TYPE_P (argtype))
6902 return END_BUILTINS;
6903 }
6904 else
6905 return END_BUILTINS;
6906 }
6907
6908 /* Variable-length argument list. */
6909 return DECL_FUNCTION_CODE (fndecl);
6910 }
6911
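/* For illustration (hypothetical examples, not real GCC trees):

     sqrtf (d)    d of type double  -> BUILT_IN_SQRTF; parameter and
                                       argument are both SCALAR_FLOAT
     sqrt (p)     p a pointer       -> END_BUILTINS; type classes differ
     printf (s, i)                  -> DECL_FUNCTION_CODE via the variadic
                                       tail, since the parameter list has
                                       no trailing void terminator

   Only the type class of each argument is checked against the
   corresponding parameter, not the exact type.  */
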
6912 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6913 evaluate to a constant. */
6914
6915 static tree
6916 fold_builtin_constant_p (tree arg)
6917 {
6918 /* We return 1 for a numeric type that's known to be a constant
6919 value at compile-time or for an aggregate type that's a
6920 literal constant. */
6921 STRIP_NOPS (arg);
6922
6923   /* If we know this is a constant, return the constant one.  */
6924 if (CONSTANT_CLASS_P (arg)
6925 || (TREE_CODE (arg) == CONSTRUCTOR
6926 && TREE_CONSTANT (arg)))
6927 return integer_one_node;
6928 if (TREE_CODE (arg) == ADDR_EXPR)
6929 {
6930 tree op = TREE_OPERAND (arg, 0);
6931 if (TREE_CODE (op) == STRING_CST
6932 || (TREE_CODE (op) == ARRAY_REF
6933 && integer_zerop (TREE_OPERAND (op, 1))
6934 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6935 return integer_one_node;
6936 }
6937
6938 /* If this expression has side effects, show we don't know it to be a
6939      constant.  Likewise if it's a pointer or aggregate type, since in
6940      those cases we only want literals, as those are only optimized
6941      when generating RTL, not later.
6942 And finally, if we are compiling an initializer, not code, we
6943 need to return a definite result now; there's not going to be any
6944 more optimization done. */
6945 if (TREE_SIDE_EFFECTS (arg)
6946 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6947 || POINTER_TYPE_P (TREE_TYPE (arg))
6948 || cfun == 0
6949 || folding_initializer
6950 || force_folding_builtin_constant_p)
6951 return integer_zero_node;
6952
6953 return NULL_TREE;
6954 }
6955
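/* A user-level sketch of the folding above (illustrative only):

     __builtin_constant_p (42)     -> 1 (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")  -> 1 (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (f ())   -> 0 (side effects)
     __builtin_constant_p (n)      -> NULL_TREE for a plain variable,
                                      i.e. left unfolded so a later pass
                                      may still prove it constant; forced
                                      to 0 when folding an initializer,
                                      where no further optimization will
                                      run.  */
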
6956 /* Create a call to builtin_expect with PRED and EXPECTED (and, when
6957    non-NULL, PREDICTOR) as its arguments and return it as a truthvalue.  */
6958
6959 static tree
6960 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
6961 tree predictor)
6962 {
6963 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6964
6965 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6966 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6967 ret_type = TREE_TYPE (TREE_TYPE (fn));
6968 pred_type = TREE_VALUE (arg_types);
6969 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6970
6971 pred = fold_convert_loc (loc, pred_type, pred);
6972 expected = fold_convert_loc (loc, expected_type, expected);
6973 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
6974 predictor);
6975
6976 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6977 build_int_cst (ret_type, 0));
6978 }
6979
6980 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
6981    Return NULL_TREE if no simplification is possible.  */
6982
6983 tree
6984 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
6985 {
6986 tree inner, fndecl, inner_arg0;
6987 enum tree_code code;
6988
6989 /* Distribute the expected value over short-circuiting operators.
6990 See through the cast from truthvalue_type_node to long. */
6991 inner_arg0 = arg0;
6992 while (TREE_CODE (inner_arg0) == NOP_EXPR
6993 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6994 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6995 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6996
6997   /* If this is a builtin_expect within a builtin_expect, keep the
6998      inner one.  See through a comparison against a constant.  It
6999      might have been added to create a truthvalue.  */
7000 inner = inner_arg0;
7001
7002 if (COMPARISON_CLASS_P (inner)
7003 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7004 inner = TREE_OPERAND (inner, 0);
7005
7006 if (TREE_CODE (inner) == CALL_EXPR
7007 && (fndecl = get_callee_fndecl (inner))
7008 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7009 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7010 return arg0;
7011
7012 inner = inner_arg0;
7013 code = TREE_CODE (inner);
7014 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7015 {
7016 tree op0 = TREE_OPERAND (inner, 0);
7017 tree op1 = TREE_OPERAND (inner, 1);
7018
7019 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7020 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7021 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7022
7023 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7024 }
7025
7026 /* If the argument isn't invariant then there's nothing else we can do. */
7027 if (!TREE_CONSTANT (inner_arg0))
7028 return NULL_TREE;
7029
7030 /* If we expect that a comparison against the argument will fold to
7031      a constant, return the constant.  In practice, this means a true
7032 constant or the address of a non-weak symbol. */
7033 inner = inner_arg0;
7034 STRIP_NOPS (inner);
7035 if (TREE_CODE (inner) == ADDR_EXPR)
7036 {
7037 do
7038 {
7039 inner = TREE_OPERAND (inner, 0);
7040 }
7041 while (TREE_CODE (inner) == COMPONENT_REF
7042 || TREE_CODE (inner) == ARRAY_REF);
7043 if ((TREE_CODE (inner) == VAR_DECL
7044 || TREE_CODE (inner) == FUNCTION_DECL)
7045 && DECL_WEAK (inner))
7046 return NULL_TREE;
7047 }
7048
7049 /* Otherwise, ARG0 already has the proper type for the return value. */
7050 return arg0;
7051 }
7052
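/* A source-level sketch of the short-circuit distribution above
   (hypothetical example):

     __builtin_expect (a && b, 1)

   is effectively rewritten to

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so the expected value guides the branch prediction of both
   sub-conditions rather than only the combined truthvalue.  */
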
7053 /* Fold a call to __builtin_classify_type with argument ARG. */
7054
7055 static tree
7056 fold_builtin_classify_type (tree arg)
7057 {
7058 if (arg == 0)
7059 return build_int_cst (integer_type_node, no_type_class);
7060
7061 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7062 }
7063
7064 /* Fold a call to __builtin_strlen with argument ARG. */
7065
7066 static tree
7067 fold_builtin_strlen (location_t loc, tree type, tree arg)
7068 {
7069 if (!validate_arg (arg, POINTER_TYPE))
7070 return NULL_TREE;
7071 else
7072 {
7073 tree len = c_strlen (arg, 0);
7074
7075 if (len)
7076 return fold_convert_loc (loc, type, len);
7077
7078 return NULL_TREE;
7079 }
7080 }
7081
7082 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7083
7084 static tree
7085 fold_builtin_inf (location_t loc, tree type, int warn)
7086 {
7087 REAL_VALUE_TYPE real;
7088
7089 /* __builtin_inff is intended to be usable to define INFINITY on all
7090 targets. If an infinity is not available, INFINITY expands "to a
7091 positive constant of type float that overflows at translation
7092 time", footnote "In this case, using INFINITY will violate the
7093 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7094 Thus we pedwarn to ensure this constraint violation is
7095 diagnosed. */
7096 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7097 pedwarn (loc, 0, "target format does not support infinity");
7098
7099 real_inf (&real);
7100 return build_real (type, real);
7101 }
7102
7103 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7104
7105 static tree
7106 fold_builtin_nan (tree arg, tree type, int quiet)
7107 {
7108 REAL_VALUE_TYPE real;
7109 const char *str;
7110
7111 if (!validate_arg (arg, POINTER_TYPE))
7112 return NULL_TREE;
7113 str = c_getstr (arg);
7114 if (!str)
7115 return NULL_TREE;
7116
7117 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7118 return NULL_TREE;
7119
7120 return build_real (type, real);
7121 }
7122
7123 /* Return true if the floating point expression T has an integer value.
7124 We also allow +Inf, -Inf and NaN to be considered integer values. */
7125
7126 static bool
7127 integer_valued_real_p (tree t)
7128 {
7129 switch (TREE_CODE (t))
7130 {
7131 case FLOAT_EXPR:
7132 return true;
7133
7134 case ABS_EXPR:
7135 case SAVE_EXPR:
7136 return integer_valued_real_p (TREE_OPERAND (t, 0));
7137
7138 case COMPOUND_EXPR:
7139 case MODIFY_EXPR:
7140 case BIND_EXPR:
7141 return integer_valued_real_p (TREE_OPERAND (t, 1));
7142
7143 case PLUS_EXPR:
7144 case MINUS_EXPR:
7145 case MULT_EXPR:
7146 case MIN_EXPR:
7147 case MAX_EXPR:
7148 return integer_valued_real_p (TREE_OPERAND (t, 0))
7149 && integer_valued_real_p (TREE_OPERAND (t, 1));
7150
7151 case COND_EXPR:
7152 return integer_valued_real_p (TREE_OPERAND (t, 1))
7153 && integer_valued_real_p (TREE_OPERAND (t, 2));
7154
7155 case REAL_CST:
7156 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7157
7158 case NOP_EXPR:
7159 {
7160 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7161 if (TREE_CODE (type) == INTEGER_TYPE)
7162 return true;
7163 if (TREE_CODE (type) == REAL_TYPE)
7164 return integer_valued_real_p (TREE_OPERAND (t, 0));
7165 break;
7166 }
7167
7168 case CALL_EXPR:
7169 switch (builtin_mathfn_code (t))
7170 {
7171 CASE_FLT_FN (BUILT_IN_CEIL):
7172 CASE_FLT_FN (BUILT_IN_FLOOR):
7173 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7174 CASE_FLT_FN (BUILT_IN_RINT):
7175 CASE_FLT_FN (BUILT_IN_ROUND):
7176 CASE_FLT_FN (BUILT_IN_TRUNC):
7177 return true;
7178
7179 CASE_FLT_FN (BUILT_IN_FMIN):
7180 CASE_FLT_FN (BUILT_IN_FMAX):
7181 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7182 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7183
7184 default:
7185 break;
7186 }
7187 break;
7188
7189 default:
7190 break;
7191 }
7192 return false;
7193 }
7194
7195 /* FNDECL is assumed to be a builtin where truncation can be propagated
7196    across (for instance floor((double)f) == (double)floorf (f)).
7197 Do the transformation for a call with argument ARG. */
7198
7199 static tree
7200 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7201 {
7202 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7203
7204 if (!validate_arg (arg, REAL_TYPE))
7205 return NULL_TREE;
7206
7207 /* Integer rounding functions are idempotent. */
7208 if (fcode == builtin_mathfn_code (arg))
7209 return arg;
7210
7211 /* If argument is already integer valued, and we don't need to worry
7212 about setting errno, there's no need to perform rounding. */
7213 if (! flag_errno_math && integer_valued_real_p (arg))
7214 return arg;
7215
7216 if (optimize)
7217 {
7218 tree arg0 = strip_float_extensions (arg);
7219 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7220 tree newtype = TREE_TYPE (arg0);
7221 tree decl;
7222
7223 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7224 && (decl = mathfn_built_in (newtype, fcode)))
7225 return fold_convert_loc (loc, ftype,
7226 build_call_expr_loc (loc, decl, 1,
7227 fold_convert_loc (loc,
7228 newtype,
7229 arg0)));
7230 }
7231 return NULL_TREE;
7232 }
7233
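/* A sketch of the narrowing performed above, at the source level
   (assuming the narrower builtin exists for the target):

     float f;
     double d = floor ((double) f);   =>   d = (double) floorf (f);

   This needs no -funsafe-math-optimizations, because truncating to an
   integer value commutes with the exact float-to-double extension.  The
   earlier checks handle e.g. floor (floor (x)) => floor (x) and, under
   -fno-math-errno, floor (x) => x for any x already known to be integer
   valued.  */
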
7234 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7235 the argument, for instance lround((double)f) -> lroundf (f).
7236 Do the transformation for a call with argument ARG. */
7237
7238 static tree
7239 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7240 {
7241 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7242
7243 if (!validate_arg (arg, REAL_TYPE))
7244 return NULL_TREE;
7245
7246 /* If argument is already integer valued, and we don't need to worry
7247 about setting errno, there's no need to perform rounding. */
7248 if (! flag_errno_math && integer_valued_real_p (arg))
7249 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7250 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7251
7252 if (optimize)
7253 {
7254 tree ftype = TREE_TYPE (arg);
7255 tree arg0 = strip_float_extensions (arg);
7256 tree newtype = TREE_TYPE (arg0);
7257 tree decl;
7258
7259 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7260 && (decl = mathfn_built_in (newtype, fcode)))
7261 return build_call_expr_loc (loc, decl, 1,
7262 fold_convert_loc (loc, newtype, arg0));
7263 }
7264
7265 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7266 sizeof (int) == sizeof (long). */
7267 if (TYPE_PRECISION (integer_type_node)
7268 == TYPE_PRECISION (long_integer_type_node))
7269 {
7270 tree newfn = NULL_TREE;
7271 switch (fcode)
7272 {
7273 CASE_FLT_FN (BUILT_IN_ICEIL):
7274 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7275 break;
7276
7277 CASE_FLT_FN (BUILT_IN_IFLOOR):
7278 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7279 break;
7280
7281 CASE_FLT_FN (BUILT_IN_IROUND):
7282 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7283 break;
7284
7285 CASE_FLT_FN (BUILT_IN_IRINT):
7286 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7287 break;
7288
7289 default:
7290 break;
7291 }
7292
7293 if (newfn)
7294 {
7295 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7296 return fold_convert_loc (loc,
7297 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7298 }
7299 }
7300
7301 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7302 sizeof (long long) == sizeof (long). */
7303 if (TYPE_PRECISION (long_long_integer_type_node)
7304 == TYPE_PRECISION (long_integer_type_node))
7305 {
7306 tree newfn = NULL_TREE;
7307 switch (fcode)
7308 {
7309 CASE_FLT_FN (BUILT_IN_LLCEIL):
7310 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7311 break;
7312
7313 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7314 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7315 break;
7316
7317 CASE_FLT_FN (BUILT_IN_LLROUND):
7318 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7319 break;
7320
7321 CASE_FLT_FN (BUILT_IN_LLRINT):
7322 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7323 break;
7324
7325 default:
7326 break;
7327 }
7328
7329 if (newfn)
7330 {
7331 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7332 return fold_convert_loc (loc,
7333 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7334 }
7335 }
7336
7337 return NULL_TREE;
7338 }
7339
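/* The canonicalizations above, at the source level (sketch):

     lround ((double) f)  =>  lroundf (f)   with f of type float
     llround (x)          =>  lround (x)    when long long and long have
                                            equal precision (LP64)
     iceil (x), ifloor (x), iround (x), irint (x)
                          =>  the lceil/lfloor/lround/lrint forms when
                              int and long have equal precision (ILP32)

   Funnelling these variants into the long forms leaves later passes a
   single family of functions to recognize.  */
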
7340 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7341 return type. Return NULL_TREE if no simplification can be made. */
7342
7343 static tree
7344 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7345 {
7346 tree res;
7347
7348 if (!validate_arg (arg, COMPLEX_TYPE)
7349 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7350 return NULL_TREE;
7351
7352 /* Calculate the result when the argument is a constant. */
7353 if (TREE_CODE (arg) == COMPLEX_CST
7354 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7355 type, mpfr_hypot)))
7356 return res;
7357
7358 if (TREE_CODE (arg) == COMPLEX_EXPR)
7359 {
7360 tree real = TREE_OPERAND (arg, 0);
7361 tree imag = TREE_OPERAND (arg, 1);
7362
7363 /* If either part is zero, cabs is fabs of the other. */
7364 if (real_zerop (real))
7365 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7366 if (real_zerop (imag))
7367 return fold_build1_loc (loc, ABS_EXPR, type, real);
7368
7369 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7370 if (flag_unsafe_math_optimizations
7371 && operand_equal_p (real, imag, OEP_PURE_SAME))
7372 {
7373 const REAL_VALUE_TYPE sqrt2_trunc
7374 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7375 STRIP_NOPS (real);
7376 return fold_build2_loc (loc, MULT_EXPR, type,
7377 fold_build1_loc (loc, ABS_EXPR, type, real),
7378 build_real (type, sqrt2_trunc));
7379 }
7380 }
7381
7382 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7383 if (TREE_CODE (arg) == NEGATE_EXPR
7384 || TREE_CODE (arg) == CONJ_EXPR)
7385 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7386
7387 /* Don't do this when optimizing for size. */
7388 if (flag_unsafe_math_optimizations
7389 && optimize && optimize_function_for_speed_p (cfun))
7390 {
7391 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7392
7393 if (sqrtfn != NULL_TREE)
7394 {
7395 tree rpart, ipart, result;
7396
7397 arg = builtin_save_expr (arg);
7398
7399 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7400 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7401
7402 rpart = builtin_save_expr (rpart);
7403 ipart = builtin_save_expr (ipart);
7404
7405 result = fold_build2_loc (loc, PLUS_EXPR, type,
7406 fold_build2_loc (loc, MULT_EXPR, type,
7407 rpart, rpart),
7408 fold_build2_loc (loc, MULT_EXPR, type,
7409 ipart, ipart));
7410
7411 return build_call_expr_loc (loc, sqrtfn, 1, result);
7412 }
7413 }
7414
7415 return NULL_TREE;
7416 }
7417
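/* The cabs folds above, as source-level identities (sketch; the sqrt(2)
   fold needs -funsafe-math-optimizations, and the final expansion
   additionally requires optimizing for speed):

     cabs (x + 0i)    =>  fabs (x)
     cabs (0 + yi)    =>  fabs (y)
     cabs (-z)        =>  cabs (z)
     cabs (conj (z))  =>  cabs (z)
     cabs (x + xi)    =>  fabs (x) * sqrt (2)
     cabs (z)         =>  sqrt (__real__ z * __real__ z
                                + __imag__ z * __imag__ z)

   The open-coded form trades the library call for a sqrt but may
   overflow for large operands, which is why it is gated on the unsafe
   math flag.  */
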
7418 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7419 complex tree type of the result. If NEG is true, the imaginary
7420 zero is negative. */
7421
7422 static tree
7423 build_complex_cproj (tree type, bool neg)
7424 {
7425 REAL_VALUE_TYPE rinf, rzero = dconst0;
7426
7427 real_inf (&rinf);
7428 rzero.sign = neg;
7429 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7430 build_real (TREE_TYPE (type), rzero));
7431 }
7432
7433 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7434 return type. Return NULL_TREE if no simplification can be made. */
7435
7436 static tree
7437 fold_builtin_cproj (location_t loc, tree arg, tree type)
7438 {
7439 if (!validate_arg (arg, COMPLEX_TYPE)
7440 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7441 return NULL_TREE;
7442
7443 /* If there are no infinities, return arg. */
7444 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7445 return non_lvalue_loc (loc, arg);
7446
7447 /* Calculate the result when the argument is a constant. */
7448 if (TREE_CODE (arg) == COMPLEX_CST)
7449 {
7450 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7451 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7452
7453 if (real_isinf (real) || real_isinf (imag))
7454 return build_complex_cproj (type, imag->sign);
7455 else
7456 return arg;
7457 }
7458 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7459 {
7460 tree real = TREE_OPERAND (arg, 0);
7461 tree imag = TREE_OPERAND (arg, 1);
7462
7463 STRIP_NOPS (real);
7464 STRIP_NOPS (imag);
7465
7466 /* If the real part is inf and the imag part is known to be
7467 nonnegative, return (inf + 0i). Remember side-effects are
7468 possible in the imag part. */
7469 if (TREE_CODE (real) == REAL_CST
7470 && real_isinf (TREE_REAL_CST_PTR (real))
7471 && tree_expr_nonnegative_p (imag))
7472 return omit_one_operand_loc (loc, type,
7473 build_complex_cproj (type, false),
7474 arg);
7475
7476 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7477 Remember side-effects are possible in the real part. */
7478 if (TREE_CODE (imag) == REAL_CST
7479 && real_isinf (TREE_REAL_CST_PTR (imag)))
7480 return
7481 omit_one_operand_loc (loc, type,
7482 build_complex_cproj (type, TREE_REAL_CST_PTR
7483 (imag)->sign), arg);
7484 }
7485
7486 return NULL_TREE;
7487 }
7488
7489 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7490 Return NULL_TREE if no simplification can be made. */
7491
7492 static tree
7493 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7494 {
7495
7496 enum built_in_function fcode;
7497 tree res;
7498
7499 if (!validate_arg (arg, REAL_TYPE))
7500 return NULL_TREE;
7501
7502 /* Calculate the result when the argument is a constant. */
7503 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7504 return res;
7505
7506 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7507 fcode = builtin_mathfn_code (arg);
7508 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7509 {
7510 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7511 arg = fold_build2_loc (loc, MULT_EXPR, type,
7512 CALL_EXPR_ARG (arg, 0),
7513 build_real (type, dconsthalf));
7514 return build_call_expr_loc (loc, expfn, 1, arg);
7515 }
7516
7517 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7518 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7519 {
7520 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7521
7522 if (powfn)
7523 {
7524 tree arg0 = CALL_EXPR_ARG (arg, 0);
7525 tree tree_root;
7526 /* The inner root was either sqrt or cbrt. */
7527 /* This was a conditional expression but it triggered a bug
7528 in Sun C 5.5. */
7529 REAL_VALUE_TYPE dconstroot;
7530 if (BUILTIN_SQRT_P (fcode))
7531 dconstroot = dconsthalf;
7532 else
7533 dconstroot = dconst_third ();
7534
7535 /* Adjust for the outer root. */
7536 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7537 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7538 tree_root = build_real (type, dconstroot);
7539 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7540 }
7541 }
7542
7543 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7544 if (flag_unsafe_math_optimizations
7545 && (fcode == BUILT_IN_POW
7546 || fcode == BUILT_IN_POWF
7547 || fcode == BUILT_IN_POWL))
7548 {
7549 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7550 tree arg0 = CALL_EXPR_ARG (arg, 0);
7551 tree arg1 = CALL_EXPR_ARG (arg, 1);
7552 tree narg1;
7553 if (!tree_expr_nonnegative_p (arg0))
7554 arg0 = build1 (ABS_EXPR, type, arg0);
7555 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7556 build_real (type, dconsthalf));
7557 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7558 }
7559
7560 return NULL_TREE;
7561 }
7562
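/* The unsafe-math sqrt rewrites above, as identities (sketch):

     sqrt (expN (x))   = expN (x * 0.5)
     sqrt (sqrt (x))   = pow (x, 0.25)
     sqrt (cbrt (x))   = pow (x, 1.0/6.0)
     sqrt (pow (x, y)) = pow (fabs (x), y * 0.5)

   The 0.25 and 1/6 exponents come from halving the inner root's
   exponent: SET_REAL_EXP decrements the binary exponent of 1/2 or 1/3,
   i.e. divides it by two.  */
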
7563 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7564 Return NULL_TREE if no simplification can be made. */
7565
7566 static tree
7567 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7568 {
7569 const enum built_in_function fcode = builtin_mathfn_code (arg);
7570 tree res;
7571
7572 if (!validate_arg (arg, REAL_TYPE))
7573 return NULL_TREE;
7574
7575 /* Calculate the result when the argument is a constant. */
7576 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7577 return res;
7578
7579 if (flag_unsafe_math_optimizations)
7580 {
7581 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7582 if (BUILTIN_EXPONENT_P (fcode))
7583 {
7584 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7585 const REAL_VALUE_TYPE third_trunc =
7586 real_value_truncate (TYPE_MODE (type), dconst_third ());
7587 arg = fold_build2_loc (loc, MULT_EXPR, type,
7588 CALL_EXPR_ARG (arg, 0),
7589 build_real (type, third_trunc));
7590 return build_call_expr_loc (loc, expfn, 1, arg);
7591 }
7592
7593 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7594 if (BUILTIN_SQRT_P (fcode))
7595 {
7596 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7597
7598 if (powfn)
7599 {
7600 tree arg0 = CALL_EXPR_ARG (arg, 0);
7601 tree tree_root;
7602 REAL_VALUE_TYPE dconstroot = dconst_third ();
7603
7604 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7605 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7606 tree_root = build_real (type, dconstroot);
7607 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7608 }
7609 }
7610
7611 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7612 if (BUILTIN_CBRT_P (fcode))
7613 {
7614 tree arg0 = CALL_EXPR_ARG (arg, 0);
7615 if (tree_expr_nonnegative_p (arg0))
7616 {
7617 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7618
7619 if (powfn)
7620 {
7621 tree tree_root;
7622 REAL_VALUE_TYPE dconstroot;
7623
7624 real_arithmetic (&dconstroot, MULT_EXPR,
7625 dconst_third_ptr (), dconst_third_ptr ());
7626 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7627 tree_root = build_real (type, dconstroot);
7628 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7629 }
7630 }
7631 }
7632
7633 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7634 if (fcode == BUILT_IN_POW
7635 || fcode == BUILT_IN_POWF
7636 || fcode == BUILT_IN_POWL)
7637 {
7638 tree arg00 = CALL_EXPR_ARG (arg, 0);
7639 tree arg01 = CALL_EXPR_ARG (arg, 1);
7640 if (tree_expr_nonnegative_p (arg00))
7641 {
7642 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7643 const REAL_VALUE_TYPE dconstroot
7644 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7645 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7646 build_real (type, dconstroot));
7647 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7648 }
7649 }
7650 }
7651 return NULL_TREE;
7652 }
7653
7654 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7655 TYPE is the type of the return value. Return NULL_TREE if no
7656 simplification can be made. */
7657
7658 static tree
7659 fold_builtin_cos (location_t loc,
7660 tree arg, tree type, tree fndecl)
7661 {
7662 tree res, narg;
7663
7664 if (!validate_arg (arg, REAL_TYPE))
7665 return NULL_TREE;
7666
7667 /* Calculate the result when the argument is a constant. */
7668 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7669 return res;
7670
7671 /* Optimize cos(-x) into cos (x). */
7672 if ((narg = fold_strip_sign_ops (arg)))
7673 return build_call_expr_loc (loc, fndecl, 1, narg);
7674
7675 return NULL_TREE;
7676 }
7677
7678 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7679 Return NULL_TREE if no simplification can be made. */
7680
7681 static tree
7682 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7683 {
7684 if (validate_arg (arg, REAL_TYPE))
7685 {
7686 tree res, narg;
7687
7688 /* Calculate the result when the argument is a constant. */
7689 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7690 return res;
7691
7692 /* Optimize cosh(-x) into cosh (x). */
7693 if ((narg = fold_strip_sign_ops (arg)))
7694 return build_call_expr_loc (loc, fndecl, 1, narg);
7695 }
7696
7697 return NULL_TREE;
7698 }
7699
7700 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7701 argument ARG. TYPE is the type of the return value. Return
7702 NULL_TREE if no simplification can be made. */
7703
7704 static tree
7705 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7706 bool hyper)
7707 {
7708 if (validate_arg (arg, COMPLEX_TYPE)
7709 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7710 {
7711 tree tmp;
7712
7713 /* Calculate the result when the argument is a constant. */
7714 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7715 return tmp;
7716
7717 /* Optimize fn(-x) into fn(x). */
7718 if ((tmp = fold_strip_sign_ops (arg)))
7719 return build_call_expr_loc (loc, fndecl, 1, tmp);
7720 }
7721
7722 return NULL_TREE;
7723 }
7724
7725 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7726 Return NULL_TREE if no simplification can be made. */
7727
7728 static tree
7729 fold_builtin_tan (tree arg, tree type)
7730 {
7731 enum built_in_function fcode;
7732 tree res;
7733
7734 if (!validate_arg (arg, REAL_TYPE))
7735 return NULL_TREE;
7736
7737 /* Calculate the result when the argument is a constant. */
7738 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7739 return res;
7740
7741 /* Optimize tan(atan(x)) = x. */
7742 fcode = builtin_mathfn_code (arg);
7743 if (flag_unsafe_math_optimizations
7744 && (fcode == BUILT_IN_ATAN
7745 || fcode == BUILT_IN_ATANF
7746 || fcode == BUILT_IN_ATANL))
7747 return CALL_EXPR_ARG (arg, 0);
7748
7749 return NULL_TREE;
7750 }
7751
7752 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7753 NULL_TREE if no simplification can be made. */
7754
7755 static tree
7756 fold_builtin_sincos (location_t loc,
7757 tree arg0, tree arg1, tree arg2)
7758 {
7759 tree type;
7760 tree res, fn, call;
7761
7762 if (!validate_arg (arg0, REAL_TYPE)
7763 || !validate_arg (arg1, POINTER_TYPE)
7764 || !validate_arg (arg2, POINTER_TYPE))
7765 return NULL_TREE;
7766
7767 type = TREE_TYPE (arg0);
7768
7769 /* Calculate the result when the argument is a constant. */
7770 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7771 return res;
7772
7773 /* Canonicalize sincos to cexpi. */
7774 if (!targetm.libc_has_function (function_c99_math_complex))
7775 return NULL_TREE;
7776 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7777 if (!fn)
7778 return NULL_TREE;
7779
7780 call = build_call_expr_loc (loc, fn, 1, arg0);
7781 call = builtin_save_expr (call);
7782
7783 return build2 (COMPOUND_EXPR, void_type_node,
7784 build2 (MODIFY_EXPR, void_type_node,
7785 build_fold_indirect_ref_loc (loc, arg1),
7786 build1 (IMAGPART_EXPR, type, call)),
7787 build2 (MODIFY_EXPR, void_type_node,
7788 build_fold_indirect_ref_loc (loc, arg2),
7789 build1 (REALPART_EXPR, type, call)));
7790 }
7791
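/* A source-level sketch of the cexpi canonicalization above (done only
   when the C library provides the C99 complex functions):

     sincos (x, &s, &c);

   becomes, in effect,

     __complex__ double tmp = cexpi (x);   -- cos (x) + i * sin (x)
     s = __imag__ tmp, c = __real__ tmp;

   built as a COMPOUND_EXPR of two MODIFY_EXPRs, with the cexpi call
   wrapped in a SAVE_EXPR so it is evaluated exactly once.  */
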
7792 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7793 NULL_TREE if no simplification can be made. */
7794
7795 static tree
7796 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7797 {
7798 tree rtype;
7799 tree realp, imagp, ifn;
7800 tree res;
7801
7802 if (!validate_arg (arg0, COMPLEX_TYPE)
7803 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7804 return NULL_TREE;
7805
7806 /* Calculate the result when the argument is a constant. */
7807 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7808 return res;
7809
7810 rtype = TREE_TYPE (TREE_TYPE (arg0));
7811
7812   /* If we can figure out the real part of arg0 and it is constant zero,
7813      fold to cexpi.  */
7814 if (!targetm.libc_has_function (function_c99_math_complex))
7815 return NULL_TREE;
7816 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7817 if (!ifn)
7818 return NULL_TREE;
7819
7820 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7821 && real_zerop (realp))
7822 {
7823 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7824 return build_call_expr_loc (loc, ifn, 1, narg);
7825 }
7826
7827   /* If we can easily decompose the real and imaginary parts, split cexp
7828      into exp (r) * cexpi (i).  */
7829 if (flag_unsafe_math_optimizations
7830 && realp)
7831 {
7832 tree rfn, rcall, icall;
7833
7834 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7835 if (!rfn)
7836 return NULL_TREE;
7837
7838 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7839 if (!imagp)
7840 return NULL_TREE;
7841
7842 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7843 icall = builtin_save_expr (icall);
7844 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7845 rcall = builtin_save_expr (rcall);
7846 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7847 fold_build2_loc (loc, MULT_EXPR, rtype,
7848 rcall,
7849 fold_build1_loc (loc, REALPART_EXPR,
7850 rtype, icall)),
7851 fold_build2_loc (loc, MULT_EXPR, rtype,
7852 rcall,
7853 fold_build1_loc (loc, IMAGPART_EXPR,
7854 rtype, icall)));
7855 }
7856
7857 return NULL_TREE;
7858 }
7859
7860 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7861 Return NULL_TREE if no simplification can be made. */
7862
7863 static tree
7864 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7865 {
7866 if (!validate_arg (arg, REAL_TYPE))
7867 return NULL_TREE;
7868
7869 /* Optimize trunc of constant value. */
7870 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7871 {
7872 REAL_VALUE_TYPE r, x;
7873 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7874
7875 x = TREE_REAL_CST (arg);
7876 real_trunc (&r, TYPE_MODE (type), &x);
7877 return build_real (type, r);
7878 }
7879
7880 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7881 }
7882
7883 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7884 Return NULL_TREE if no simplification can be made. */
7885
7886 static tree
7887 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7888 {
7889 if (!validate_arg (arg, REAL_TYPE))
7890 return NULL_TREE;
7891
7892 /* Optimize floor of constant value. */
7893 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7894 {
7895 REAL_VALUE_TYPE x;
7896
7897 x = TREE_REAL_CST (arg);
7898 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7899 {
7900 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7901 REAL_VALUE_TYPE r;
7902
7903 real_floor (&r, TYPE_MODE (type), &x);
7904 return build_real (type, r);
7905 }
7906 }
7907
7908 /* Fold floor (x) where x is nonnegative to trunc (x). */
7909 if (tree_expr_nonnegative_p (arg))
7910 {
7911 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7912 if (truncfn)
7913 return build_call_expr_loc (loc, truncfn, 1, arg);
7914 }
7915
7916 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7917 }
7918
7919 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7920 Return NULL_TREE if no simplification can be made. */
7921
7922 static tree
7923 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7924 {
7925 if (!validate_arg (arg, REAL_TYPE))
7926 return NULL_TREE;
7927
7928 /* Optimize ceil of constant value. */
7929 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7930 {
7931 REAL_VALUE_TYPE x;
7932
7933 x = TREE_REAL_CST (arg);
7934 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7935 {
7936 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7937 REAL_VALUE_TYPE r;
7938
7939 real_ceil (&r, TYPE_MODE (type), &x);
7940 return build_real (type, r);
7941 }
7942 }
7943
7944 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7945 }
7946
7947 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7948 Return NULL_TREE if no simplification can be made. */
7949
7950 static tree
7951 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7952 {
7953 if (!validate_arg (arg, REAL_TYPE))
7954 return NULL_TREE;
7955
7956 /* Optimize round of constant value. */
7957 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7958 {
7959 REAL_VALUE_TYPE x;
7960
7961 x = TREE_REAL_CST (arg);
7962 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7963 {
7964 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7965 REAL_VALUE_TYPE r;
7966
7967 real_round (&r, TYPE_MODE (type), &x);
7968 return build_real (type, r);
7969 }
7970 }
7971
7972 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7973 }
7974
7975 /* Fold function call to builtin lround, lroundf or lroundl (or the
7976 corresponding long long versions) and other rounding functions. ARG
7977 is the argument to the call. Return NULL_TREE if no simplification
7978 can be made. */
7979
7980 static tree
7981 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7982 {
7983 if (!validate_arg (arg, REAL_TYPE))
7984 return NULL_TREE;
7985
7986 /* Optimize lround of constant value. */
7987 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7988 {
7989 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7990
7991 if (real_isfinite (&x))
7992 {
7993 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7994 tree ftype = TREE_TYPE (arg);
7995 REAL_VALUE_TYPE r;
7996 bool fail = false;
7997
7998 switch (DECL_FUNCTION_CODE (fndecl))
7999 {
8000 CASE_FLT_FN (BUILT_IN_IFLOOR):
8001 CASE_FLT_FN (BUILT_IN_LFLOOR):
8002 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8003 real_floor (&r, TYPE_MODE (ftype), &x);
8004 break;
8005
8006 CASE_FLT_FN (BUILT_IN_ICEIL):
8007 CASE_FLT_FN (BUILT_IN_LCEIL):
8008 CASE_FLT_FN (BUILT_IN_LLCEIL):
8009 real_ceil (&r, TYPE_MODE (ftype), &x);
8010 break;
8011
8012 CASE_FLT_FN (BUILT_IN_IROUND):
8013 CASE_FLT_FN (BUILT_IN_LROUND):
8014 CASE_FLT_FN (BUILT_IN_LLROUND):
8015 real_round (&r, TYPE_MODE (ftype), &x);
8016 break;
8017
8018 default:
8019 gcc_unreachable ();
8020 }
8021
8022 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8023 if (!fail)
8024 return wide_int_to_tree (itype, val);
8025 }
8026 }
8027
8028 switch (DECL_FUNCTION_CODE (fndecl))
8029 {
8030 CASE_FLT_FN (BUILT_IN_LFLOOR):
8031 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8032 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8033 if (tree_expr_nonnegative_p (arg))
8034 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8035 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8036 break;
8037 default:;
8038 }
8039
8040 return fold_fixed_mathfn (loc, fndecl, arg);
8041 }
8042
8043 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8044    and their long and long long variants (e.g. ffsl and ffsll).  ARG is
8045 the argument to the call. Return NULL_TREE if no simplification can
8046 be made. */
8047
8048 static tree
8049 fold_builtin_bitop (tree fndecl, tree arg)
8050 {
8051 if (!validate_arg (arg, INTEGER_TYPE))
8052 return NULL_TREE;
8053
8054 /* Optimize for constant argument. */
8055 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8056 {
8057 tree type = TREE_TYPE (arg);
8058 int result;
8059
8060 switch (DECL_FUNCTION_CODE (fndecl))
8061 {
8062 CASE_INT_FN (BUILT_IN_FFS):
8063 result = wi::ffs (arg);
8064 break;
8065
8066 CASE_INT_FN (BUILT_IN_CLZ):
8067 if (wi::ne_p (arg, 0))
8068 result = wi::clz (arg);
8069 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8070 result = TYPE_PRECISION (type);
8071 break;
8072
8073 CASE_INT_FN (BUILT_IN_CTZ):
8074 if (wi::ne_p (arg, 0))
8075 result = wi::ctz (arg);
8076 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8077 result = TYPE_PRECISION (type);
8078 break;
8079
8080 CASE_INT_FN (BUILT_IN_CLRSB):
8081 result = wi::clrsb (arg);
8082 break;
8083
8084 CASE_INT_FN (BUILT_IN_POPCOUNT):
8085 result = wi::popcount (arg);
8086 break;
8087
8088 CASE_INT_FN (BUILT_IN_PARITY):
8089 result = wi::parity (arg);
8090 break;
8091
8092 default:
8093 gcc_unreachable ();
8094 }
8095
8096 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8097 }
8098
8099 return NULL_TREE;
8100 }
8101
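/* Worked examples of the constant folds above, for a 32-bit int
   (illustrative only):

     __builtin_ffs (0x10)       -> 5    1-based index of the lowest set bit
     __builtin_clz (1)          -> 31
     __builtin_ctz (8)          -> 3
     __builtin_popcount (0xff)  -> 8
     __builtin_parity (7)       -> 1

   clz/ctz of zero fold to the target-defined value when
   CLZ_DEFINED_VALUE_AT_ZERO / CTZ_DEFINED_VALUE_AT_ZERO supply one, and
   to the type precision otherwise.  */
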
8102 /* Fold function call to builtin_bswap and the short, long and long long
8103 variants. Return NULL_TREE if no simplification can be made. */
8104 static tree
8105 fold_builtin_bswap (tree fndecl, tree arg)
8106 {
8107 if (! validate_arg (arg, INTEGER_TYPE))
8108 return NULL_TREE;
8109
8110 /* Optimize constant value. */
8111 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8112 {
8113 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8114
8115 switch (DECL_FUNCTION_CODE (fndecl))
8116 {
8117 case BUILT_IN_BSWAP16:
8118 case BUILT_IN_BSWAP32:
8119 case BUILT_IN_BSWAP64:
8120 {
8121 signop sgn = TYPE_SIGN (type);
8122 tree result =
8123 wide_int_to_tree (type,
8124 wide_int::from (arg, TYPE_PRECISION (type),
8125 sgn).bswap ());
8126 return result;
8127 }
8128 default:
8129 gcc_unreachable ();
8130 }
8131 }
8132
8133 return NULL_TREE;
8134 }
8135
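/* Worked examples of the constant bswap fold above:

     __builtin_bswap32 (0x12345678)  ->  0x78563412
     __builtin_bswap16 (0xabcd)      ->  0xcdab

   The constant is first brought to the precision of the builtin's
   return type with wide_int::from and then byte-reversed with bswap,
   so a bswap16 of a wider constant only sees its low 16 bits.  */
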
8136 /* A subroutine of fold_builtin to fold the various logarithmic
8137    functions.  Return NULL_TREE if no simplification can be made.
8138 FUNC is the corresponding MPFR logarithm function. */
8139
8140 static tree
8141 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8142 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8143 {
8144 if (validate_arg (arg, REAL_TYPE))
8145 {
8146 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8147 tree res;
8148 const enum built_in_function fcode = builtin_mathfn_code (arg);
8149
8150 /* Calculate the result when the argument is a constant. */
8151 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8152 return res;
8153
8154 /* Special case, optimize logN(expN(x)) = x. */
8155 if (flag_unsafe_math_optimizations
8156 && ((func == mpfr_log
8157 && (fcode == BUILT_IN_EXP
8158 || fcode == BUILT_IN_EXPF
8159 || fcode == BUILT_IN_EXPL))
8160 || (func == mpfr_log2
8161 && (fcode == BUILT_IN_EXP2
8162 || fcode == BUILT_IN_EXP2F
8163 || fcode == BUILT_IN_EXP2L))
8164 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8165 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8166
8167 /* Optimize logN(func()) for various exponential functions. We
8168 want to determine the value "x" and the power "exponent" in
8169 order to transform logN(x**exponent) into exponent*logN(x). */
8170 if (flag_unsafe_math_optimizations)
8171 {
8172 tree exponent = 0, x = 0;
8173
8174 switch (fcode)
8175 {
8176 CASE_FLT_FN (BUILT_IN_EXP):
8177 	    /* Prepare to do logN(exp(exponent)) -> exponent*logN(e).  */
8178 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8179 dconst_e ()));
8180 exponent = CALL_EXPR_ARG (arg, 0);
8181 break;
8182 CASE_FLT_FN (BUILT_IN_EXP2):
8183 	    /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2).  */
8184 x = build_real (type, dconst2);
8185 exponent = CALL_EXPR_ARG (arg, 0);
8186 break;
8187 CASE_FLT_FN (BUILT_IN_EXP10):
8188 CASE_FLT_FN (BUILT_IN_POW10):
8189 	    /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10).  */
8190 {
8191 REAL_VALUE_TYPE dconst10;
8192 real_from_integer (&dconst10, VOIDmode, 10, SIGNED);
8193 x = build_real (type, dconst10);
8194 }
8195 exponent = CALL_EXPR_ARG (arg, 0);
8196 break;
8197 CASE_FLT_FN (BUILT_IN_SQRT):
8198 	    /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x).  */
8199 x = CALL_EXPR_ARG (arg, 0);
8200 exponent = build_real (type, dconsthalf);
8201 break;
8202 CASE_FLT_FN (BUILT_IN_CBRT):
8203 	    /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x).  */
8204 x = CALL_EXPR_ARG (arg, 0);
8205 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8206 dconst_third ()));
8207 break;
8208 CASE_FLT_FN (BUILT_IN_POW):
8209 	    /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x).  */
8210 x = CALL_EXPR_ARG (arg, 0);
8211 exponent = CALL_EXPR_ARG (arg, 1);
8212 break;
8213 default:
8214 break;
8215 }
8216
8217 /* Now perform the optimization. */
8218 if (x && exponent)
8219 {
8220 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8221 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8222 }
8223 }
8224 }
8225
8226 return NULL_TREE;
8227 }
8228
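/* The unsafe-math logarithm rewrites above, as identities (sketch):

     logN (expN (x))   = x                   matching base pair
     log  (exp2 (x))   = x * log (2.0)
     log  (exp10 (x))  = x * log (10.0)
     logN (sqrt (x))   = 0.5 * logN (x)
     logN (cbrt (x))   = (1.0/3.0) * logN (x)
     logN (pow (x, y)) = y * logN (x)

   i.e. every argument of the form x**exponent is rewritten to
   exponent * logN (x).  */
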
8229 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8230 NULL_TREE if no simplification can be made. */
8231
8232 static tree
8233 fold_builtin_hypot (location_t loc, tree fndecl,
8234 tree arg0, tree arg1, tree type)
8235 {
8236 tree res, narg0, narg1;
8237
8238 if (!validate_arg (arg0, REAL_TYPE)
8239 || !validate_arg (arg1, REAL_TYPE))
8240 return NULL_TREE;
8241
8242 /* Calculate the result when the argument is a constant. */
8243 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8244 return res;
8245
8246 /* If either argument to hypot has a negate or abs, strip that off.
8247 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8248 narg0 = fold_strip_sign_ops (arg0);
8249 narg1 = fold_strip_sign_ops (arg1);
8250 if (narg0 || narg1)
8251 {
8252 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8253 narg1 ? narg1 : arg1);
8254 }
8255
8256 /* If either argument is zero, hypot is fabs of the other. */
8257 if (real_zerop (arg0))
8258 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8259 else if (real_zerop (arg1))
8260 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8261
8262 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8263 if (flag_unsafe_math_optimizations
8264 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8265 {
8266 const REAL_VALUE_TYPE sqrt2_trunc
8267 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8268 return fold_build2_loc (loc, MULT_EXPR, type,
8269 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8270 build_real (type, sqrt2_trunc));
8271 }
8272
8273 return NULL_TREE;
8274 }
8275
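/* The hypot folds above, as identities (sketch; the last one requires
   -funsafe-math-optimizations):

     hypot (-x, fabs (y)) = hypot (x, y)   sign operations stripped
     hypot (x, 0)         = fabs (x)
     hypot (0, y)         = fabs (y)
     hypot (x, x)         = fabs (x) * sqrt (2)  */
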
8276
8277 /* Fold a builtin function call to pow, powf, or powl. Return
8278 NULL_TREE if no simplification can be made. */
8279 static tree
8280 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8281 {
8282 tree res;
8283
8284 if (!validate_arg (arg0, REAL_TYPE)
8285 || !validate_arg (arg1, REAL_TYPE))
8286 return NULL_TREE;
8287
8288 /* Calculate the result when the argument is a constant. */
8289 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8290 return res;
8291
8292 /* Optimize pow(1.0,y) = 1.0. */
8293 if (real_onep (arg0))
8294 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8295
8296 if (TREE_CODE (arg1) == REAL_CST
8297 && !TREE_OVERFLOW (arg1))
8298 {
8299 REAL_VALUE_TYPE cint;
8300 REAL_VALUE_TYPE c;
8301 HOST_WIDE_INT n;
8302
8303 c = TREE_REAL_CST (arg1);
8304
8305 /* Optimize pow(x,0.0) = 1.0. */
8306 if (REAL_VALUES_EQUAL (c, dconst0))
8307 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8308 arg0);
8309
8310 /* Optimize pow(x,1.0) = x. */
8311 if (REAL_VALUES_EQUAL (c, dconst1))
8312 return arg0;
8313
8314 /* Optimize pow(x,-1.0) = 1.0/x. */
8315 if (REAL_VALUES_EQUAL (c, dconstm1))
8316 return fold_build2_loc (loc, RDIV_EXPR, type,
8317 build_real (type, dconst1), arg0);
8318
8319 /* Optimize pow(x,0.5) = sqrt(x). */
8320 if (flag_unsafe_math_optimizations
8321 && REAL_VALUES_EQUAL (c, dconsthalf))
8322 {
8323 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8324
8325 if (sqrtfn != NULL_TREE)
8326 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8327 }
8328
8329 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8330 if (flag_unsafe_math_optimizations)
8331 {
8332 const REAL_VALUE_TYPE dconstroot
8333 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8334
8335 if (REAL_VALUES_EQUAL (c, dconstroot))
8336 {
8337 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8338 if (cbrtfn != NULL_TREE)
8339 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8340 }
8341 }
8342
8343 /* Check for an integer exponent. */
8344 n = real_to_integer (&c);
8345 real_from_integer (&cint, VOIDmode, n, SIGNED);
8346 if (real_identical (&c, &cint))
8347 {
8348 /* Attempt to evaluate pow at compile-time, unless this should
8349 raise an exception. */
8350 if (TREE_CODE (arg0) == REAL_CST
8351 && !TREE_OVERFLOW (arg0)
8352 && (n > 0
8353 || (!flag_trapping_math && !flag_errno_math)
8354 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8355 {
8356 REAL_VALUE_TYPE x;
8357 bool inexact;
8358
8359 x = TREE_REAL_CST (arg0);
8360 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8361 if (flag_unsafe_math_optimizations || !inexact)
8362 return build_real (type, x);
8363 }
8364
8365 /* Strip sign ops from even integer powers. */
8366 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8367 {
8368 tree narg0 = fold_strip_sign_ops (arg0);
8369 if (narg0)
8370 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8371 }
8372 }
8373 }
8374
8375 if (flag_unsafe_math_optimizations)
8376 {
8377 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8378
8379 /* Optimize pow(expN(x),y) = expN(x*y). */
8380 if (BUILTIN_EXPONENT_P (fcode))
8381 {
8382 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8383 tree arg = CALL_EXPR_ARG (arg0, 0);
8384 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8385 return build_call_expr_loc (loc, expfn, 1, arg);
8386 }
8387
8388 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8389 if (BUILTIN_SQRT_P (fcode))
8390 {
8391 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8392 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8393 build_real (type, dconsthalf));
8394 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8395 }
8396
8397 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8398 if (BUILTIN_CBRT_P (fcode))
8399 {
8400 tree arg = CALL_EXPR_ARG (arg0, 0);
8401 if (tree_expr_nonnegative_p (arg))
8402 {
8403 const REAL_VALUE_TYPE dconstroot
8404 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8405 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8406 build_real (type, dconstroot));
8407 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8408 }
8409 }
8410
8411 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8412 if (fcode == BUILT_IN_POW
8413 || fcode == BUILT_IN_POWF
8414 || fcode == BUILT_IN_POWL)
8415 {
8416 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8417 if (tree_expr_nonnegative_p (arg00))
8418 {
8419 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8420 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8421 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8422 }
8423 }
8424 }
8425
8426 return NULL_TREE;
8427 }
8428
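/* Summary of the pow folds above as identities (sketch; starred lines
   require -funsafe-math-optimizations):

     pow (1.0, y)         = 1.0
     pow (x, 0.0)         = 1.0
     pow (x, 1.0)         = x
     pow (x, -1.0)        = 1.0 / x
     pow (x, 0.5)         = sqrt (x)              (*)
     pow (x, 1.0/3.0)     = cbrt (x)              (*)
     pow (expN (x), y)    = expN (x * y)          (*)
     pow (sqrt (x), y)    = pow (x, y * 0.5)      (*)
     pow (cbrt (x), y)    = pow (x, y / 3.0)      (*) x nonnegative
     pow (pow (x, y), z)  = pow (x, y * z)        (*) x nonnegative

   Constant integer exponents are also evaluated at compile time with
   real_powi when doing so cannot hide a trap or an errno setting the
   program could observe.  */
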
8429 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8430 Return NULL_TREE if no simplification can be made. */
8431 static tree
8432 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8433 tree arg0, tree arg1, tree type)
8434 {
8435 if (!validate_arg (arg0, REAL_TYPE)
8436 || !validate_arg (arg1, INTEGER_TYPE))
8437 return NULL_TREE;
8438
8439   /* Optimize powi(1.0,y) = 1.0.  */
8440 if (real_onep (arg0))
8441 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8442
8443 if (tree_fits_shwi_p (arg1))
8444 {
8445 HOST_WIDE_INT c = tree_to_shwi (arg1);
8446
8447 /* Evaluate powi at compile-time. */
8448 if (TREE_CODE (arg0) == REAL_CST
8449 && !TREE_OVERFLOW (arg0))
8450 {
8451 REAL_VALUE_TYPE x;
8452 x = TREE_REAL_CST (arg0);
8453 real_powi (&x, TYPE_MODE (type), &x, c);
8454 return build_real (type, x);
8455 }
8456
8457       /* Optimize powi(x,0) = 1.0.  */
8458 if (c == 0)
8459 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8460 arg0);
8461
8462       /* Optimize powi(x,1) = x.  */
8463 if (c == 1)
8464 return arg0;
8465
8466       /* Optimize powi(x,-1) = 1.0/x.  */
8467 if (c == -1)
8468 return fold_build2_loc (loc, RDIV_EXPR, type,
8469 build_real (type, dconst1), arg0);
8470 }
8471
8472 return NULL_TREE;
8473 }
8474
8475 /* A subroutine of fold_builtin to fold the various exponent
8476 functions. Return NULL_TREE if no simplification can be made.
8477 FUNC is the corresponding MPFR exponent function. */
8478
8479 static tree
8480 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8481 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8482 {
8483 if (validate_arg (arg, REAL_TYPE))
8484 {
8485 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8486 tree res;
8487
8488 /* Calculate the result when the argument is a constant. */
8489 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8490 return res;
8491
8492 /* Optimize expN(logN(x)) = x. */
8493 if (flag_unsafe_math_optimizations)
8494 {
8495 const enum built_in_function fcode = builtin_mathfn_code (arg);
8496
8497 if ((func == mpfr_exp
8498 && (fcode == BUILT_IN_LOG
8499 || fcode == BUILT_IN_LOGF
8500 || fcode == BUILT_IN_LOGL))
8501 || (func == mpfr_exp2
8502 && (fcode == BUILT_IN_LOG2
8503 || fcode == BUILT_IN_LOG2F
8504 || fcode == BUILT_IN_LOG2L))
8505 || (func == mpfr_exp10
8506 && (fcode == BUILT_IN_LOG10
8507 || fcode == BUILT_IN_LOG10F
8508 || fcode == BUILT_IN_LOG10L)))
8509 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8510 }
8511 }
8512
8513 return NULL_TREE;
8514 }
8515
8516 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8517 Return NULL_TREE if no simplification can be made. */
8518
8519 static tree
8520 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8521 {
8522 tree fn, len, lenp1, call, type;
8523
8524 if (!validate_arg (dest, POINTER_TYPE)
8525 || !validate_arg (src, POINTER_TYPE))
8526 return NULL_TREE;
8527
8528 len = c_strlen (src, 1);
8529 if (!len
8530 || TREE_CODE (len) != INTEGER_CST)
8531 return NULL_TREE;
8532
8533 if (optimize_function_for_size_p (cfun)
8534 /* If length is zero it's small enough. */
8535 && !integer_zerop (len))
8536 return NULL_TREE;
8537
8538 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8539 if (!fn)
8540 return NULL_TREE;
8541
8542 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8543 fold_convert_loc (loc, size_type_node, len),
8544 build_int_cst (size_type_node, 1));
8545 /* We use dest twice in building our expression. Save it from
8546 multiple expansions. */
8547 dest = builtin_save_expr (dest);
8548 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8549
8550 type = TREE_TYPE (TREE_TYPE (fndecl));
8551 dest = fold_build_pointer_plus_loc (loc, dest, len);
8552 dest = fold_convert_loc (loc, type, dest);
8553 dest = omit_one_operand_loc (loc, type, dest, call);
8554 return dest;
8555 }
8556
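/* A source-level sketch of the stpcpy fold above, for a constant-length
   source (illustrative names):

     char *p = stpcpy (dst, "abc");

   becomes, in effect,

     char *p = (memcpy (dst, "abc", 4), (char *) dst + 3);

   i.e. a memcpy of strlen + 1 bytes whose value is dst + strlen, with
   dst wrapped in a SAVE_EXPR so it is evaluated only once.  */
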
8557 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8558 arguments to the call, and TYPE is its return type.
8559 Return NULL_TREE if no simplification can be made. */
8560
8561 static tree
8562 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8563 {
8564 if (!validate_arg (arg1, POINTER_TYPE)
8565 || !validate_arg (arg2, INTEGER_TYPE)
8566 || !validate_arg (len, INTEGER_TYPE))
8567 return NULL_TREE;
8568 else
8569 {
8570 const char *p1;
8571
8572 if (TREE_CODE (arg2) != INTEGER_CST
8573 || !tree_fits_uhwi_p (len))
8574 return NULL_TREE;
8575
8576 p1 = c_getstr (arg1);
8577 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8578 {
8579 char c;
8580 const char *r;
8581 tree tem;
8582
8583 if (target_char_cast (arg2, &c))
8584 return NULL_TREE;
8585
8586 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8587
8588 if (r == NULL)
8589 return build_int_cst (TREE_TYPE (arg1), 0);
8590
8591 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8592 return fold_convert_loc (loc, type, tem);
8593 }
8594 return NULL_TREE;
8595 }
8596 }
8597
8598 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8599 Return NULL_TREE if no simplification can be made. */
8600
8601 static tree
8602 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8603 {
8604 const char *p1, *p2;
8605
8606 if (!validate_arg (arg1, POINTER_TYPE)
8607 || !validate_arg (arg2, POINTER_TYPE)
8608 || !validate_arg (len, INTEGER_TYPE))
8609 return NULL_TREE;
8610
8611 /* If the LEN parameter is zero, return zero. */
8612 if (integer_zerop (len))
8613 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8614 arg1, arg2);
8615
8616 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8617 if (operand_equal_p (arg1, arg2, 0))
8618 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8619
8620 p1 = c_getstr (arg1);
8621 p2 = c_getstr (arg2);
8622
8623 /* If all arguments are constant, and the value of len is not greater
8624 than the lengths of arg1 and arg2, evaluate at compile-time. */
8625 if (tree_fits_uhwi_p (len) && p1 && p2
8626 && compare_tree_int (len, strlen (p1) + 1) <= 0
8627 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8628 {
8629 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8630
8631 if (r > 0)
8632 return integer_one_node;
8633 else if (r < 0)
8634 return integer_minus_one_node;
8635 else
8636 return integer_zero_node;
8637 }
8638
8639   /* If the len parameter is one, return an expression corresponding to
8640      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
8641 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8642 {
8643 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8644 tree cst_uchar_ptr_node
8645 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8646
8647 tree ind1
8648 = fold_convert_loc (loc, integer_type_node,
8649 build1 (INDIRECT_REF, cst_uchar_node,
8650 fold_convert_loc (loc,
8651 cst_uchar_ptr_node,
8652 arg1)));
8653 tree ind2
8654 = fold_convert_loc (loc, integer_type_node,
8655 build1 (INDIRECT_REF, cst_uchar_node,
8656 fold_convert_loc (loc,
8657 cst_uchar_ptr_node,
8658 arg2)));
8659 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8660 }
8661
8662 return NULL_TREE;
8663 }
8664
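/* Worked examples of the memcmp folds above (illustrative only):

     memcmp (p, q, 0)          -> 0, preserving side effects of p and q
     memcmp ("abc", "abd", 3)  -> -1, evaluated at compile time; only the
                                  sign of the host memcmp result is used
     memcmp (p, q, 1)          -> *(const unsigned char *) p
                                  - *(const unsigned char *) q  */
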
8665 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8666 Return NULL_TREE if no simplification can be made. */
8667
8668 static tree
8669 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8670 {
8671 const char *p1, *p2;
8672
8673 if (!validate_arg (arg1, POINTER_TYPE)
8674 || !validate_arg (arg2, POINTER_TYPE))
8675 return NULL_TREE;
8676
8677 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8678 if (operand_equal_p (arg1, arg2, 0))
8679 return integer_zero_node;
8680
8681 p1 = c_getstr (arg1);
8682 p2 = c_getstr (arg2);
8683
8684 if (p1 && p2)
8685 {
8686 const int i = strcmp (p1, p2);
8687 if (i < 0)
8688 return integer_minus_one_node;
8689 else if (i > 0)
8690 return integer_one_node;
8691 else
8692 return integer_zero_node;
8693 }
8694
8695 /* If the second arg is "", return *(const unsigned char*)arg1. */
8696 if (p2 && *p2 == '\0')
8697 {
8698 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8699 tree cst_uchar_ptr_node
8700 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8701
8702 return fold_convert_loc (loc, integer_type_node,
8703 build1 (INDIRECT_REF, cst_uchar_node,
8704 fold_convert_loc (loc,
8705 cst_uchar_ptr_node,
8706 arg1)));
8707 }
8708
8709 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8710 if (p1 && *p1 == '\0')
8711 {
8712 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8713 tree cst_uchar_ptr_node
8714 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8715
8716 tree temp
8717 = fold_convert_loc (loc, integer_type_node,
8718 build1 (INDIRECT_REF, cst_uchar_node,
8719 fold_convert_loc (loc,
8720 cst_uchar_ptr_node,
8721 arg2)));
8722 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8723 }
8724
8725 return NULL_TREE;
8726 }
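/* An illustrative example for fold_builtin_strcmp above (not part of
   the original sources): strcmp ("ab", "ac") folds to -1; with only
   the second string known to be "", strcmp (s, "") folds to the first
   byte of S read as an unsigned char, and strcmp ("", s) to its
   negation.  */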
8727
8728 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8729 Return NULL_TREE if no simplification can be made. */
8730
8731 static tree
8732 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8733 {
8734 const char *p1, *p2;
8735
8736 if (!validate_arg (arg1, POINTER_TYPE)
8737 || !validate_arg (arg2, POINTER_TYPE)
8738 || !validate_arg (len, INTEGER_TYPE))
8739 return NULL_TREE;
8740
8741 /* If the LEN parameter is zero, return zero. */
8742 if (integer_zerop (len))
8743 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8744 arg1, arg2);
8745
8746 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8747 if (operand_equal_p (arg1, arg2, 0))
8748 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8749
8750 p1 = c_getstr (arg1);
8751 p2 = c_getstr (arg2);
8752
8753 if (tree_fits_uhwi_p (len) && p1 && p2)
8754 {
8755 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8756 if (i > 0)
8757 return integer_one_node;
8758 else if (i < 0)
8759 return integer_minus_one_node;
8760 else
8761 return integer_zero_node;
8762 }
8763
8764 /* If the second arg is "", and the length is greater than zero,
8765 return *(const unsigned char*)arg1. */
8766 if (p2 && *p2 == '\0'
8767 && TREE_CODE (len) == INTEGER_CST
8768 && tree_int_cst_sgn (len) == 1)
8769 {
8770 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8771 tree cst_uchar_ptr_node
8772 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8773
8774 return fold_convert_loc (loc, integer_type_node,
8775 build1 (INDIRECT_REF, cst_uchar_node,
8776 fold_convert_loc (loc,
8777 cst_uchar_ptr_node,
8778 arg1)));
8779 }
8780
8781 /* If the first arg is "", and the length is greater than zero,
8782 return -*(const unsigned char*)arg2. */
8783 if (p1 && *p1 == '\0'
8784 && TREE_CODE (len) == INTEGER_CST
8785 && tree_int_cst_sgn (len) == 1)
8786 {
8787 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8788 tree cst_uchar_ptr_node
8789 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8790
8791 tree temp = fold_convert_loc (loc, integer_type_node,
8792 build1 (INDIRECT_REF, cst_uchar_node,
8793 fold_convert_loc (loc,
8794 cst_uchar_ptr_node,
8795 arg2)));
8796 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8797 }
8798
8799 /* If len parameter is one, return an expression corresponding to
8800 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8801 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8802 {
8803 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8804 tree cst_uchar_ptr_node
8805 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8806
8807 tree ind1 = fold_convert_loc (loc, integer_type_node,
8808 build1 (INDIRECT_REF, cst_uchar_node,
8809 fold_convert_loc (loc,
8810 cst_uchar_ptr_node,
8811 arg1)));
8812 tree ind2 = fold_convert_loc (loc, integer_type_node,
8813 build1 (INDIRECT_REF, cst_uchar_node,
8814 fold_convert_loc (loc,
8815 cst_uchar_ptr_node,
8816 arg2)));
8817 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8818 }
8819
8820 return NULL_TREE;
8821 }
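/* An illustrative example for fold_builtin_strncmp above (not part of
   the original sources): only the first LEN characters participate,
   so strncmp ("ab", "ac", 1) folds to 0 while strncmp ("ab", "ac", 2)
   folds to -1.  */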
8822
8823 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8824 ARG. Return NULL_TREE if no simplification can be made. */
8825
8826 static tree
8827 fold_builtin_signbit (location_t loc, tree arg, tree type)
8828 {
8829 if (!validate_arg (arg, REAL_TYPE))
8830 return NULL_TREE;
8831
8832 /* If ARG is a compile-time constant, determine the result. */
8833 if (TREE_CODE (arg) == REAL_CST
8834 && !TREE_OVERFLOW (arg))
8835 {
8836 REAL_VALUE_TYPE c;
8837
8838 c = TREE_REAL_CST (arg);
8839 return (REAL_VALUE_NEGATIVE (c)
8840 ? build_one_cst (type)
8841 : build_zero_cst (type));
8842 }
8843
8844 /* If ARG is non-negative, the result is always zero. */
8845 if (tree_expr_nonnegative_p (arg))
8846 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8847
8848 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8849 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8850 return fold_convert (type,
8851 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8852 build_real (TREE_TYPE (arg), dconst0)));
8853
8854 return NULL_TREE;
8855 }
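/* An illustrative example for fold_builtin_signbit above (not part of
   the original sources): signbit (-3.0) folds to 1 and signbit (2.5)
   to 0; for a non-constant ARG whose format has no signed zeros the
   call folds to the comparison ARG < 0.0.  */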
8856
8857 /* Fold function call to builtin copysign, copysignf or copysignl with
8858 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8859 be made. */
8860
8861 static tree
8862 fold_builtin_copysign (location_t loc, tree fndecl,
8863 tree arg1, tree arg2, tree type)
8864 {
8865 tree tem;
8866
8867 if (!validate_arg (arg1, REAL_TYPE)
8868 || !validate_arg (arg2, REAL_TYPE))
8869 return NULL_TREE;
8870
8871 /* copysign(X,X) is X. */
8872 if (operand_equal_p (arg1, arg2, 0))
8873 return fold_convert_loc (loc, type, arg1);
8874
8875 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8876 if (TREE_CODE (arg1) == REAL_CST
8877 && TREE_CODE (arg2) == REAL_CST
8878 && !TREE_OVERFLOW (arg1)
8879 && !TREE_OVERFLOW (arg2))
8880 {
8881 REAL_VALUE_TYPE c1, c2;
8882
8883 c1 = TREE_REAL_CST (arg1);
8884 c2 = TREE_REAL_CST (arg2);
8885 /* c1.sign := c2.sign. */
8886 real_copysign (&c1, &c2);
8887 return build_real (type, c1);
8888 }
8889
8890 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8891 Remember to evaluate Y for side-effects. */
8892 if (tree_expr_nonnegative_p (arg2))
8893 return omit_one_operand_loc (loc, type,
8894 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8895 arg2);
8896
8897 /* Strip sign changing operations for the first argument. */
8898 tem = fold_strip_sign_ops (arg1);
8899 if (tem)
8900 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8901
8902 return NULL_TREE;
8903 }
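/* An illustrative example for fold_builtin_copysign above (not part
   of the original sources): copysign (3.0, -5.0) folds to the
   constant -3.0, and copysign (x, 2.0) folds to fabs (x) because the
   sign source is known to be non-negative.  */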
8904
8905 /* Fold a call to builtin isascii with argument ARG. */
8906
8907 static tree
8908 fold_builtin_isascii (location_t loc, tree arg)
8909 {
8910 if (!validate_arg (arg, INTEGER_TYPE))
8911 return NULL_TREE;
8912 else
8913 {
8914 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8915 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8916 build_int_cst (integer_type_node,
8917 ~ (unsigned HOST_WIDE_INT) 0x7f));
8918 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8919 arg, integer_zero_node);
8920 }
8921 }
8922
8923 /* Fold a call to builtin toascii with argument ARG. */
8924
8925 static tree
8926 fold_builtin_toascii (location_t loc, tree arg)
8927 {
8928 if (!validate_arg (arg, INTEGER_TYPE))
8929 return NULL_TREE;
8930
8931 /* Transform toascii(c) -> (c & 0x7f). */
8932 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8933 build_int_cst (integer_type_node, 0x7f));
8934 }
8935
8936 /* Fold a call to builtin isdigit with argument ARG. */
8937
8938 static tree
8939 fold_builtin_isdigit (location_t loc, tree arg)
8940 {
8941 if (!validate_arg (arg, INTEGER_TYPE))
8942 return NULL_TREE;
8943 else
8944 {
8945 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8946 /* According to the C standard, isdigit is unaffected by locale.
8947 However, it definitely is affected by the target character set. */
8948 unsigned HOST_WIDE_INT target_digit0
8949 = lang_hooks.to_target_charset ('0');
8950
8951 if (target_digit0 == 0)
8952 return NULL_TREE;
8953
8954 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8955 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8956 build_int_cst (unsigned_type_node, target_digit0));
8957 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8958 build_int_cst (unsigned_type_node, 9));
8959 }
8960 }
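/* An illustrative example for fold_builtin_isdigit above (not part of
   the original sources): on an ASCII target, isdigit (c) becomes the
   single unsigned comparison (unsigned) c - 48 <= 9, which is true
   exactly for the digit codes 48..57.  */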
8961
8962 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8963
8964 static tree
8965 fold_builtin_fabs (location_t loc, tree arg, tree type)
8966 {
8967 if (!validate_arg (arg, REAL_TYPE))
8968 return NULL_TREE;
8969
8970 arg = fold_convert_loc (loc, type, arg);
8971 if (TREE_CODE (arg) == REAL_CST)
8972 return fold_abs_const (arg, type);
8973 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8974 }
8975
8976 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8977
8978 static tree
8979 fold_builtin_abs (location_t loc, tree arg, tree type)
8980 {
8981 if (!validate_arg (arg, INTEGER_TYPE))
8982 return NULL_TREE;
8983
8984 arg = fold_convert_loc (loc, type, arg);
8985 if (TREE_CODE (arg) == INTEGER_CST)
8986 return fold_abs_const (arg, type);
8987 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8988 }
8989
8990 /* Fold a fma operation with arguments ARG[012]. */
8991
8992 tree
8993 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8994 tree type, tree arg0, tree arg1, tree arg2)
8995 {
8996 if (TREE_CODE (arg0) == REAL_CST
8997 && TREE_CODE (arg1) == REAL_CST
8998 && TREE_CODE (arg2) == REAL_CST)
8999 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9000
9001 return NULL_TREE;
9002 }
9003
9004 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9005
9006 static tree
9007 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9008 {
9009 if (validate_arg (arg0, REAL_TYPE)
9010 && validate_arg (arg1, REAL_TYPE)
9011 && validate_arg (arg2, REAL_TYPE))
9012 {
9013 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9014 if (tem)
9015 return tem;
9016
9017 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9018 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9019 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9020 }
9021 return NULL_TREE;
9022 }
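/* An illustrative example for fold_builtin_fma above (not part of the
   original sources): with all operands constant, fma (2.0, 3.0, 1.0)
   folds via MPFR to 7.0 with a single rounding; otherwise an FMA_EXPR
   is built only when the target provides an fma pattern.  */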
9023
9024 /* Fold a call to builtin fmin or fmax. */
9025
9026 static tree
9027 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9028 tree type, bool max)
9029 {
9030 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9031 {
9032 /* Calculate the result when the argument is a constant. */
9033 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9034
9035 if (res)
9036 return res;
9037
9038 /* If either argument is NaN, return the other one. Avoid the
9039 transformation if we get (and honor) a signalling NaN. Using
9040 omit_one_operand() ensures we create a non-lvalue. */
9041 if (TREE_CODE (arg0) == REAL_CST
9042 && real_isnan (&TREE_REAL_CST (arg0))
9043 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9044 || ! TREE_REAL_CST (arg0).signalling))
9045 return omit_one_operand_loc (loc, type, arg1, arg0);
9046 if (TREE_CODE (arg1) == REAL_CST
9047 && real_isnan (&TREE_REAL_CST (arg1))
9048 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9049 || ! TREE_REAL_CST (arg1).signalling))
9050 return omit_one_operand_loc (loc, type, arg0, arg1);
9051
9052 /* Transform fmin/fmax(x,x) -> x. */
9053 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9054 return omit_one_operand_loc (loc, type, arg0, arg1);
9055
9056 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9057 functions to return the numeric arg if the other one is NaN.
9058 These tree codes don't honor that, so only transform if
9059 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9060 handled, so we don't have to worry about it either. */
9061 if (flag_finite_math_only)
9062 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9063 fold_convert_loc (loc, type, arg0),
9064 fold_convert_loc (loc, type, arg1));
9065 }
9066 return NULL_TREE;
9067 }
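/* An illustrative example for fold_builtin_fmin_fmax above (not part
   of the original sources): fmax (x, __builtin_nan ("")) folds to X,
   and under -ffinite-math-only fmax (x, y) lowers to MAX_EXPR, i.e.
   effectively x > y ? x : y.  */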
9068
9069 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9070
9071 static tree
9072 fold_builtin_carg (location_t loc, tree arg, tree type)
9073 {
9074 if (validate_arg (arg, COMPLEX_TYPE)
9075 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9076 {
9077 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9078
9079 if (atan2_fn)
9080 {
9081 tree new_arg = builtin_save_expr (arg);
9082 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9083 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9084 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9085 }
9086 }
9087
9088 return NULL_TREE;
9089 }
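/* An illustrative example for fold_builtin_carg above (not part of
   the original sources): for a double complex Z, carg (z) expands to
   atan2 (__imag__ z, __real__ z) with Z wrapped in a SAVE_EXPR so it
   is evaluated only once.  */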
9090
9091 /* Fold a call to builtin logb/ilogb. */
9092
9093 static tree
9094 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9095 {
9096 if (! validate_arg (arg, REAL_TYPE))
9097 return NULL_TREE;
9098
9099 STRIP_NOPS (arg);
9100
9101 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9102 {
9103 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9104
9105 switch (value->cl)
9106 {
9107 case rvc_nan:
9108 case rvc_inf:
9109 /* If arg is Inf or NaN and we're logb, return it. */
9110 if (TREE_CODE (rettype) == REAL_TYPE)
9111 {
9112 /* For logb(-Inf) we have to return +Inf. */
9113 if (real_isinf (value) && real_isneg (value))
9114 {
9115 REAL_VALUE_TYPE tem;
9116 real_inf (&tem);
9117 return build_real (rettype, tem);
9118 }
9119 return fold_convert_loc (loc, rettype, arg);
9120 }
9121 /* Fall through... */
9122 case rvc_zero:
9123 /* Zero may set errno and/or raise an exception for logb;
9124 for ilogb we don't know the value of FP_ILOGB0. */
9125 return NULL_TREE;
9126 case rvc_normal:
9127 /* For normal numbers, proceed iff radix == 2. In GCC,
9128 normalized significands are in the range [0.5, 1.0). We
9129 want the exponent as if they were [1.0, 2.0) so get the
9130 exponent and subtract 1. */
9131 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9132 return fold_convert_loc (loc, rettype,
9133 build_int_cst (integer_type_node,
9134 REAL_EXP (value)-1));
9135 break;
9136 }
9137 }
9138
9139 return NULL_TREE;
9140 }
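/* An illustrative example for fold_builtin_logb above (not part of
   the original sources): 8.0 is stored as 0.5 * 2**4, so REAL_EXP is
   4 and logb (8.0) folds to 4 - 1 == 3; ilogb (8.0) folds to the
   integer 3 the same way.  */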
9141
9142 /* Fold a call to builtin significand, if radix == 2. */
9143
9144 static tree
9145 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9146 {
9147 if (! validate_arg (arg, REAL_TYPE))
9148 return NULL_TREE;
9149
9150 STRIP_NOPS (arg);
9151
9152 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9153 {
9154 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9155
9156 switch (value->cl)
9157 {
9158 case rvc_zero:
9159 case rvc_nan:
9160 case rvc_inf:
9161 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9162 return fold_convert_loc (loc, rettype, arg);
9163 case rvc_normal:
9164 /* For normal numbers, proceed iff radix == 2. */
9165 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9166 {
9167 REAL_VALUE_TYPE result = *value;
9168 /* In GCC, normalized significands are in the range [0.5,
9169 1.0). We want them to be [1.0, 2.0) so set the
9170 exponent to 1. */
9171 SET_REAL_EXP (&result, 1);
9172 return build_real (rettype, result);
9173 }
9174 break;
9175 }
9176 }
9177
9178 return NULL_TREE;
9179 }
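/* An illustrative example for fold_builtin_significand above (not
   part of the original sources): 12.0 is stored as 0.75 * 2**4;
   forcing the exponent to 1 rescales the fraction into [1.0, 2.0),
   so significand (12.0) folds to 1.5.  */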
9180
9181 /* Fold a call to builtin frexp, we can assume the base is 2. */
9182
9183 static tree
9184 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9185 {
9186 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9187 return NULL_TREE;
9188
9189 STRIP_NOPS (arg0);
9190
9191 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9192 return NULL_TREE;
9193
9194 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9195
9196 /* Proceed if a valid pointer type was passed in. */
9197 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9198 {
9199 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9200 tree frac, exp;
9201
9202 switch (value->cl)
9203 {
9204 case rvc_zero:
9205 /* For +-0, return (*exp = 0, +-0). */
9206 exp = integer_zero_node;
9207 frac = arg0;
9208 break;
9209 case rvc_nan:
9210 case rvc_inf:
9211 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9212 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9213 case rvc_normal:
9214 {
9215 /* Since the frexp function always expects base 2, and in
9216 GCC normalized significands are already in the range
9217 [0.5, 1.0), we have exactly what frexp wants. */
9218 REAL_VALUE_TYPE frac_rvt = *value;
9219 SET_REAL_EXP (&frac_rvt, 0);
9220 frac = build_real (rettype, frac_rvt);
9221 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9222 }
9223 break;
9224 default:
9225 gcc_unreachable ();
9226 }
9227
9228 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9229 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9230 TREE_SIDE_EFFECTS (arg1) = 1;
9231 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9232 }
9233
9234 return NULL_TREE;
9235 }
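/* An illustrative example for fold_builtin_frexp above (not part of
   the original sources): since 8.0 == 0.5 * 2**4, a call
   frexp (8.0, &e) folds to the COMPOUND_EXPR (*&e = 4, 0.5).  */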
9236
9237 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9238 then we can assume the base is two. If it's false, then we have to
9239 check the mode of the TYPE parameter in certain cases. */
9240
9241 static tree
9242 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9243 tree type, bool ldexp)
9244 {
9245 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9246 {
9247 STRIP_NOPS (arg0);
9248 STRIP_NOPS (arg1);
9249
9250 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9251 if (real_zerop (arg0) || integer_zerop (arg1)
9252 || (TREE_CODE (arg0) == REAL_CST
9253 && !real_isfinite (&TREE_REAL_CST (arg0))))
9254 return omit_one_operand_loc (loc, type, arg0, arg1);
9255
9256 /* If both arguments are constant, then try to evaluate it. */
9257 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9258 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9259 && tree_fits_shwi_p (arg1))
9260 {
9261 /* Bound the maximum adjustment to twice the range of the
9262 mode's valid exponents. Use labs to ensure the range is
9263 positive as a sanity check. */
9264 const long max_exp_adj = 2 *
9265 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9266 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9267
9268 /* Get the user-requested adjustment. */
9269 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9270
9271 /* The requested adjustment must be inside this range. This
9272 is a preliminary cap to avoid things like overflow, we
9273 may still fail to compute the result for other reasons. */
9274 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9275 {
9276 REAL_VALUE_TYPE initial_result;
9277
9278 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9279
9280 /* Ensure we didn't overflow. */
9281 if (! real_isinf (&initial_result))
9282 {
9283 const REAL_VALUE_TYPE trunc_result
9284 = real_value_truncate (TYPE_MODE (type), initial_result);
9285
9286 /* Only proceed if the target mode can hold the
9287 resulting value. */
9288 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9289 return build_real (type, trunc_result);
9290 }
9291 }
9292 }
9293 }
9294
9295 return NULL_TREE;
9296 }
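/* An illustrative example for fold_builtin_load_exponent above (not
   part of the original sources): ldexp (1.5, 2) folds to 6.0 and
   ldexp (x, 0) folds to X; an extreme request such as
   ldexp (1.0, 100000) exceeds max_exp_adj and is left for the
   library.  */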
9297
9298 /* Fold a call to builtin modf. */
9299
9300 static tree
9301 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9302 {
9303 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9304 return NULL_TREE;
9305
9306 STRIP_NOPS (arg0);
9307
9308 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9309 return NULL_TREE;
9310
9311 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9312
9313 /* Proceed if a valid pointer type was passed in. */
9314 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9315 {
9316 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9317 REAL_VALUE_TYPE trunc, frac;
9318
9319 switch (value->cl)
9320 {
9321 case rvc_nan:
9322 case rvc_zero:
9323 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9324 trunc = frac = *value;
9325 break;
9326 case rvc_inf:
9327 /* For +-Inf, return (*arg1 = arg0, +-0). */
9328 frac = dconst0;
9329 frac.sign = value->sign;
9330 trunc = *value;
9331 break;
9332 case rvc_normal:
9333 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9334 real_trunc (&trunc, VOIDmode, value);
9335 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9336 /* If the original number was negative and already
9337 integral, then the fractional part is -0.0. */
9338 if (value->sign && frac.cl == rvc_zero)
9339 frac.sign = value->sign;
9340 break;
9341 }
9342
9343 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9344 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9345 build_real (rettype, trunc));
9346 TREE_SIDE_EFFECTS (arg1) = 1;
9347 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9348 build_real (rettype, frac));
9349 }
9350
9351 return NULL_TREE;
9352 }
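/* An illustrative example for fold_builtin_modf above (not part of
   the original sources): modf (-3.5, &ip) folds to the pair
   (*&ip = -3.0, -0.5), and the sign fix-up above makes
   modf (-2.0, &ip) produce a -0.0 fractional part.  */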
9353
9354 /* Given a location LOC, an interclass builtin function decl FNDECL
9355 and its single argument ARG, return a folded expression computing
9356 the same, or NULL_TREE if we either couldn't or didn't want to fold
9357 (the latter happens if there's an RTL instruction available). */
9358
9359 static tree
9360 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9361 {
9362 machine_mode mode;
9363
9364 if (!validate_arg (arg, REAL_TYPE))
9365 return NULL_TREE;
9366
9367 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9368 return NULL_TREE;
9369
9370 mode = TYPE_MODE (TREE_TYPE (arg));
9371
9372 /* If there is no optab, try generic code. */
9373 switch (DECL_FUNCTION_CODE (fndecl))
9374 {
9375 tree result;
9376
9377 CASE_FLT_FN (BUILT_IN_ISINF):
9378 {
9379 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9380 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9381 tree const type = TREE_TYPE (arg);
9382 REAL_VALUE_TYPE r;
9383 char buf[128];
9384
9385 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9386 real_from_string (&r, buf);
9387 result = build_call_expr (isgr_fn, 2,
9388 fold_build1_loc (loc, ABS_EXPR, type, arg),
9389 build_real (type, r));
9390 return result;
9391 }
9392 CASE_FLT_FN (BUILT_IN_FINITE):
9393 case BUILT_IN_ISFINITE:
9394 {
9395 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9396 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9397 tree const type = TREE_TYPE (arg);
9398 REAL_VALUE_TYPE r;
9399 char buf[128];
9400
9401 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9402 real_from_string (&r, buf);
9403 result = build_call_expr (isle_fn, 2,
9404 fold_build1_loc (loc, ABS_EXPR, type, arg),
9405 build_real (type, r));
9406 /*result = fold_build2_loc (loc, UNGT_EXPR,
9407 TREE_TYPE (TREE_TYPE (fndecl)),
9408 fold_build1_loc (loc, ABS_EXPR, type, arg),
9409 build_real (type, r));
9410 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9411 TREE_TYPE (TREE_TYPE (fndecl)),
9412 result);*/
9413 return result;
9414 }
9415 case BUILT_IN_ISNORMAL:
9416 {
9417 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9418 islessequal(fabs(x),DBL_MAX). */
9419 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9420 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9421 tree const type = TREE_TYPE (arg);
9422 REAL_VALUE_TYPE rmax, rmin;
9423 char buf[128];
9424
9425 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9426 real_from_string (&rmax, buf);
9427 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9428 real_from_string (&rmin, buf);
9429 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9430 result = build_call_expr (isle_fn, 2, arg,
9431 build_real (type, rmax));
9432 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9433 build_call_expr (isge_fn, 2, arg,
9434 build_real (type, rmin)));
9435 return result;
9436 }
9437 default:
9438 break;
9439 }
9440
9441 return NULL_TREE;
9442 }
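/* An illustrative example for fold_builtin_interclass_mathfn above
   (not part of the original sources): for a double argument,
   get_max_float yields a hex-float string encoding DBL_MAX, so
   isinf (x) lowers to isgreater (fabs (x), DBL_MAX) and isfinite (x)
   to islessequal (fabs (x), DBL_MAX).  */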
9443
9444 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9445 ARG is the argument for the call. */
9446
9447 static tree
9448 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9449 {
9450 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9451 REAL_VALUE_TYPE r;
9452
9453 if (!validate_arg (arg, REAL_TYPE))
9454 return NULL_TREE;
9455
9456 switch (builtin_index)
9457 {
9458 case BUILT_IN_ISINF:
9459 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9460 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9461
9462 if (TREE_CODE (arg) == REAL_CST)
9463 {
9464 r = TREE_REAL_CST (arg);
9465 if (real_isinf (&r))
9466 return real_compare (GT_EXPR, &r, &dconst0)
9467 ? integer_one_node : integer_minus_one_node;
9468 else
9469 return integer_zero_node;
9470 }
9471
9472 return NULL_TREE;
9473
9474 case BUILT_IN_ISINF_SIGN:
9475 {
9476 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9477 /* In a boolean context, GCC will fold the inner COND_EXPR to
9478 1. So e.g. "if (isinf_sign(x))" would be folded to just
9479 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9480 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9481 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9482 tree tmp = NULL_TREE;
9483
9484 arg = builtin_save_expr (arg);
9485
9486 if (signbit_fn && isinf_fn)
9487 {
9488 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9489 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9490
9491 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9492 signbit_call, integer_zero_node);
9493 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9494 isinf_call, integer_zero_node);
9495
9496 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9497 integer_minus_one_node, integer_one_node);
9498 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9499 isinf_call, tmp,
9500 integer_zero_node);
9501 }
9502
9503 return tmp;
9504 }
9505
9506 case BUILT_IN_ISFINITE:
9507 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9508 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9509 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9510
9511 if (TREE_CODE (arg) == REAL_CST)
9512 {
9513 r = TREE_REAL_CST (arg);
9514 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9515 }
9516
9517 return NULL_TREE;
9518
9519 case BUILT_IN_ISNAN:
9520 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9521 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9522
9523 if (TREE_CODE (arg) == REAL_CST)
9524 {
9525 r = TREE_REAL_CST (arg);
9526 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9527 }
9528
9529 arg = builtin_save_expr (arg);
9530 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9531
9532 default:
9533 gcc_unreachable ();
9534 }
9535 }
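/* An illustrative example for fold_builtin_classify above (not part
   of the original sources): isinf_sign builds
   isinf (x) ? (signbit (x) != 0 ? -1 : 1) : 0, so
   isinf_sign (-HUGE_VAL) evaluates to -1 and any finite argument to
   0; a non-constant isnan (x) folds to the self-comparison
   UNORDERED_EXPR (x, x).  */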
9536
9537 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9538 This builtin will generate code to return the appropriate floating
9539 point classification depending on the value of the floating point
9540 number passed in. The possible return values must be supplied as
9541 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9542 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9543 one floating point argument which is "type generic". */
9544
9545 static tree
9546 fold_builtin_fpclassify (location_t loc, tree exp)
9547 {
9548 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9549 arg, type, res, tmp;
9550 machine_mode mode;
9551 REAL_VALUE_TYPE r;
9552 char buf[128];
9553
9554 /* Verify the required arguments in the original call. */
9555 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9556 INTEGER_TYPE, INTEGER_TYPE,
9557 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9558 return NULL_TREE;
9559
9560 fp_nan = CALL_EXPR_ARG (exp, 0);
9561 fp_infinite = CALL_EXPR_ARG (exp, 1);
9562 fp_normal = CALL_EXPR_ARG (exp, 2);
9563 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9564 fp_zero = CALL_EXPR_ARG (exp, 4);
9565 arg = CALL_EXPR_ARG (exp, 5);
9566 type = TREE_TYPE (arg);
9567 mode = TYPE_MODE (type);
9568 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9569
9570 /* fpclassify(x) ->
9571 isnan(x) ? FP_NAN :
9572 (fabs(x) == Inf ? FP_INFINITE :
9573 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9574 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9575
9576 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9577 build_real (type, dconst0));
9578 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9579 tmp, fp_zero, fp_subnormal);
9580
9581 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9582 real_from_string (&r, buf);
9583 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9584 arg, build_real (type, r));
9585 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9586
9587 if (HONOR_INFINITIES (mode))
9588 {
9589 real_inf (&r);
9590 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9591 build_real (type, r));
9592 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9593 fp_infinite, res);
9594 }
9595
9596 if (HONOR_NANS (mode))
9597 {
9598 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9599 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9600 }
9601
9602 return res;
9603 }
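/* An illustrative example for fold_builtin_fpclassify above (not
   part of the original sources): the nested COND_EXPRs test NaN
   first, then infinity, then fabs (x) >= DBL_MIN, so an argument of
   1.0 selects the caller-supplied FP_NORMAL value and 0.0 selects
   FP_ZERO.  */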
9604
9605 /* Fold a call to an unordered comparison function such as
9606 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9607 being called and ARG0 and ARG1 are the arguments for the call.
9608 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9609 the opposite of the desired result. UNORDERED_CODE is used
9610 for modes that can hold NaNs and ORDERED_CODE is used for
9611 the rest. */
9612
9613 static tree
9614 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9615 enum tree_code unordered_code,
9616 enum tree_code ordered_code)
9617 {
9618 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9619 enum tree_code code;
9620 tree type0, type1;
9621 enum tree_code code0, code1;
9622 tree cmp_type = NULL_TREE;
9623
9624 type0 = TREE_TYPE (arg0);
9625 type1 = TREE_TYPE (arg1);
9626
9627 code0 = TREE_CODE (type0);
9628 code1 = TREE_CODE (type1);
9629
9630 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9631 /* Choose the wider of two real types. */
9632 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9633 ? type0 : type1;
9634 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9635 cmp_type = type0;
9636 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9637 cmp_type = type1;
9638
9639 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9640 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9641
9642 if (unordered_code == UNORDERED_EXPR)
9643 {
9644 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9645 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9646 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9647 }
9648
9649 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9650 : ordered_code;
9651 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9652 fold_build2_loc (loc, code, type, arg0, arg1));
9653 }
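/* An illustrative example for fold_builtin_unordered_cmp above (not
   part of the original sources): isgreater (x, y) becomes
   !UNLE_EXPR (x, y) when the mode honors NaNs (a quiet comparison,
   so no invalid-operand exception is raised) and !LE_EXPR (x, y)
   otherwise.  */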
9654
9655 /* Fold a call to built-in function FNDECL with 0 arguments.
9656 IGNORE is true if the result of the function call is ignored. This
9657 function returns NULL_TREE if no simplification was possible. */
9658
9659 static tree
9660 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9661 {
9662 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9663 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9664 switch (fcode)
9665 {
9666 CASE_FLT_FN (BUILT_IN_INF):
9667 case BUILT_IN_INFD32:
9668 case BUILT_IN_INFD64:
9669 case BUILT_IN_INFD128:
9670 return fold_builtin_inf (loc, type, true);
9671
9672 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9673 return fold_builtin_inf (loc, type, false);
9674
9675 case BUILT_IN_CLASSIFY_TYPE:
9676 return fold_builtin_classify_type (NULL_TREE);
9677
9678 case BUILT_IN_UNREACHABLE:
9679 if (flag_sanitize & SANITIZE_UNREACHABLE
9680 && (current_function_decl == NULL
9681 || !lookup_attribute ("no_sanitize_undefined",
9682 DECL_ATTRIBUTES (current_function_decl))))
9683 return ubsan_instrument_unreachable (loc);
9684 break;
9685
9686 default:
9687 break;
9688 }
9689 return NULL_TREE;
9690 }
9691
9692 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9693 IGNORE is true if the result of the function call is ignored. This
9694 function returns NULL_TREE if no simplification was possible. */
9695
9696 static tree
9697 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9698 {
9699 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9700 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9701 switch (fcode)
9702 {
9703 case BUILT_IN_CONSTANT_P:
9704 {
9705 tree val = fold_builtin_constant_p (arg0);
9706
9707 /* Gimplification will pull the CALL_EXPR for the builtin out of
9708 an if condition. When not optimizing, we'll not CSE it back.
9709 To avoid link error types of regressions, return false now. */
9710 if (!val && !optimize)
9711 val = integer_zero_node;
9712
9713 return val;
9714 }
9715
9716 case BUILT_IN_CLASSIFY_TYPE:
9717 return fold_builtin_classify_type (arg0);
9718
9719 case BUILT_IN_STRLEN:
9720 return fold_builtin_strlen (loc, type, arg0);
9721
9722 CASE_FLT_FN (BUILT_IN_FABS):
9723 case BUILT_IN_FABSD32:
9724 case BUILT_IN_FABSD64:
9725 case BUILT_IN_FABSD128:
9726 return fold_builtin_fabs (loc, arg0, type);
9727
9728 case BUILT_IN_ABS:
9729 case BUILT_IN_LABS:
9730 case BUILT_IN_LLABS:
9731 case BUILT_IN_IMAXABS:
9732 return fold_builtin_abs (loc, arg0, type);
9733
9734 CASE_FLT_FN (BUILT_IN_CONJ):
9735 if (validate_arg (arg0, COMPLEX_TYPE)
9736 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9737 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9738 break;
9739
9740 CASE_FLT_FN (BUILT_IN_CREAL):
9741 if (validate_arg (arg0, COMPLEX_TYPE)
9742 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9743 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9744 break;
9745
9746 CASE_FLT_FN (BUILT_IN_CIMAG):
9747 if (validate_arg (arg0, COMPLEX_TYPE)
9748 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9749 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9750 break;
9751
9752 CASE_FLT_FN (BUILT_IN_CCOS):
9753 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9754
9755 CASE_FLT_FN (BUILT_IN_CCOSH):
9756 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9757
9758 CASE_FLT_FN (BUILT_IN_CPROJ):
9759 return fold_builtin_cproj (loc, arg0, type);
9760
9761 CASE_FLT_FN (BUILT_IN_CSIN):
9762 if (validate_arg (arg0, COMPLEX_TYPE)
9763 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9764 return do_mpc_arg1 (arg0, type, mpc_sin);
9765 break;
9766
9767 CASE_FLT_FN (BUILT_IN_CSINH):
9768 if (validate_arg (arg0, COMPLEX_TYPE)
9769 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9770 return do_mpc_arg1 (arg0, type, mpc_sinh);
9771 break;
9772
9773 CASE_FLT_FN (BUILT_IN_CTAN):
9774 if (validate_arg (arg0, COMPLEX_TYPE)
9775 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9776 return do_mpc_arg1 (arg0, type, mpc_tan);
9777 break;
9778
9779 CASE_FLT_FN (BUILT_IN_CTANH):
9780 if (validate_arg (arg0, COMPLEX_TYPE)
9781 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9782 return do_mpc_arg1 (arg0, type, mpc_tanh);
9783 break;
9784
9785 CASE_FLT_FN (BUILT_IN_CLOG):
9786 if (validate_arg (arg0, COMPLEX_TYPE)
9787 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9788 return do_mpc_arg1 (arg0, type, mpc_log);
9789 break;
9790
9791 CASE_FLT_FN (BUILT_IN_CSQRT):
9792 if (validate_arg (arg0, COMPLEX_TYPE)
9793 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9794 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9795 break;
9796
9797 CASE_FLT_FN (BUILT_IN_CASIN):
9798 if (validate_arg (arg0, COMPLEX_TYPE)
9799 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9800 return do_mpc_arg1 (arg0, type, mpc_asin);
9801 break;
9802
9803 CASE_FLT_FN (BUILT_IN_CACOS):
9804 if (validate_arg (arg0, COMPLEX_TYPE)
9805 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9806 return do_mpc_arg1 (arg0, type, mpc_acos);
9807 break;
9808
9809 CASE_FLT_FN (BUILT_IN_CATAN):
9810 if (validate_arg (arg0, COMPLEX_TYPE)
9811 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9812 return do_mpc_arg1 (arg0, type, mpc_atan);
9813 break;
9814
9815 CASE_FLT_FN (BUILT_IN_CASINH):
9816 if (validate_arg (arg0, COMPLEX_TYPE)
9817 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9818 return do_mpc_arg1 (arg0, type, mpc_asinh);
9819 break;
9820
9821 CASE_FLT_FN (BUILT_IN_CACOSH):
9822 if (validate_arg (arg0, COMPLEX_TYPE)
9823 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9824 return do_mpc_arg1 (arg0, type, mpc_acosh);
9825 break;
9826
9827 CASE_FLT_FN (BUILT_IN_CATANH):
9828 if (validate_arg (arg0, COMPLEX_TYPE)
9829 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9830 return do_mpc_arg1 (arg0, type, mpc_atanh);
9831 break;
9832
9833 CASE_FLT_FN (BUILT_IN_CABS):
9834 return fold_builtin_cabs (loc, arg0, type, fndecl);
9835
9836 CASE_FLT_FN (BUILT_IN_CARG):
9837 return fold_builtin_carg (loc, arg0, type);
9838
9839 CASE_FLT_FN (BUILT_IN_SQRT):
9840 return fold_builtin_sqrt (loc, arg0, type);
9841
9842 CASE_FLT_FN (BUILT_IN_CBRT):
9843 return fold_builtin_cbrt (loc, arg0, type);
9844
9845 CASE_FLT_FN (BUILT_IN_ASIN):
9846 if (validate_arg (arg0, REAL_TYPE))
9847 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9848 &dconstm1, &dconst1, true);
9849 break;
9850
9851 CASE_FLT_FN (BUILT_IN_ACOS):
9852 if (validate_arg (arg0, REAL_TYPE))
9853 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9854 &dconstm1, &dconst1, true);
9855 break;
9856
9857 CASE_FLT_FN (BUILT_IN_ATAN):
9858 if (validate_arg (arg0, REAL_TYPE))
9859 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9860 break;
9861
9862 CASE_FLT_FN (BUILT_IN_ASINH):
9863 if (validate_arg (arg0, REAL_TYPE))
9864 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9865 break;
9866
9867 CASE_FLT_FN (BUILT_IN_ACOSH):
9868 if (validate_arg (arg0, REAL_TYPE))
9869 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9870 &dconst1, NULL, true);
9871 break;
9872
9873 CASE_FLT_FN (BUILT_IN_ATANH):
9874 if (validate_arg (arg0, REAL_TYPE))
9875 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9876 &dconstm1, &dconst1, false);
9877 break;
9878
9879 CASE_FLT_FN (BUILT_IN_SIN):
9880 if (validate_arg (arg0, REAL_TYPE))
9881 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9882 break;
9883
9884 CASE_FLT_FN (BUILT_IN_COS):
9885 return fold_builtin_cos (loc, arg0, type, fndecl);
9886
9887 CASE_FLT_FN (BUILT_IN_TAN):
9888 return fold_builtin_tan (arg0, type);
9889
9890 CASE_FLT_FN (BUILT_IN_CEXP):
9891 return fold_builtin_cexp (loc, arg0, type);
9892
9893 CASE_FLT_FN (BUILT_IN_CEXPI):
9894 if (validate_arg (arg0, REAL_TYPE))
9895 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9896 break;
9897
9898 CASE_FLT_FN (BUILT_IN_SINH):
9899 if (validate_arg (arg0, REAL_TYPE))
9900 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9901 break;
9902
9903 CASE_FLT_FN (BUILT_IN_COSH):
9904 return fold_builtin_cosh (loc, arg0, type, fndecl);
9905
9906 CASE_FLT_FN (BUILT_IN_TANH):
9907 if (validate_arg (arg0, REAL_TYPE))
9908 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9909 break;
9910
9911 CASE_FLT_FN (BUILT_IN_ERF):
9912 if (validate_arg (arg0, REAL_TYPE))
9913 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9914 break;
9915
9916 CASE_FLT_FN (BUILT_IN_ERFC):
9917 if (validate_arg (arg0, REAL_TYPE))
9918 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9919 break;
9920
9921 CASE_FLT_FN (BUILT_IN_TGAMMA):
9922 if (validate_arg (arg0, REAL_TYPE))
9923 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9924 break;
9925
9926 CASE_FLT_FN (BUILT_IN_EXP):
9927 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9928
9929 CASE_FLT_FN (BUILT_IN_EXP2):
9930 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9931
9932 CASE_FLT_FN (BUILT_IN_EXP10):
9933 CASE_FLT_FN (BUILT_IN_POW10):
9934 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9935
9936 CASE_FLT_FN (BUILT_IN_EXPM1):
9937 if (validate_arg (arg0, REAL_TYPE))
9938 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9939 break;
9940
9941 CASE_FLT_FN (BUILT_IN_LOG):
9942 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9943
9944 CASE_FLT_FN (BUILT_IN_LOG2):
9945 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9946
9947 CASE_FLT_FN (BUILT_IN_LOG10):
9948 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9949
9950 CASE_FLT_FN (BUILT_IN_LOG1P):
9951 if (validate_arg (arg0, REAL_TYPE))
9952 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9953 &dconstm1, NULL, false);
9954 break;
9955
9956 CASE_FLT_FN (BUILT_IN_J0):
9957 if (validate_arg (arg0, REAL_TYPE))
9958 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9959 NULL, NULL, 0);
9960 break;
9961
9962 CASE_FLT_FN (BUILT_IN_J1):
9963 if (validate_arg (arg0, REAL_TYPE))
9964 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9965 NULL, NULL, 0);
9966 break;
9967
9968 CASE_FLT_FN (BUILT_IN_Y0):
9969 if (validate_arg (arg0, REAL_TYPE))
9970 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9971 &dconst0, NULL, false);
9972 break;
9973
9974 CASE_FLT_FN (BUILT_IN_Y1):
9975 if (validate_arg (arg0, REAL_TYPE))
9976 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9977 &dconst0, NULL, false);
9978 break;
9979
9980 CASE_FLT_FN (BUILT_IN_NAN):
9981 case BUILT_IN_NAND32:
9982 case BUILT_IN_NAND64:
9983 case BUILT_IN_NAND128:
9984 return fold_builtin_nan (arg0, type, true);
9985
9986 CASE_FLT_FN (BUILT_IN_NANS):
9987 return fold_builtin_nan (arg0, type, false);
9988
9989 CASE_FLT_FN (BUILT_IN_FLOOR):
9990 return fold_builtin_floor (loc, fndecl, arg0);
9991
9992 CASE_FLT_FN (BUILT_IN_CEIL):
9993 return fold_builtin_ceil (loc, fndecl, arg0);
9994
9995 CASE_FLT_FN (BUILT_IN_TRUNC):
9996 return fold_builtin_trunc (loc, fndecl, arg0);
9997
9998 CASE_FLT_FN (BUILT_IN_ROUND):
9999 return fold_builtin_round (loc, fndecl, arg0);
10000
10001 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10002 CASE_FLT_FN (BUILT_IN_RINT):
10003 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10004
10005 CASE_FLT_FN (BUILT_IN_ICEIL):
10006 CASE_FLT_FN (BUILT_IN_LCEIL):
10007 CASE_FLT_FN (BUILT_IN_LLCEIL):
10008 CASE_FLT_FN (BUILT_IN_LFLOOR):
10009 CASE_FLT_FN (BUILT_IN_IFLOOR):
10010 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10011 CASE_FLT_FN (BUILT_IN_IROUND):
10012 CASE_FLT_FN (BUILT_IN_LROUND):
10013 CASE_FLT_FN (BUILT_IN_LLROUND):
10014 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10015
10016 CASE_FLT_FN (BUILT_IN_IRINT):
10017 CASE_FLT_FN (BUILT_IN_LRINT):
10018 CASE_FLT_FN (BUILT_IN_LLRINT):
10019 return fold_fixed_mathfn (loc, fndecl, arg0);
10020
10021 case BUILT_IN_BSWAP16:
10022 case BUILT_IN_BSWAP32:
10023 case BUILT_IN_BSWAP64:
10024 return fold_builtin_bswap (fndecl, arg0);
10025
10026 CASE_INT_FN (BUILT_IN_FFS):
10027 CASE_INT_FN (BUILT_IN_CLZ):
10028 CASE_INT_FN (BUILT_IN_CTZ):
10029 CASE_INT_FN (BUILT_IN_CLRSB):
10030 CASE_INT_FN (BUILT_IN_POPCOUNT):
10031 CASE_INT_FN (BUILT_IN_PARITY):
10032 return fold_builtin_bitop (fndecl, arg0);
10033
10034 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10035 return fold_builtin_signbit (loc, arg0, type);
10036
10037 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10038 return fold_builtin_significand (loc, arg0, type);
10039
10040 CASE_FLT_FN (BUILT_IN_ILOGB):
10041 CASE_FLT_FN (BUILT_IN_LOGB):
10042 return fold_builtin_logb (loc, arg0, type);
10043
10044 case BUILT_IN_ISASCII:
10045 return fold_builtin_isascii (loc, arg0);
10046
10047 case BUILT_IN_TOASCII:
10048 return fold_builtin_toascii (loc, arg0);
10049
10050 case BUILT_IN_ISDIGIT:
10051 return fold_builtin_isdigit (loc, arg0);
10052
10053 CASE_FLT_FN (BUILT_IN_FINITE):
10054 case BUILT_IN_FINITED32:
10055 case BUILT_IN_FINITED64:
10056 case BUILT_IN_FINITED128:
10057 case BUILT_IN_ISFINITE:
10058 {
10059 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10060 if (ret)
10061 return ret;
10062 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10063 }
10064
10065 CASE_FLT_FN (BUILT_IN_ISINF):
10066 case BUILT_IN_ISINFD32:
10067 case BUILT_IN_ISINFD64:
10068 case BUILT_IN_ISINFD128:
10069 {
10070 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10071 if (ret)
10072 return ret;
10073 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10074 }
10075
10076 case BUILT_IN_ISNORMAL:
10077 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10078
10079 case BUILT_IN_ISINF_SIGN:
10080 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10081
10082 CASE_FLT_FN (BUILT_IN_ISNAN):
10083 case BUILT_IN_ISNAND32:
10084 case BUILT_IN_ISNAND64:
10085 case BUILT_IN_ISNAND128:
10086 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10087
10088 case BUILT_IN_PRINTF:
10089 case BUILT_IN_PRINTF_UNLOCKED:
10090 case BUILT_IN_VPRINTF:
10091 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10092
10093 case BUILT_IN_FREE:
10094 if (integer_zerop (arg0))
10095 return build_empty_stmt (loc);
10096 break;
10097
10098 default:
10099 break;
10100 }
10101
10102 return NULL_TREE;
10104 }
10105
10106 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10107 IGNORE is true if the result of the function call is ignored. This
10108 function returns NULL_TREE if no simplification was possible. */
10109
10110 static tree
10111 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10112 {
10113 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10114 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10115
10116 switch (fcode)
10117 {
10118 CASE_FLT_FN (BUILT_IN_JN):
10119 if (validate_arg (arg0, INTEGER_TYPE)
10120 && validate_arg (arg1, REAL_TYPE))
10121 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10122 break;
10123
10124 CASE_FLT_FN (BUILT_IN_YN):
10125 if (validate_arg (arg0, INTEGER_TYPE)
10126 && validate_arg (arg1, REAL_TYPE))
10127 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10128 &dconst0, false);
10129 break;
10130
10131 CASE_FLT_FN (BUILT_IN_DREM):
10132 CASE_FLT_FN (BUILT_IN_REMAINDER):
10133 if (validate_arg (arg0, REAL_TYPE)
10134 && validate_arg (arg1, REAL_TYPE))
10135 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10136 break;
10137
10138 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10139 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10140 if (validate_arg (arg0, REAL_TYPE)
10141 && validate_arg (arg1, POINTER_TYPE))
10142 return do_mpfr_lgamma_r (arg0, arg1, type);
10143 break;
10144
10145 CASE_FLT_FN (BUILT_IN_ATAN2):
10146 if (validate_arg (arg0, REAL_TYPE)
10147 && validate_arg (arg1, REAL_TYPE))
10148 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10149 break;
10150
10151 CASE_FLT_FN (BUILT_IN_FDIM):
10152 if (validate_arg (arg0, REAL_TYPE)
10153 && validate_arg (arg1, REAL_TYPE))
10154 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10155 break;
10156
10157 CASE_FLT_FN (BUILT_IN_HYPOT):
10158 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10159
10160 CASE_FLT_FN (BUILT_IN_CPOW):
10161 if (validate_arg (arg0, COMPLEX_TYPE)
10162 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10163 && validate_arg (arg1, COMPLEX_TYPE)
10164 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10165 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10166 break;
10167
10168 CASE_FLT_FN (BUILT_IN_LDEXP):
10169 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10170 CASE_FLT_FN (BUILT_IN_SCALBN):
10171 CASE_FLT_FN (BUILT_IN_SCALBLN):
10172 return fold_builtin_load_exponent (loc, arg0, arg1,
10173 type, /*ldexp=*/false);
10174
10175 CASE_FLT_FN (BUILT_IN_FREXP):
10176 return fold_builtin_frexp (loc, arg0, arg1, type);
10177
10178 CASE_FLT_FN (BUILT_IN_MODF):
10179 return fold_builtin_modf (loc, arg0, arg1, type);
10180
10181 case BUILT_IN_STRSTR:
10182 return fold_builtin_strstr (loc, arg0, arg1, type);
10183
10184 case BUILT_IN_STRSPN:
10185 return fold_builtin_strspn (loc, arg0, arg1);
10186
10187 case BUILT_IN_STRCSPN:
10188 return fold_builtin_strcspn (loc, arg0, arg1);
10189
10190 case BUILT_IN_STRCHR:
10191 case BUILT_IN_INDEX:
10192 return fold_builtin_strchr (loc, arg0, arg1, type);
10193
10194 case BUILT_IN_STRRCHR:
10195 case BUILT_IN_RINDEX:
10196 return fold_builtin_strrchr (loc, arg0, arg1, type);
10197
10198 case BUILT_IN_STPCPY:
10199 if (ignore)
10200 {
10201 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10202 if (!fn)
10203 break;
10204
10205 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10206 }
10207 else
10208 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10209 break;
10210
10211 case BUILT_IN_STRCMP:
10212 return fold_builtin_strcmp (loc, arg0, arg1);
10213
10214 case BUILT_IN_STRPBRK:
10215 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10216
10217 case BUILT_IN_EXPECT:
10218 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10219
10220 CASE_FLT_FN (BUILT_IN_POW):
10221 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10222
10223 CASE_FLT_FN (BUILT_IN_POWI):
10224 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10225
10226 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10227 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10228
10229 CASE_FLT_FN (BUILT_IN_FMIN):
10230 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10231
10232 CASE_FLT_FN (BUILT_IN_FMAX):
10233 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10234
10235 case BUILT_IN_ISGREATER:
10236 return fold_builtin_unordered_cmp (loc, fndecl,
10237 arg0, arg1, UNLE_EXPR, LE_EXPR);
10238 case BUILT_IN_ISGREATEREQUAL:
10239 return fold_builtin_unordered_cmp (loc, fndecl,
10240 arg0, arg1, UNLT_EXPR, LT_EXPR);
10241 case BUILT_IN_ISLESS:
10242 return fold_builtin_unordered_cmp (loc, fndecl,
10243 arg0, arg1, UNGE_EXPR, GE_EXPR);
10244 case BUILT_IN_ISLESSEQUAL:
10245 return fold_builtin_unordered_cmp (loc, fndecl,
10246 arg0, arg1, UNGT_EXPR, GT_EXPR);
10247 case BUILT_IN_ISLESSGREATER:
10248 return fold_builtin_unordered_cmp (loc, fndecl,
10249 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10250 case BUILT_IN_ISUNORDERED:
10251 return fold_builtin_unordered_cmp (loc, fndecl,
10252 arg0, arg1, UNORDERED_EXPR,
10253 NOP_EXPR);
10254
10255 /* We do the folding for va_start in the expander. */
10256 case BUILT_IN_VA_START:
10257 break;
10258
10259 case BUILT_IN_OBJECT_SIZE:
10260 return fold_builtin_object_size (arg0, arg1);
10261
10262 case BUILT_IN_PRINTF:
10263 case BUILT_IN_PRINTF_UNLOCKED:
10264 case BUILT_IN_VPRINTF:
10265 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10266
10267 case BUILT_IN_PRINTF_CHK:
10268 case BUILT_IN_VPRINTF_CHK:
10269 if (!validate_arg (arg0, INTEGER_TYPE)
10270 || TREE_SIDE_EFFECTS (arg0))
10271 return NULL_TREE;
10272 else
10273 return fold_builtin_printf (loc, fndecl,
10274 arg1, NULL_TREE, ignore, fcode);
10275 break;
10276
10277 case BUILT_IN_FPRINTF:
10278 case BUILT_IN_FPRINTF_UNLOCKED:
10279 case BUILT_IN_VFPRINTF:
10280 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10281 ignore, fcode);
10282
10283 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10284 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10285
10286 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10287 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10288
10289 default:
10290 break;
10291 }
10292 return NULL_TREE;
10293 }
10294
10295 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10296 and ARG2. IGNORE is true if the result of the function call is ignored.
10297 This function returns NULL_TREE if no simplification was possible. */
10298
10299 static tree
10300 fold_builtin_3 (location_t loc, tree fndecl,
10301 tree arg0, tree arg1, tree arg2, bool ignore)
10302 {
10303 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10304 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10305 switch (fcode)
10306 {
10307
10308 CASE_FLT_FN (BUILT_IN_SINCOS):
10309 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10310
10311 CASE_FLT_FN (BUILT_IN_FMA):
10312 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10314
10315 CASE_FLT_FN (BUILT_IN_REMQUO):
10316 if (validate_arg (arg0, REAL_TYPE)
10317 && validate_arg (arg1, REAL_TYPE)
10318 && validate_arg (arg2, POINTER_TYPE))
10319 return do_mpfr_remquo (arg0, arg1, arg2);
10320 break;
10321
10322 case BUILT_IN_STRNCAT:
10323 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10324
10325 case BUILT_IN_STRNCMP:
10326 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10327
10328 case BUILT_IN_MEMCHR:
10329 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10330
10331 case BUILT_IN_BCMP:
10332 case BUILT_IN_MEMCMP:
10333 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10334
10335 case BUILT_IN_PRINTF_CHK:
10336 case BUILT_IN_VPRINTF_CHK:
10337 if (!validate_arg (arg0, INTEGER_TYPE)
10338 || TREE_SIDE_EFFECTS (arg0))
10339 return NULL_TREE;
10340 else
10341 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10342 break;
10343
10344 case BUILT_IN_FPRINTF:
10345 case BUILT_IN_FPRINTF_UNLOCKED:
10346 case BUILT_IN_VFPRINTF:
10347 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10348 ignore, fcode);
10349
10350 case BUILT_IN_FPRINTF_CHK:
10351 case BUILT_IN_VFPRINTF_CHK:
10352 if (!validate_arg (arg1, INTEGER_TYPE)
10353 || TREE_SIDE_EFFECTS (arg1))
10354 return NULL_TREE;
10355 else
10356 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10357 ignore, fcode);
10358
10359 case BUILT_IN_EXPECT:
10360 return fold_builtin_expect (loc, arg0, arg1, arg2);
10361
10362 default:
10363 break;
10364 }
10365 return NULL_TREE;
10366 }
10367
10368 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10369 ARG2, and ARG3. IGNORE is true if the result of the function call is
10370 ignored. This function returns NULL_TREE if no simplification was
10371 possible. */
10372
10373 static tree
10374 fold_builtin_4 (location_t loc, tree fndecl,
10375 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10376 {
10377 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10378
10379 switch (fcode)
10380 {
10381 case BUILT_IN_FPRINTF_CHK:
10382 case BUILT_IN_VFPRINTF_CHK:
10383 if (!validate_arg (arg1, INTEGER_TYPE)
10384 || TREE_SIDE_EFFECTS (arg1))
10385 return NULL_TREE;
10386 else
10387 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10388 ignore, fcode);
10389 break;
10390
10391 default:
10392 break;
10393 }
10394 return NULL_TREE;
10395 }
10396
10397 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10398 arguments, where NARGS <= 4. IGNORE is true if the result of the
10399 function call is ignored. This function returns NULL_TREE if no
10400 simplification was possible. Note that this only folds builtins with
10401 fixed argument patterns. Foldings that do varargs-to-varargs
10402 transformations, or that match calls with more than 4 arguments,
10403 need to be handled with fold_builtin_varargs instead. */
10404
10405 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10406
10407 tree
10408 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10409 {
10410 tree ret = NULL_TREE;
10411
10412 switch (nargs)
10413 {
10414 case 0:
10415 ret = fold_builtin_0 (loc, fndecl, ignore);
10416 break;
10417 case 1:
10418 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10419 break;
10420 case 2:
10421 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10422 break;
10423 case 3:
10424 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10425 break;
10426 case 4:
10427 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10428 ignore);
10429 break;
10430 default:
10431 break;
10432 }
10433 if (ret)
10434 {
10435 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10436 SET_EXPR_LOCATION (ret, loc);
10437 TREE_NO_WARNING (ret) = 1;
10438 return ret;
10439 }
10440 return NULL_TREE;
10441 }
10442
10443 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10444 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10445 of arguments in ARGS to be omitted. OLDNARGS is the number of
10446 elements in ARGS. */
10447
10448 static tree
10449 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10450 int skip, tree fndecl, int n, va_list newargs)
10451 {
10452 int nargs = oldnargs - skip + n;
10453 tree *buffer;
10454
10455 if (n > 0)
10456 {
10457 int i, j;
10458
10459 buffer = XALLOCAVEC (tree, nargs);
10460 for (i = 0; i < n; i++)
10461 buffer[i] = va_arg (newargs, tree);
10462 for (j = skip; j < oldnargs; j++, i++)
10463 buffer[i] = args[j];
10464 }
10465 else
10466 buffer = args + skip;
10467
10468 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10469 }
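
/* A minimal standalone sketch (not GCC code) of the splicing done by
   rewrite_call_expr_valist above: the N new arguments come first,
   followed by the old arguments past SKIP.  Plain ints stand in for
   trees; the names here are hypothetical.  */
#if 0
#include <stdio.h>

static void
splice (int oldnargs, const int *args, int skip,
        const int *newargs, int n, int *out)
{
  int i, j;
  for (i = 0; i < n; i++)
    out[i] = newargs[i];		/* New arguments first.  */
  for (j = skip; j < oldnargs; j++, i++)
    out[i] = args[j];			/* Then the old tail.  */
}

int
main (void)
{
  int args[] = { 10, 20, 30, 40 }, newargs[] = { 1, 2 }, out[5];
  /* oldnargs 4, skip 1, n 2 => nargs 5; drops 10, yields 1 2 20 30 40.  */
  splice (4, args, 1, newargs, 2, out);
  for (int i = 0; i < 5; i++)
    printf ("%d ", out[i]);
  return 0;
}
#endif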
10470
10471 /* Return true if FNDECL shouldn't be folded right now.
10472 If a built-in function has an inline attribute always_inline
10473 wrapper, defer folding it after always_inline functions have
10474 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10475 might not be performed. */
10476
10477 bool
10478 avoid_folding_inline_builtin (tree fndecl)
10479 {
10480 return (DECL_DECLARED_INLINE_P (fndecl)
10481 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10482 && cfun
10483 && !cfun->always_inline_functions_inlined
10484 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10485 }
10486
10487 /* A wrapper function for builtin folding that prevents warnings for
10488 "statement without effect" and the like, caused by removing the
10489 call node earlier than the warning is generated. */
10490
10491 tree
10492 fold_call_expr (location_t loc, tree exp, bool ignore)
10493 {
10494 tree ret = NULL_TREE;
10495 tree fndecl = get_callee_fndecl (exp);
10496 if (fndecl
10497 && TREE_CODE (fndecl) == FUNCTION_DECL
10498 && DECL_BUILT_IN (fndecl)
10499 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10500 yet. Defer folding until we see all the arguments
10501 (after inlining). */
10502 && !CALL_EXPR_VA_ARG_PACK (exp))
10503 {
10504 int nargs = call_expr_nargs (exp);
10505
10506 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10507 the last argument is instead __builtin_va_arg_pack (). Defer folding
10508 even in that case, until arguments are finalized. */
10509 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10510 {
10511 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10512 if (fndecl2
10513 && TREE_CODE (fndecl2) == FUNCTION_DECL
10514 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10515 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10516 return NULL_TREE;
10517 }
10518
10519 if (avoid_folding_inline_builtin (fndecl))
10520 return NULL_TREE;
10521
10522 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10523 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10524 CALL_EXPR_ARGP (exp), ignore);
10525 else
10526 {
10527 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10528 {
10529 tree *args = CALL_EXPR_ARGP (exp);
10530 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10531 }
10532 if (!ret)
10533 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10534 if (ret)
10535 return ret;
10536 }
10537 }
10538 return NULL_TREE;
10539 }
10540
10541 /* Construct a CALL_EXPR of type TYPE with FN as the function expression.
10542 N arguments are passed in the array ARGARRAY. */
10543
10544 tree
10545 fold_builtin_call_array (location_t loc, tree type,
10546 tree fn,
10547 int n,
10548 tree *argarray)
10549 {
10550 tree ret = NULL_TREE;
10551 tree exp;
10552
10553 if (TREE_CODE (fn) == ADDR_EXPR)
10554 {
10555 tree fndecl = TREE_OPERAND (fn, 0);
10556 if (TREE_CODE (fndecl) == FUNCTION_DECL
10557 && DECL_BUILT_IN (fndecl))
10558 {
10559 /* If last argument is __builtin_va_arg_pack (), arguments to this
10560 function are not finalized yet. Defer folding until they are. */
10561 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10562 {
10563 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10564 if (fndecl2
10565 && TREE_CODE (fndecl2) == FUNCTION_DECL
10566 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10567 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10568 return build_call_array_loc (loc, type, fn, n, argarray);
10569 }
10570 if (avoid_folding_inline_builtin (fndecl))
10571 return build_call_array_loc (loc, type, fn, n, argarray);
10572 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10573 {
10574 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10575 if (ret)
10576 return ret;
10577
10578 return build_call_array_loc (loc, type, fn, n, argarray);
10579 }
10580 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10581 {
10582 /* First try the transformations that don't require consing up
10583 an exp. */
10584 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10585 if (ret)
10586 return ret;
10587 }
10588
10589 /* If we got this far, we need to build an exp. */
10590 exp = build_call_array_loc (loc, type, fn, n, argarray);
10591 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10592 return ret ? ret : exp;
10593 }
10594 }
10595
10596 return build_call_array_loc (loc, type, fn, n, argarray);
10597 }
10598
10599 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10600 along with N new arguments specified as the "..." parameters. SKIP
10601 is the number of arguments in EXP to be omitted. This function is used
10602 to do varargs-to-varargs transformations. */
10603
10604 static tree
10605 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10606 {
10607 va_list ap;
10608 tree t;
10609
10610 va_start (ap, n);
10611 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10612 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10613 va_end (ap);
10614
10615 return t;
10616 }
10617
10618 /* Validate a single argument ARG against a tree code CODE representing
10619 a type. */
10620
10621 static bool
10622 validate_arg (const_tree arg, enum tree_code code)
10623 {
10624 if (!arg)
10625 return false;
10626 else if (code == POINTER_TYPE)
10627 return POINTER_TYPE_P (TREE_TYPE (arg));
10628 else if (code == INTEGER_TYPE)
10629 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10630 return code == TREE_CODE (TREE_TYPE (arg));
10631 }
10632
10633 /* This function validates the types of a function call argument list
10634 against a specified list of tree_codes. If the last specifier is a 0,
10635 that represents an ellipsis; otherwise the last specifier must be a
10636 VOID_TYPE.
10637
10638 This is the GIMPLE version of validate_arglist. Eventually we want to
10639 completely convert builtins.c to work from GIMPLEs and the tree based
10640 validate_arglist will then be removed. */
10641
10642 bool
10643 validate_gimple_arglist (const_gimple call, ...)
10644 {
10645 enum tree_code code;
10646 bool res = false;
10647 va_list ap;
10648 const_tree arg;
10649 size_t i;
10650
10651 va_start (ap, call);
10652 i = 0;
10653
10654 do
10655 {
10656 code = (enum tree_code) va_arg (ap, int);
10657 switch (code)
10658 {
10659 case 0:
10660 /* This signifies an ellipsis; any further arguments are all ok. */
10661 res = true;
10662 goto end;
10663 case VOID_TYPE:
10664 /* This signifies an endlink; if no arguments remain, return
10665 true, otherwise return false. */
10666 res = (i == gimple_call_num_args (call));
10667 goto end;
10668 default:
10669 /* If no parameters remain or the parameter's code does not
10670 match the specified code, return false. Otherwise continue
10671 checking any remaining arguments. */
10672 arg = gimple_call_arg (call, i++);
10673 if (!validate_arg (arg, code))
10674 goto end;
10675 break;
10676 }
10677 }
10678 while (1);
10679
10680 /* We need gotos here since we can only call va_end once in a
10681 function. */
10682 end: ;
10683 va_end (ap);
10684
10685 return res;
10686 }
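
/* Example uses of the convention above (a sketch with a hypothetical
   caller): a trailing VOID_TYPE demands an exact match, while a
   trailing 0 permits any further arguments.  */
#if 0
  /* Require exactly (pointer, integer).  */
  if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_TREE;
  /* Require a leading pointer; anything may follow.  */
  if (!validate_gimple_arglist (call, POINTER_TYPE, 0))
    return NULL_TREE;
#endif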
10687
10688 /* Default target-specific builtin expander that does nothing. */
10689
10690 rtx
10691 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10692 rtx target ATTRIBUTE_UNUSED,
10693 rtx subtarget ATTRIBUTE_UNUSED,
10694 machine_mode mode ATTRIBUTE_UNUSED,
10695 int ignore ATTRIBUTE_UNUSED)
10696 {
10697 return NULL_RTX;
10698 }
10699
10700 /* Returns true if EXP represents data that would potentially reside
10701 in a readonly section. */
10702
10703 bool
10704 readonly_data_expr (tree exp)
10705 {
10706 STRIP_NOPS (exp);
10707
10708 if (TREE_CODE (exp) != ADDR_EXPR)
10709 return false;
10710
10711 exp = get_base_address (TREE_OPERAND (exp, 0));
10712 if (!exp)
10713 return false;
10714
10715 /* Make sure we call decl_readonly_section only for trees it
10716 can handle (since it returns true for everything it doesn't
10717 understand). */
10718 if (TREE_CODE (exp) == STRING_CST
10719 || TREE_CODE (exp) == CONSTRUCTOR
10720 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10721 return decl_readonly_section (exp, 0);
10722 else
10723 return false;
10724 }
10725
10726 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10727 to the call, and TYPE is its return type.
10728
10729 Return NULL_TREE if no simplification was possible, otherwise return the
10730 simplified form of the call as a tree.
10731
10732 The simplified form may be a constant or other expression which
10733 computes the same value, but in a more efficient manner (including
10734 calls to other builtin functions).
10735
10736 The call may contain arguments which need to be evaluated, but
10737 which are not useful to determine the result of the call. In
10738 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10739 COMPOUND_EXPR will be an argument which must be evaluated.
10740 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10741 COMPOUND_EXPR in the chain will contain the tree for the simplified
10742 form of the builtin function call. */
10743
10744 static tree
10745 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10746 {
10747 if (!validate_arg (s1, POINTER_TYPE)
10748 || !validate_arg (s2, POINTER_TYPE))
10749 return NULL_TREE;
10750 else
10751 {
10752 tree fn;
10753 const char *p1, *p2;
10754
10755 p2 = c_getstr (s2);
10756 if (p2 == NULL)
10757 return NULL_TREE;
10758
10759 p1 = c_getstr (s1);
10760 if (p1 != NULL)
10761 {
10762 const char *r = strstr (p1, p2);
10763 tree tem;
10764
10765 if (r == NULL)
10766 return build_int_cst (TREE_TYPE (s1), 0);
10767
10768 /* Return an offset into the constant string argument. */
10769 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10770 return fold_convert_loc (loc, type, tem);
10771 }
10772
10773 /* The argument is const char *, and the result is char *, so we need
10774 a type conversion here to avoid a warning. */
10775 if (p2[0] == '\0')
10776 return fold_convert_loc (loc, type, s1);
10777
10778 if (p2[1] != '\0')
10779 return NULL_TREE;
10780
10781 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10782 if (!fn)
10783 return NULL_TREE;
10784
10785 /* New argument list transforming strstr(s1, s2) to
10786 strchr(s1, s2[0]). */
10787 return build_call_expr_loc (loc, fn, 2, s1,
10788 build_int_cst (integer_type_node, p2[0]));
10789 }
10790 }
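
/* Illustrative source-level equivalents of the strstr folds above:

     strstr ("abcde", "cd")  folds to  "abcde" + 2
     strstr (s, "")          becomes   (char *) s
     strstr (s, "c")         becomes   strchr (s, 'c')  */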
10791
10792 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10793 the call, and TYPE is its return type.
10794
10795 Return NULL_TREE if no simplification was possible, otherwise return the
10796 simplified form of the call as a tree.
10797
10798 The simplified form may be a constant or other expression which
10799 computes the same value, but in a more efficient manner (including
10800 calls to other builtin functions).
10801
10802 The call may contain arguments which need to be evaluated, but
10803 which are not useful to determine the result of the call. In
10804 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10805 COMPOUND_EXPR will be an argument which must be evaluated.
10806 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10807 COMPOUND_EXPR in the chain will contain the tree for the simplified
10808 form of the builtin function call. */
10809
10810 static tree
10811 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10812 {
10813 if (!validate_arg (s1, POINTER_TYPE)
10814 || !validate_arg (s2, INTEGER_TYPE))
10815 return NULL_TREE;
10816 else
10817 {
10818 const char *p1;
10819
10820 if (TREE_CODE (s2) != INTEGER_CST)
10821 return NULL_TREE;
10822
10823 p1 = c_getstr (s1);
10824 if (p1 != NULL)
10825 {
10826 char c;
10827 const char *r;
10828 tree tem;
10829
10830 if (target_char_cast (s2, &c))
10831 return NULL_TREE;
10832
10833 r = strchr (p1, c);
10834
10835 if (r == NULL)
10836 return build_int_cst (TREE_TYPE (s1), 0);
10837
10838 /* Return an offset into the constant string argument. */
10839 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10840 return fold_convert_loc (loc, type, tem);
10841 }
10842 return NULL_TREE;
10843 }
10844 }
10845
10846 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10847 the call, and TYPE is its return type.
10848
10849 Return NULL_TREE if no simplification was possible, otherwise return the
10850 simplified form of the call as a tree.
10851
10852 The simplified form may be a constant or other expression which
10853 computes the same value, but in a more efficient manner (including
10854 calls to other builtin functions).
10855
10856 The call may contain arguments which need to be evaluated, but
10857 which are not useful to determine the result of the call. In
10858 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10859 COMPOUND_EXPR will be an argument which must be evaluated.
10860 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10861 COMPOUND_EXPR in the chain will contain the tree for the simplified
10862 form of the builtin function call. */
10863
10864 static tree
10865 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10866 {
10867 if (!validate_arg (s1, POINTER_TYPE)
10868 || !validate_arg (s2, INTEGER_TYPE))
10869 return NULL_TREE;
10870 else
10871 {
10872 tree fn;
10873 const char *p1;
10874
10875 if (TREE_CODE (s2) != INTEGER_CST)
10876 return NULL_TREE;
10877
10878 p1 = c_getstr (s1);
10879 if (p1 != NULL)
10880 {
10881 char c;
10882 const char *r;
10883 tree tem;
10884
10885 if (target_char_cast (s2, &c))
10886 return NULL_TREE;
10887
10888 r = strrchr (p1, c);
10889
10890 if (r == NULL)
10891 return build_int_cst (TREE_TYPE (s1), 0);
10892
10893 /* Return an offset into the constant string argument. */
10894 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10895 return fold_convert_loc (loc, type, tem);
10896 }
10897
10898 if (! integer_zerop (s2))
10899 return NULL_TREE;
10900
10901 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10902 if (!fn)
10903 return NULL_TREE;
10904
10905 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10906 return build_call_expr_loc (loc, fn, 2, s1, s2);
10907 }
10908 }
10909
10910 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10911 to the call, and TYPE is its return type.
10912
10913 Return NULL_TREE if no simplification was possible, otherwise return the
10914 simplified form of the call as a tree.
10915
10916 The simplified form may be a constant or other expression which
10917 computes the same value, but in a more efficient manner (including
10918 calls to other builtin functions).
10919
10920 The call may contain arguments which need to be evaluated, but
10921 which are not useful to determine the result of the call. In
10922 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10923 COMPOUND_EXPR will be an argument which must be evaluated.
10924 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10925 COMPOUND_EXPR in the chain will contain the tree for the simplified
10926 form of the builtin function call. */
10927
10928 static tree
10929 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10930 {
10931 if (!validate_arg (s1, POINTER_TYPE)
10932 || !validate_arg (s2, POINTER_TYPE))
10933 return NULL_TREE;
10934 else
10935 {
10936 tree fn;
10937 const char *p1, *p2;
10938
10939 p2 = c_getstr (s2);
10940 if (p2 == NULL)
10941 return NULL_TREE;
10942
10943 p1 = c_getstr (s1);
10944 if (p1 != NULL)
10945 {
10946 const char *r = strpbrk (p1, p2);
10947 tree tem;
10948
10949 if (r == NULL)
10950 return build_int_cst (TREE_TYPE (s1), 0);
10951
10952 /* Return an offset into the constant string argument. */
10953 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10954 return fold_convert_loc (loc, type, tem);
10955 }
10956
10957 if (p2[0] == '\0')
10958 /* strpbrk(x, "") == NULL.
10959 Evaluate and ignore s1 in case it had side-effects. */
10960 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10961
10962 if (p2[1] != '\0')
10963 return NULL_TREE; /* Really call strpbrk. */
10964
10965 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10966 if (!fn)
10967 return NULL_TREE;
10968
10969 /* New argument list transforming strpbrk(s1, s2) to
10970 strchr(s1, s2[0]). */
10971 return build_call_expr_loc (loc, fn, 2, s1,
10972 build_int_cst (integer_type_node, p2[0]));
10973 }
10974 }
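
/* Illustrative source-level equivalents of the strpbrk folds above:

     strpbrk ("abc", "bx")  folds to  "abc" + 1
     strpbrk (s, "")        becomes   NULL, after evaluating s
     strpbrk (s, "c")       becomes   strchr (s, 'c')  */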
10975
10976 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10977 arguments to the call.
10978
10979 Return NULL_TREE if no simplification was possible, otherwise return the
10980 simplified form of the call as a tree.
10981
10982 The simplified form may be a constant or other expression which
10983 computes the same value, but in a more efficient manner (including
10984 calls to other builtin functions).
10985
10986 The call may contain arguments which need to be evaluated, but
10987 which are not useful to determine the result of the call. In
10988 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10989 COMPOUND_EXPR will be an argument which must be evaluated.
10990 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10991 COMPOUND_EXPR in the chain will contain the tree for the simplified
10992 form of the builtin function call. */
10993
10994 static tree
10995 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
10996 {
10997 if (!validate_arg (dst, POINTER_TYPE)
10998 || !validate_arg (src, POINTER_TYPE)
10999 || !validate_arg (len, INTEGER_TYPE))
11000 return NULL_TREE;
11001 else
11002 {
11003 const char *p = c_getstr (src);
11004
11005 /* If the requested length is zero, or the src parameter string
11006 length is zero, return the dst parameter. */
11007 if (integer_zerop (len) || (p && *p == '\0'))
11008 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11009
11010 /* If the requested len is greater than or equal to the string
11011 length, call strcat. */
11012 if (TREE_CODE (len) == INTEGER_CST && p
11013 && compare_tree_int (len, strlen (p)) >= 0)
11014 {
11015 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11016
11017 /* If the replacement _DECL isn't initialized, don't do the
11018 transformation. */
11019 if (!fn)
11020 return NULL_TREE;
11021
11022 return build_call_expr_loc (loc, fn, 2, dst, src);
11023 }
11024 return NULL_TREE;
11025 }
11026 }
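
/* Illustrative source-level equivalents of the strncat folds above:

     strncat (d, s, 0)     becomes  d, after evaluating s
     strncat (d, "", n)    becomes  d, after evaluating n
     strncat (d, "ab", 5)  becomes  strcat (d, "ab")  */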
11027
11028 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11029 to the call.
11030
11031 Return NULL_TREE if no simplification was possible, otherwise return the
11032 simplified form of the call as a tree.
11033
11034 The simplified form may be a constant or other expression which
11035 computes the same value, but in a more efficient manner (including
11036 calls to other builtin functions).
11037
11038 The call may contain arguments which need to be evaluated, but
11039 which are not useful to determine the result of the call. In
11040 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11041 COMPOUND_EXPR will be an argument which must be evaluated.
11042 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11043 COMPOUND_EXPR in the chain will contain the tree for the simplified
11044 form of the builtin function call. */
11045
11046 static tree
11047 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11048 {
11049 if (!validate_arg (s1, POINTER_TYPE)
11050 || !validate_arg (s2, POINTER_TYPE))
11051 return NULL_TREE;
11052 else
11053 {
11054 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11055
11056 /* If both arguments are constants, evaluate at compile-time. */
11057 if (p1 && p2)
11058 {
11059 const size_t r = strspn (p1, p2);
11060 return build_int_cst (size_type_node, r);
11061 }
11062
11063 /* If either argument is "", the result is 0. */
11064 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11065 /* Evaluate and ignore both arguments in case either one has
11066 side-effects. */
11067 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11068 s1, s2);
11069 return NULL_TREE;
11070 }
11071 }
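
/* Illustrative examples of the strspn folds above:

     strspn ("aab", "ab")  folds to  (size_t) 3
     strspn (s, "")        becomes   0, after evaluating both arguments
     strspn ("", s)        becomes   0 likewise  */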
11072
11073 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11074 to the call.
11075
11076 Return NULL_TREE if no simplification was possible, otherwise return the
11077 simplified form of the call as a tree.
11078
11079 The simplified form may be a constant or other expression which
11080 computes the same value, but in a more efficient manner (including
11081 calls to other builtin functions).
11082
11083 The call may contain arguments which need to be evaluated, but
11084 which are not useful to determine the result of the call. In
11085 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11086 COMPOUND_EXPR will be an argument which must be evaluated.
11087 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11088 COMPOUND_EXPR in the chain will contain the tree for the simplified
11089 form of the builtin function call. */
11090
11091 static tree
11092 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11093 {
11094 if (!validate_arg (s1, POINTER_TYPE)
11095 || !validate_arg (s2, POINTER_TYPE))
11096 return NULL_TREE;
11097 else
11098 {
11099 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11100
11101 /* If both arguments are constants, evaluate at compile-time. */
11102 if (p1 && p2)
11103 {
11104 const size_t r = strcspn (p1, p2);
11105 return build_int_cst (size_type_node, r);
11106 }
11107
11108 /* If the first argument is "", the result is 0. */
11109 if (p1 && *p1 == '\0')
11110 {
11111 /* Evaluate and ignore argument s2 in case it has
11112 side-effects. */
11113 return omit_one_operand_loc (loc, size_type_node,
11114 size_zero_node, s2);
11115 }
11116
11117 /* If the second argument is "", return __builtin_strlen(s1). */
11118 if (p2 && *p2 == '\0')
11119 {
11120 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11121
11122 /* If the replacement _DECL isn't initialized, don't do the
11123 transformation. */
11124 if (!fn)
11125 return NULL_TREE;
11126
11127 return build_call_expr_loc (loc, fn, 1, s1);
11128 }
11129 return NULL_TREE;
11130 }
11131 }
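
/* Illustrative examples of the strcspn folds above:

     strcspn ("abc", "c")  folds to  (size_t) 2
     strcspn ("", s)       becomes   0, after evaluating s
     strcspn (s, "")       becomes   strlen (s)  */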
11132
11133 /* Fold the next_arg or va_start call EXP. Return true if an error
11134 was produced, false otherwise. This is done so that we don't emit
11135 the same error or warning twice or three times. */
11136
11137 bool
11138 fold_builtin_next_arg (tree exp, bool va_start_p)
11139 {
11140 tree fntype = TREE_TYPE (current_function_decl);
11141 int nargs = call_expr_nargs (exp);
11142 tree arg;
11143 /* There is a good chance the current input_location points inside the
11144 definition of the va_start macro (perhaps on the token for
11145 builtin) in a system header, so warnings will not be emitted.
11146 Use the location in real source code. */
11147 source_location current_location =
11148 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11149 NULL);
11150
11151 if (!stdarg_p (fntype))
11152 {
11153 error ("%<va_start%> used in function with fixed args");
11154 return true;
11155 }
11156
11157 if (va_start_p)
11158 {
11159 if (nargs != 2)
11160 {
11161 error ("wrong number of arguments to function %<va_start%>");
11162 return true;
11163 }
11164 arg = CALL_EXPR_ARG (exp, 1);
11165 }
11166 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
11167 once we have checked the arguments and, if needed, issued a warning. */
11168 else
11169 {
11170 if (nargs == 0)
11171 {
11172 /* Evidently an out of date version of <stdarg.h>; can't validate
11173 va_start's second argument, but can still work as intended. */
11174 warning_at (current_location,
11175 OPT_Wvarargs,
11176 "%<__builtin_next_arg%> called without an argument");
11177 return true;
11178 }
11179 else if (nargs > 1)
11180 {
11181 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11182 return true;
11183 }
11184 arg = CALL_EXPR_ARG (exp, 0);
11185 }
11186
11187 if (TREE_CODE (arg) == SSA_NAME)
11188 arg = SSA_NAME_VAR (arg);
11189
11190 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11191 or __builtin_next_arg (0) the first time we see it, after checking
11192 the arguments and if needed issuing a warning. */
11193 if (!integer_zerop (arg))
11194 {
11195 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11196
11197 /* Strip off all nops for the sake of the comparison. This
11198 is not quite the same as STRIP_NOPS. It does more.
11199 We must also strip off INDIRECT_EXPR for C++ reference
11200 parameters. */
11201 while (CONVERT_EXPR_P (arg)
11202 || TREE_CODE (arg) == INDIRECT_REF)
11203 arg = TREE_OPERAND (arg, 0);
11204 if (arg != last_parm)
11205 {
11206 /* FIXME: Sometimes the tree optimizers give us something other
11207 than the last named argument even though the user wrote the
11208 last one. We only warn here, so wrong code may still be
11209 generated because of it. */
11211 warning_at (current_location,
11212 OPT_Wvarargs,
11213 "second parameter of %<va_start%> not last named argument");
11214 }
11215
11216 /* Undefined by C99 7.15.1.4p4 (va_start):
11217 "If the parameter parmN is declared with the register storage
11218 class, with a function or array type, or with a type that is
11219 not compatible with the type that results after application of
11220 the default argument promotions, the behavior is undefined."
11221 */
11222 else if (DECL_REGISTER (arg))
11223 {
11224 warning_at (current_location,
11225 OPT_Wvarargs,
11226 "undefined behaviour when second parameter of "
11227 "%<va_start%> is declared with %<register%> storage");
11228 }
11229
11230 /* We want to verify the second parameter just once before the tree
11231 optimizers are run and then avoid keeping it in the tree,
11232 as otherwise we could warn even for correct code like:
11233 void foo (int i, ...)
11234 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11235 if (va_start_p)
11236 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11237 else
11238 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11239 }
11240 return false;
11241 }
11242
11243
11244 /* Expand a call EXP to __builtin_object_size. */
11245
11246 static rtx
11247 expand_builtin_object_size (tree exp)
11248 {
11249 tree ost;
11250 int object_size_type;
11251 tree fndecl = get_callee_fndecl (exp);
11252
11253 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11254 {
11255 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11256 exp, fndecl);
11257 expand_builtin_trap ();
11258 return const0_rtx;
11259 }
11260
11261 ost = CALL_EXPR_ARG (exp, 1);
11262 STRIP_NOPS (ost);
11263
11264 if (TREE_CODE (ost) != INTEGER_CST
11265 || tree_int_cst_sgn (ost) < 0
11266 || compare_tree_int (ost, 3) > 0)
11267 {
11268 error ("%Klast argument of %D is not integer constant between 0 and 3",
11269 exp, fndecl);
11270 expand_builtin_trap ();
11271 return const0_rtx;
11272 }
11273
11274 object_size_type = tree_to_shwi (ost);
11275
11276 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11277 }
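
/* A small self-contained example (a sketch; compile with GCC at -O2)
   of the fallback values chosen above when nothing better is known:
   types 0 and 1 yield (size_t) -1, types 2 and 3 yield 0.  */
#if 0
#include <stdio.h>

__attribute__ ((noinline)) static char *
launder (char *p)	/* Hides the origin of P from the objsz pass.  */
{
  return p;
}

int
main (void)
{
  char buf[16];
  printf ("%zu\n", __builtin_object_size (buf + 4, 0));	      /* 12 */
  printf ("%zu\n", __builtin_object_size (launder (buf), 0)); /* (size_t) -1 */
  printf ("%zu\n", __builtin_object_size (launder (buf), 2)); /* 0 */
  return 0;
}
#endif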
11278
11279 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11280 FCODE is the BUILT_IN_* to use.
11281 Return NULL_RTX if we failed; the caller should emit a normal call,
11282 otherwise try to get the result in TARGET, if convenient (and in
11283 mode MODE if that's convenient). */
11284
11285 static rtx
11286 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11287 enum built_in_function fcode)
11288 {
11289 tree dest, src, len, size;
11290
11291 if (!validate_arglist (exp,
11292 POINTER_TYPE,
11293 fcode == BUILT_IN_MEMSET_CHK
11294 ? INTEGER_TYPE : POINTER_TYPE,
11295 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11296 return NULL_RTX;
11297
11298 dest = CALL_EXPR_ARG (exp, 0);
11299 src = CALL_EXPR_ARG (exp, 1);
11300 len = CALL_EXPR_ARG (exp, 2);
11301 size = CALL_EXPR_ARG (exp, 3);
11302
11303 if (! tree_fits_uhwi_p (size))
11304 return NULL_RTX;
11305
11306 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11307 {
11308 tree fn;
11309
11310 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11311 {
11312 warning_at (tree_nonartificial_location (exp),
11313 0, "%Kcall to %D will always overflow destination buffer",
11314 exp, get_callee_fndecl (exp));
11315 return NULL_RTX;
11316 }
11317
11318 fn = NULL_TREE;
11319 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11320 mem{cpy,pcpy,move,set} is available. */
11321 switch (fcode)
11322 {
11323 case BUILT_IN_MEMCPY_CHK:
11324 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11325 break;
11326 case BUILT_IN_MEMPCPY_CHK:
11327 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11328 break;
11329 case BUILT_IN_MEMMOVE_CHK:
11330 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11331 break;
11332 case BUILT_IN_MEMSET_CHK:
11333 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11334 break;
11335 default:
11336 break;
11337 }
11338
11339 if (! fn)
11340 return NULL_RTX;
11341
11342 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11343 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11344 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11345 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11346 }
11347 else if (fcode == BUILT_IN_MEMSET_CHK)
11348 return NULL_RTX;
11349 else
11350 {
11351 unsigned int dest_align = get_pointer_alignment (dest);
11352
11353 /* If DEST is not a pointer type, call the normal function. */
11354 if (dest_align == 0)
11355 return NULL_RTX;
11356
11357 /* If SRC and DEST are the same (and not volatile), do nothing. */
11358 if (operand_equal_p (src, dest, 0))
11359 {
11360 tree expr;
11361
11362 if (fcode != BUILT_IN_MEMPCPY_CHK)
11363 {
11364 /* Evaluate and ignore LEN in case it has side-effects. */
11365 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11366 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11367 }
11368
11369 expr = fold_build_pointer_plus (dest, len);
11370 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11371 }
11372
11373 /* __memmove_chk special case. */
11374 if (fcode == BUILT_IN_MEMMOVE_CHK)
11375 {
11376 unsigned int src_align = get_pointer_alignment (src);
11377
11378 if (src_align == 0)
11379 return NULL_RTX;
11380
11381 /* If src is categorized for a readonly section we can use
11382 normal __memcpy_chk. */
11383 if (readonly_data_expr (src))
11384 {
11385 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11386 if (!fn)
11387 return NULL_RTX;
11388 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11389 dest, src, len, size);
11390 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11391 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11392 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11393 }
11394 }
11395 return NULL_RTX;
11396 }
11397 }
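
/* A source-level sketch of what the expansion above achieves for
   _FORTIFY_SOURCE-style calls (illustrative only):

     __builtin___memcpy_chk (d, s, n, -1)    ->  memcpy (d, s, n)
       (object size unknown: no checking is possible)
     __builtin___memcpy_chk (d, s, 4, 16)    ->  memcpy (d, s, 4)
       (constant length known not to overflow)
     __builtin___memcpy_chk (d, s, 32, 16)   ->  warning, and the call
       to the checking library function is emitted as-is  */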
11398
11399 /* Emit warning if a buffer overflow is detected at compile time. */
11400
11401 static void
11402 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11403 {
11404 int is_strlen = 0;
11405 tree len, size;
11406 location_t loc = tree_nonartificial_location (exp);
11407
11408 switch (fcode)
11409 {
11410 case BUILT_IN_STRCPY_CHK:
11411 case BUILT_IN_STPCPY_CHK:
11412 /* For __strcat_chk the warning will be emitted only if overflowing
11413 by at least strlen (dest) + 1 bytes. */
11414 case BUILT_IN_STRCAT_CHK:
11415 len = CALL_EXPR_ARG (exp, 1);
11416 size = CALL_EXPR_ARG (exp, 2);
11417 is_strlen = 1;
11418 break;
11419 case BUILT_IN_STRNCAT_CHK:
11420 case BUILT_IN_STRNCPY_CHK:
11421 case BUILT_IN_STPNCPY_CHK:
11422 len = CALL_EXPR_ARG (exp, 2);
11423 size = CALL_EXPR_ARG (exp, 3);
11424 break;
11425 case BUILT_IN_SNPRINTF_CHK:
11426 case BUILT_IN_VSNPRINTF_CHK:
11427 len = CALL_EXPR_ARG (exp, 1);
11428 size = CALL_EXPR_ARG (exp, 3);
11429 break;
11430 default:
11431 gcc_unreachable ();
11432 }
11433
11434 if (!len || !size)
11435 return;
11436
11437 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11438 return;
11439
11440 if (is_strlen)
11441 {
11442 len = c_strlen (len, 1);
11443 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11444 return;
11445 }
11446 else if (fcode == BUILT_IN_STRNCAT_CHK)
11447 {
11448 tree src = CALL_EXPR_ARG (exp, 1);
11449 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11450 return;
11451 src = c_strlen (src, 1);
11452 if (! src || ! tree_fits_uhwi_p (src))
11453 {
11454 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11455 exp, get_callee_fndecl (exp));
11456 return;
11457 }
11458 else if (tree_int_cst_lt (src, size))
11459 return;
11460 }
11461 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11462 return;
11463
11464 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11465 exp, get_callee_fndecl (exp));
11466 }
11467
11468 /* Emit warning if a buffer overflow is detected at compile time
11469 in __sprintf_chk/__vsprintf_chk calls. */
11470
11471 static void
11472 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11473 {
11474 tree size, len, fmt;
11475 const char *fmt_str;
11476 int nargs = call_expr_nargs (exp);
11477
11478 /* Verify the required arguments in the original call. */
11479
11480 if (nargs < 4)
11481 return;
11482 size = CALL_EXPR_ARG (exp, 2);
11483 fmt = CALL_EXPR_ARG (exp, 3);
11484
11485 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11486 return;
11487
11488 /* Check whether the format is a literal string constant. */
11489 fmt_str = c_getstr (fmt);
11490 if (fmt_str == NULL)
11491 return;
11492
11493 if (!init_target_chars ())
11494 return;
11495
11496 /* If the format doesn't contain % args or %%, we know its size. */
11497 if (strchr (fmt_str, target_percent) == 0)
11498 len = build_int_cstu (size_type_node, strlen (fmt_str));
11499 /* If the format is "%s" and the first ... argument is a string
11500 literal, we know the size too. */
11501 else if (fcode == BUILT_IN_SPRINTF_CHK
11502 && strcmp (fmt_str, target_percent_s) == 0)
11503 {
11504 tree arg;
11505
11506 if (nargs < 5)
11507 return;
11508 arg = CALL_EXPR_ARG (exp, 4);
11509 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11510 return;
11511
11512 len = c_strlen (arg, 1);
11513 if (!len || ! tree_fits_uhwi_p (len))
11514 return;
11515 }
11516 else
11517 return;
11518
11519 if (! tree_int_cst_lt (len, size))
11520 warning_at (tree_nonartificial_location (exp),
11521 0, "%Kcall to %D will always overflow destination buffer",
11522 exp, get_callee_fndecl (exp));
11523 }
11524
11525 /* Emit warning if a free is called with address of a variable. */
11526
11527 static void
11528 maybe_emit_free_warning (tree exp)
11529 {
11530 tree arg = CALL_EXPR_ARG (exp, 0);
11531
11532 STRIP_NOPS (arg);
11533 if (TREE_CODE (arg) != ADDR_EXPR)
11534 return;
11535
11536 arg = get_base_address (TREE_OPERAND (arg, 0));
11537 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11538 return;
11539
11540 if (SSA_VAR_P (arg))
11541 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11542 "%Kattempt to free a non-heap object %qD", exp, arg);
11543 else
11544 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11545 "%Kattempt to free a non-heap object", exp);
11546 }
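
/* An illustrative snippet that triggers the warning above.  */
#if 0
  int x;
  free (&x);	/* warning: attempt to free a non-heap object 'x' */
#endif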
11547
11548 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11549 if possible. */
11550
11551 static tree
11552 fold_builtin_object_size (tree ptr, tree ost)
11553 {
11554 unsigned HOST_WIDE_INT bytes;
11555 int object_size_type;
11556
11557 if (!validate_arg (ptr, POINTER_TYPE)
11558 || !validate_arg (ost, INTEGER_TYPE))
11559 return NULL_TREE;
11560
11561 STRIP_NOPS (ost);
11562
11563 if (TREE_CODE (ost) != INTEGER_CST
11564 || tree_int_cst_sgn (ost) < 0
11565 || compare_tree_int (ost, 3) > 0)
11566 return NULL_TREE;
11567
11568 object_size_type = tree_to_shwi (ost);
11569
11570 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11571 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11572 and (size_t) 0 for types 2 and 3. */
11573 if (TREE_SIDE_EFFECTS (ptr))
11574 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11575
11576 if (TREE_CODE (ptr) == ADDR_EXPR)
11577 {
11578 bytes = compute_builtin_object_size (ptr, object_size_type);
11579 if (wi::fits_to_tree_p (bytes, size_type_node))
11580 return build_int_cstu (size_type_node, bytes);
11581 }
11582 else if (TREE_CODE (ptr) == SSA_NAME)
11583 {
11584 /* If object size is not known yet, delay folding until
11585 later. Maybe subsequent passes will help determining
11586 it. */
11587 bytes = compute_builtin_object_size (ptr, object_size_type);
11588 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11589 && wi::fits_to_tree_p (bytes, size_type_node))
11590 return build_int_cstu (size_type_node, bytes);
11591 }
11592
11593 return NULL_TREE;
11594 }
11595
11596 /* Builtins with folding operations that operate on "..." arguments
11597 need special handling; we need to store the arguments in a convenient
11598 data structure before attempting any folding. Fortunately there are
11599 only a few builtins that fall into this category. FNDECL is the
11600 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11601 result of the function call is ignored. */
11602
11603 static tree
11604 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11605 bool ignore ATTRIBUTE_UNUSED)
11606 {
11607 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11608 tree ret = NULL_TREE;
11609
11610 switch (fcode)
11611 {
11612 case BUILT_IN_FPCLASSIFY:
11613 ret = fold_builtin_fpclassify (loc, exp);
11614 break;
11615
11616 default:
11617 break;
11618 }
11619 if (ret)
11620 {
11621 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11622 SET_EXPR_LOCATION (ret, loc);
11623 TREE_NO_WARNING (ret) = 1;
11624 return ret;
11625 }
11626 return NULL_TREE;
11627 }
11628
11629 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
11630 FMT and ARG are the arguments to the call; we don't fold cases with
11631 more than 2 arguments, and ARG may be null if this is a 1-argument case.
11632
11633 Return NULL_TREE if no simplification was possible, otherwise return the
11634 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11635 code of the function to be simplified. */
11636
11637 static tree
11638 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
11639 tree arg, bool ignore,
11640 enum built_in_function fcode)
11641 {
11642 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
11643 const char *fmt_str = NULL;
11644
11645 /* If the return value is used, don't do the transformation. */
11646 if (! ignore)
11647 return NULL_TREE;
11648
11649 /* Verify the required arguments in the original call. */
11650 if (!validate_arg (fmt, POINTER_TYPE))
11651 return NULL_TREE;
11652
11653 /* Check whether the format is a literal string constant. */
11654 fmt_str = c_getstr (fmt);
11655 if (fmt_str == NULL)
11656 return NULL_TREE;
11657
11658 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
11659 {
11660 /* If we're using an unlocked function, assume the other
11661 unlocked functions exist explicitly. */
11662 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
11663 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
11664 }
11665 else
11666 {
11667 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
11668 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
11669 }
11670
11671 if (!init_target_chars ())
11672 return NULL_TREE;
11673
11674 if (strcmp (fmt_str, target_percent_s) == 0
11675 || strchr (fmt_str, target_percent) == NULL)
11676 {
11677 const char *str;
11678
11679 if (strcmp (fmt_str, target_percent_s) == 0)
11680 {
11681 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11682 return NULL_TREE;
11683
11684 if (!arg || !validate_arg (arg, POINTER_TYPE))
11685 return NULL_TREE;
11686
11687 str = c_getstr (arg);
11688 if (str == NULL)
11689 return NULL_TREE;
11690 }
11691 else
11692 {
11693 /* The format specifier doesn't contain any '%' characters. */
11694 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
11695 && arg)
11696 return NULL_TREE;
11697 str = fmt_str;
11698 }
11699
11700 /* If the string was "", printf does nothing. */
11701 if (str[0] == '\0')
11702 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11703
11704 /* If the string has length 1, call putchar. */
11705 if (str[1] == '\0')
11706 {
11707 /* Given printf("c"), (where c is any one character,)
11708 convert "c"[0] to an int and pass that to the replacement
11709 function. */
11710 newarg = build_int_cst (integer_type_node, str[0]);
11711 if (fn_putchar)
11712 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
11713 }
11714 else
11715 {
11716 /* If the string was "string\n", call puts("string"). */
11717 size_t len = strlen (str);
11718 if ((unsigned char) str[len - 1] == target_newline
11719 && (size_t) (int) len == len
11720 && (int) len > 0)
11721 {
11722 char *newstr;
11723 tree offset_node, string_cst;
11724
11725 /* Create a NUL-terminated string that's one char shorter
11726 than the original, stripping off the trailing '\n'. */
11727 newarg = build_string_literal (len, str);
11728 string_cst = string_constant (newarg, &offset_node);
11729 gcc_checking_assert (string_cst
11730 && (TREE_STRING_LENGTH (string_cst)
11731 == (int) len)
11732 && integer_zerop (offset_node)
11733 && (unsigned char)
11734 TREE_STRING_POINTER (string_cst)[len - 1]
11735 == target_newline);
11736 /* build_string_literal creates a new STRING_CST,
11737 modify it in place to avoid double copying. */
11738 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
11739 newstr[len - 1] = '\0';
11740 if (fn_puts)
11741 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
11742 }
11743 else
11744 /* We'd like to arrange to call fputs(string,stdout) here,
11745 but we need stdout and don't have a way to get it yet. */
11746 return NULL_TREE;
11747 }
11748 }
11749
11750 /* The other optimizations can be done only on the non-va_list variants. */
11751 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11752 return NULL_TREE;
11753
11754 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
11755 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
11756 {
11757 if (!arg || !validate_arg (arg, POINTER_TYPE))
11758 return NULL_TREE;
11759 if (fn_puts)
11760 call = build_call_expr_loc (loc, fn_puts, 1, arg);
11761 }
11762
11763 /* If the format specifier was "%c", call __builtin_putchar(arg). */
11764 else if (strcmp (fmt_str, target_percent_c) == 0)
11765 {
11766 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11767 return NULL_TREE;
11768 if (fn_putchar)
11769 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
11770 }
11771
11772 if (!call)
11773 return NULL_TREE;
11774
11775 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
11776 }
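
/* Source-level summary of the printf folds above, all with the result
   unused (illustrative only):

     printf ("")         becomes  0 (no call at all)
     printf ("x")        becomes  putchar ('x')
     printf ("hi\n")     becomes  puts ("hi")
     printf ("%s\n", s)  becomes  puts (s)
     printf ("%c", c)    becomes  putchar (c)  */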
11777
11778 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
11779 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
11780 more than 3 arguments, and ARG may be null in the 2-argument case.
11781
11782 Return NULL_TREE if no simplification was possible, otherwise return the
11783 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11784 code of the function to be simplified. */
11785
11786 static tree
11787 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
11788 tree fmt, tree arg, bool ignore,
11789 enum built_in_function fcode)
11790 {
11791 tree fn_fputc, fn_fputs, call = NULL_TREE;
11792 const char *fmt_str = NULL;
11793
11794 /* If the return value is used, don't do the transformation. */
11795 if (! ignore)
11796 return NULL_TREE;
11797
11798 /* Verify the required arguments in the original call. */
11799 if (!validate_arg (fp, POINTER_TYPE))
11800 return NULL_TREE;
11801 if (!validate_arg (fmt, POINTER_TYPE))
11802 return NULL_TREE;
11803
11804 /* Check whether the format is a literal string constant. */
11805 fmt_str = c_getstr (fmt);
11806 if (fmt_str == NULL)
11807 return NULL_TREE;
11808
11809 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
11810 {
11811 /* If we're using an unlocked function, assume the other
11812 unlocked functions exist explicitly. */
11813 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
11814 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
11815 }
11816 else
11817 {
11818 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
11819 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
11820 }
11821
11822 if (!init_target_chars ())
11823 return NULL_TREE;
11824
11825 /* If the format doesn't contain % args or %%, use strcpy. */
11826 if (strchr (fmt_str, target_percent) == NULL)
11827 {
11828 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
11829 && arg)
11830 return NULL_TREE;
11831
11832 /* If the format specifier was "", fprintf does nothing. */
11833 if (fmt_str[0] == '\0')
11834 {
11835 /* If FP has side-effects, just wait until gimplification is
11836 done. */
11837 if (TREE_SIDE_EFFECTS (fp))
11838 return NULL_TREE;
11839
11840 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11841 }
11842
11843 /* When "string" doesn't contain %, replace all cases of
11844 fprintf (fp, string) with fputs (string, fp). The fputs
11845 builtin will take care of special cases like length == 1. */
11846 if (fn_fputs)
11847 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
11848 }
11849
11850 /* The other optimizations can be done only on the non-va_list variants. */
11851 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
11852 return NULL_TREE;
11853
11854 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
11855 else if (strcmp (fmt_str, target_percent_s) == 0)
11856 {
11857 if (!arg || !validate_arg (arg, POINTER_TYPE))
11858 return NULL_TREE;
11859 if (fn_fputs)
11860 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
11861 }
11862
11863 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
11864 else if (strcmp (fmt_str, target_percent_c) == 0)
11865 {
11866 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11867 return NULL_TREE;
11868 if (fn_fputc)
11869 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
11870 }
11871
11872 if (!call)
11873 return NULL_TREE;
11874 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
11875 }
11876
11877 /* Initialize format string characters in the target charset. */
11878
11879 bool
11880 init_target_chars (void)
11881 {
11882 static bool init;
11883 if (!init)
11884 {
11885 target_newline = lang_hooks.to_target_charset ('\n');
11886 target_percent = lang_hooks.to_target_charset ('%');
11887 target_c = lang_hooks.to_target_charset ('c');
11888 target_s = lang_hooks.to_target_charset ('s');
11889 if (target_newline == 0 || target_percent == 0 || target_c == 0
11890 || target_s == 0)
11891 return false;
11892
11893 target_percent_c[0] = target_percent;
11894 target_percent_c[1] = target_c;
11895 target_percent_c[2] = '\0';
11896
11897 target_percent_s[0] = target_percent;
11898 target_percent_s[1] = target_s;
11899 target_percent_s[2] = '\0';
11900
11901 target_percent_s_newline[0] = target_percent;
11902 target_percent_s_newline[1] = target_s;
11903 target_percent_s_newline[2] = target_newline;
11904 target_percent_s_newline[3] = '\0';
11905
11906 init = true;
11907 }
11908 return true;
11909 }
11910
11911 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11912 and no overflow/underflow occurred. INEXACT is true if M was not
11913 exactly calculated. TYPE is the tree type for the result. This
11914 function assumes that you cleared the MPFR flags and then
11915 calculated M to see if anything subsequently set a flag prior to
11916 entering this function. Return NULL_TREE if any checks fail. */
11917
11918 static tree
11919 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11920 {
11921 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11922 overflow/underflow occurred. If -frounding-math, proceed iff the
11923 result of calling FUNC was exact. */
11924 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11925 && (!flag_rounding_math || !inexact))
11926 {
11927 REAL_VALUE_TYPE rr;
11928
11929 real_from_mpfr (&rr, m, type, GMP_RNDN);
11930 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11931 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11932 but the mpfr_t is not, then we underflowed in the
11933 conversion. */
11934 if (real_isfinite (&rr)
11935 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11936 {
11937 REAL_VALUE_TYPE rmode;
11938
11939 real_convert (&rmode, TYPE_MODE (type), &rr);
11940 /* Proceed iff the specified mode can hold the value. */
11941 if (real_identical (&rmode, &rr))
11942 return build_real (type, rmode);
11943 }
11944 }
11945 return NULL_TREE;
11946 }
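
/* A standalone sketch of the MPFR protocol assumed above: clear the
   sticky flags, do the computation, then inspect the flags and the
   ternary ("inexact") return value.  Only documented mpfr.h entry
   points are used.  */
#if 0
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m;
  int inexact;

  mpfr_init2 (m, 53);			/* Precision of IEEE double.  */
  mpfr_set_d (m, 0.5, GMP_RNDN);
  mpfr_clear_flags ();			/* Step 1: clear the flags.  */
  inexact = mpfr_sin (m, m, GMP_RNDN);	/* Step 2: compute.  */
  /* Step 3: the checks performed by do_mpfr_ckconv.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ())
    printf ("sin (0.5) ~= %.17g, inexact = %d\n",
	    mpfr_get_d (m, GMP_RNDN), inexact);
  mpfr_clear (m);
  return 0;
}
#endif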
11947
11948 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11949 number and no overflow/underflow occurred. INEXACT is true if M
11950 was not exactly calculated. TYPE is the tree type for the result.
11951 This function assumes that you cleared the MPFR flags and then
11952 calculated M to see if anything subsequently set a flag prior to
11953 entering this function. Return NULL_TREE if any checks fail; if
11954 FORCE_CONVERT is true, the checks are bypassed. */
11955
11956 static tree
11957 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11958 {
11959 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11960 overflow/underflow occurred. If -frounding-math, proceed iff the
11961 result of calling FUNC was exact. */
11962 if (force_convert
11963 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11964 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11965 && (!flag_rounding_math || !inexact)))
11966 {
11967 REAL_VALUE_TYPE re, im;
11968
11969 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11970 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11971 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11972 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11973 but the mpfr_t is not, then we underflowed in the
11974 conversion. */
11975 if (force_convert
11976 || (real_isfinite (&re) && real_isfinite (&im)
11977 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11978 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11979 {
11980 REAL_VALUE_TYPE re_mode, im_mode;
11981
11982 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11983 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11984 /* Proceed iff the specified mode can hold the value. */
11985 if (force_convert
11986 || (real_identical (&re_mode, &re)
11987 && real_identical (&im_mode, &im)))
11988 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11989 build_real (TREE_TYPE (type), im_mode));
11990 }
11991 }
11992 return NULL_TREE;
11993 }
11994
11995 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11996 FUNC on it and return the resulting value as a tree with type TYPE.
11997 If MIN and/or MAX are not NULL, then the supplied ARG must be
11998 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11999 acceptable values, otherwise they are not. The mpfr precision is
12000 set to the precision of TYPE. We assume that function FUNC returns
12001 zero if the result could be calculated exactly within the requested
12002 precision. */
12003
12004 static tree
12005 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12006 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12007 bool inclusive)
12008 {
12009 tree result = NULL_TREE;
12010
12011 STRIP_NOPS (arg);
12012
12013 /* To proceed, MPFR must exactly represent the target floating point
12014 format, which only happens when the target base equals two. */
12015 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12016 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12017 {
12018 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12019
12020 if (real_isfinite (ra)
12021 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
12022 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
12023 {
12024 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12025 const int prec = fmt->p;
12026 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12027 int inexact;
12028 mpfr_t m;
12029
12030 mpfr_init2 (m, prec);
12031 mpfr_from_real (m, ra, GMP_RNDN);
12032 mpfr_clear_flags ();
12033 inexact = func (m, m, rnd);
12034 result = do_mpfr_ckconv (m, type, inexact);
12035 mpfr_clear (m);
12036 }
12037 }
12038
12039 return result;
12040 }
12041
12042 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12043 FUNC on it and return the resulting value as a tree with type TYPE.
12044 The mpfr precision is set to the precision of TYPE. We assume that
12045 function FUNC returns zero if the result could be calculated
12046 exactly within the requested precision. */
12047
12048 static tree
12049 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12050 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12051 {
12052 tree result = NULL_TREE;
12053
12054 STRIP_NOPS (arg1);
12055 STRIP_NOPS (arg2);
12056
12057 /* To proceed, MPFR must exactly represent the target floating point
12058 format, which only happens when the target base equals two. */
12059 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12060 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12061 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12062 {
12063 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12064 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12065
12066 if (real_isfinite (ra1) && real_isfinite (ra2))
12067 {
12068 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12069 const int prec = fmt->p;
12070 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12071 int inexact;
12072 mpfr_t m1, m2;
12073
12074 mpfr_inits2 (prec, m1, m2, NULL);
12075 mpfr_from_real (m1, ra1, GMP_RNDN);
12076 mpfr_from_real (m2, ra2, GMP_RNDN);
12077 mpfr_clear_flags ();
12078 inexact = func (m1, m1, m2, rnd);
12079 result = do_mpfr_ckconv (m1, type, inexact);
12080 mpfr_clears (m1, m2, NULL);
12081 }
12082 }
12083
12084 return result;
12085 }

/* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the three-argument
   mpfr function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of TYPE.
   We assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2, m3;

          mpfr_inits2 (prec, m1, m2, m3, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_from_real (m3, ra3, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, m3, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, m3, NULL);
        }
    }

  return result;
}
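
/* Illustrative sketch (assumption): folding fma on three REAL_CST
   operands would use the helper above as

     do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

   MPFR computes arg0 * arg1 + arg2 with a single rounding, so the fold
   succeeds exactly when that value is representable in TYPE.  */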

/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_s, result_c;
          int inexact;
          mpfr_t m, ms, mc;

          mpfr_inits2 (prec, m, ms, mc, NULL);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_sin_cos (ms, mc, m, rnd);
          result_s = do_mpfr_ckconv (ms, type, inexact);
          result_c = do_mpfr_ckconv (mc, type, inexact);
          mpfr_clears (m, ms, mc, NULL);
          if (result_s && result_c)
            {
              /* If we are to return the result as a complex value, do so.  */
              if (!arg_sinp && !arg_cosp)
                return build_complex (build_complex_type (type),
                                      result_c, result_s);

              /* Dereference the sin/cos pointer arguments.  */
              arg_sinp = build_fold_indirect_ref (arg_sinp);
              arg_cosp = build_fold_indirect_ref (arg_cosp);
              /* Proceed iff valid pointer types were passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
                  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
                {
                  /* Set the values.  */
                  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
                                          result_s);
                  TREE_SIDE_EFFECTS (result_s) = 1;
                  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
                                          result_c);
                  TREE_SIDE_EFFECTS (result_c) = 1;
                  /* Combine the assignments into a compound expr.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_s, result_c));
                }
            }
        }
    }
  return result;
}
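
/* The helper above is used in two modes (an illustrative summary, not
   code from this file): do_mpfr_sincos (arg, sinp, cosp) folds a
   sincos() call by storing through the two pointer operands, while
   do_mpfr_sincos (arg, NULL_TREE, NULL_TREE) folds cexpi() by returning
   cos(arg) + i*sin(arg) as a single COMPLEX_CST.  */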

/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */

static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long) n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
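
/* Illustrative sketch (assumption): jn is defined for all real x while
   yn requires x > 0, so the respective callers would look like

     do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, false);
     do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn, &dconst0, false);

   passing MIN = &dconst0 with INCLUSIVE = false to enforce the open
   bound 0 < x for yn.  */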

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, reduce the quo value modulo the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
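
/* A worked example (illustrative): for remquo (5.0, 2.0, &q), the
   round-to-nearest quotient of 5/2 is 2 (2.5 rounds to even), so the
   fold produces the remainder 5.0 - 2*2.0 = 1.0 and assigns q = 2 via
   the COMPOUND_EXPR built above.  */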

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
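
/* A worked example (illustrative): gamma(-0.5) = -2*sqrt(pi), roughly
   -3.5449, so folding lgamma_r (-0.5, &sg) yields log(3.5449...) which
   is about 1.2655 for the function value, and stores sg = -1 to match
   the sign of gamma at that point.  */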

/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}
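
/* Illustrative sketch (assumption): folding ccos of a COMPLEX_CST would
   call the helper above as

     do_mpc_arg1 (arg0, type, mpc_cos);

   with TYPE being the complex type of the call, whose element type
   supplies the MPFR precision used above.  */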

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
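
/* Illustrative sketch (assumption): cpow folding is the typical user of
   this two-argument entry point, along the lines of

     do_mpc_arg2 (arg0, arg1, type,
                  /*do_nonfinite=*/ flag_unsafe_math_optimizations,
                  mpc_pow);

   so Inf/NaN operands and results are only folded when unsafe math
   optimizations permit it.  */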

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}
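
/* For instance (an illustrative source-level example, not code from
   this file), a user redirection such as

     void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("__my_memcpy");

   reaches the BUILT_IN_MEMCPY case above, so both expanded block moves
   and the memcpy libfunc end up referencing __my_memcpy.  */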

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one that
   will most probably be expanded inline into reasonably simple code.
   This is a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}