/* Expand builtin functions.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "ubsan.h"
#include "cilk.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees and make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
                                  enum built_in_function);

static unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   or is a Cilk Plus runtime entry point when Cilk Plus is enabled.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
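
/* For illustration (example values; this table is not compiled):

     is_builtin_name ("__builtin_memcpy")  => true
     is_builtin_name ("__atomic_load_n")   => true
     is_builtin_name ("memcpy")            => false

   Only the spelling is tested here; whether a declaration really is a
   builtin is checked separately by is_builtin_fn below.  */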


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
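
/* A worked example of the above, with illustrative numbers: if
   get_object_alignment_1 reports align == 64 and bitpos == 16, the
   address satisfies ptr % 64 == 16 (in bits), so the largest power of
   two known to divide it is bitpos & -bitpos == 16 bits, i.e. the
   object is only known to be 2-byte aligned.  */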

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
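
/* A concrete example of the above (illustrative only): for the source
   expression "foo\0bar" + 2, string_constant yields the STRING_CST
   "foo\0bar" with constant offset 2, and the final strlen call returns
   ssize_int (1), the distance from 'o' to the internal zero byte.  */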

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
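
/* For example (illustrative): reading "abcd" in SImode yields
   0x64636261 on a little-endian target ('a' == 0x61 lands in the low
   byte) and 0x61626364 on a big-endian one.  Because CH sticks at zero
   once a zero byte is seen, bytes past a terminating NUL in a short
   string contribute zeros.  */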

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
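
/* The user-level view, for illustration (example code, not part of GCC):

     void *ra = __builtin_return_address (0);  // this frame's return address
     void *fp = __builtin_frame_address (1);   // the caller's frame address

   A nonzero argument walks the dynamic chain that many frames up, as in
   the loop above.  */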

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
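
/* For reference, the buffer layout established above, in Pmode words:

     buf[0]   frame value (targetm.builtin_setjmp_frame_value)
     buf[1]   address of the receiver label
     buf[2]+  machine-dependent stack save area (sa_mode)  */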

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
         previously set to the start of the virtual area corresponding to
         the stacked variables when we branched here and now needs to be
         adjusted to the actual hardware fp value.

         Assignments to virtual registers are converted by
         instantiate_virtual_regs into the corresponding assignment
         to the underlying register (fp in this case) that makes
         the original assignment true.
         So the following insn will actually be decrementing fp by
         STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
         Mark it used (so that the previous assignment remains live once
         the frame pointer is eliminated) and clobbered (to represent the
         implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require the user to pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}
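
/* For illustration, the source-level pairing the two expanders above
   implement (internal use only; `work' is a placeholder):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       work ();                    // may call __builtin_longjmp (buf, 1)
     else
       ;                           // control resumes here with value 1
*/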

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
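
/* For example (illustrative calls):

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments, while

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts one pointer followed by any number of further arguments.  */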

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
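
/* A typical source-level call that reaches this expander, for
   illustration (`p' and `stride' are placeholders):

     __builtin_prefetch (p + stride, 1, 3);  // write prefetch, high locality

   The second argument (0 read / 1 write) and third argument (locality
   0-3) must be compile-time constants, as checked above.  */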

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We want the pointer to the arguments as the caller actually passed
     them, not as we might have pretended they were passed.  Make sure
     it's a valid operand, as emit_move_insn isn't expected to handle
     a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
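
/* For illustration, the classic source-level use of the untyped-call
   machinery (example only; `fn' is a placeholder and 128 is an assumed
   upper bound on the size of the register block):

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*)()) fn, args, 128);
     __builtin_return (result);
*/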
1571
1572 /* Perform an untyped call and save the state required to perform an
1573 untyped return of whatever value was returned by the given function. */
1574
1575 static rtx
1576 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1577 {
1578 int size, align, regno;
1579 enum machine_mode mode;
1580 rtx incoming_args, result, reg, dest, src;
1581 rtx_call_insn *call_insn;
1582 rtx old_stack_level = 0;
1583 rtx call_fusage = 0;
1584 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1585
1586 arguments = convert_memory_address (Pmode, arguments);
1587
1588 /* Create a block where the return registers can be saved. */
1589 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1590
1591 /* Fetch the arg pointer from the ARGUMENTS block. */
1592 incoming_args = gen_reg_rtx (Pmode);
1593 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1594 #ifndef STACK_GROWS_DOWNWARD
1595 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1596 incoming_args, 0, OPTAB_LIB_WIDEN);
1597 #endif
1598
1599 /* Push a new argument block and copy the arguments. Do not allow
1600 the (potential) memcpy call below to interfere with our stack
1601 manipulations. */
1602 do_pending_stack_adjust ();
1603 NO_DEFER_POP;
1604
1605 /* Save the stack with nonlocal if available. */
1606 #ifdef HAVE_save_stack_nonlocal
1607 if (HAVE_save_stack_nonlocal)
1608 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1609 else
1610 #endif
1611 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1612
1613 /* Allocate a block of memory onto the stack and copy the memory
1614 arguments to the outgoing arguments address. We can pass TRUE
1615 as the 4th argument because we just saved the stack pointer
1616 and will restore it right after the call. */
1617 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1618
1619 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1620 may have already set current_function_calls_alloca to true.
1621 current_function_calls_alloca won't be set if argsize is zero,
1622 so we have to guarantee need_drap is true here. */
1623 if (SUPPORTS_STACK_ALIGNMENT)
1624 crtl->need_drap = true;
1625
1626 dest = virtual_outgoing_args_rtx;
1627 #ifndef STACK_GROWS_DOWNWARD
1628 if (CONST_INT_P (argsize))
1629 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1630 else
1631 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1632 #endif
1633 dest = gen_rtx_MEM (BLKmode, dest);
1634 set_mem_align (dest, PARM_BOUNDARY);
1635 src = gen_rtx_MEM (BLKmode, incoming_args);
1636 set_mem_align (src, PARM_BOUNDARY);
1637 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1638
1639 /* Refer to the argument block. */
1640 apply_args_size ();
1641 arguments = gen_rtx_MEM (BLKmode, arguments);
1642 set_mem_align (arguments, PARM_BOUNDARY);
1643
1644 /* Walk past the arg-pointer and structure value address. */
1645 size = GET_MODE_SIZE (Pmode);
1646 if (struct_value)
1647 size += GET_MODE_SIZE (Pmode);
1648
1649 /* Restore each of the registers previously saved. Make USE insns
1650 for each of these registers for use in making the call. */
1651 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1652 if ((mode = apply_args_mode[regno]) != VOIDmode)
1653 {
1654 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1655 if (size % align != 0)
1656 size = CEIL (size, align) * align;
1657 reg = gen_rtx_REG (mode, regno);
1658 emit_move_insn (reg, adjust_address (arguments, mode, size));
1659 use_reg (&call_fusage, reg);
1660 size += GET_MODE_SIZE (mode);
1661 }
1662
1663 /* Restore the structure value address unless this is passed as an
1664 "invisible" first argument. */
1665 size = GET_MODE_SIZE (Pmode);
1666 if (struct_value)
1667 {
1668 rtx value = gen_reg_rtx (Pmode);
1669 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1670 emit_move_insn (struct_value, value);
1671 if (REG_P (struct_value))
1672 use_reg (&call_fusage, struct_value);
1673 size += GET_MODE_SIZE (Pmode);
1674 }
1675
1676 /* All arguments and registers used for the call are set up by now! */
1677 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1678
1679 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no fixup is needed,
1680 and we don't want to load it into a register as an optimization,
1681 because prepare_call_address already did it if it should be done. */
1682 if (GET_CODE (function) != SYMBOL_REF)
1683 function = memory_address (FUNCTION_MODE, function);
1684
1685 /* Generate the actual call instruction and save the return value. */
1686 #ifdef HAVE_untyped_call
1687 if (HAVE_untyped_call)
1688 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1689 result, result_vector (1, result)));
1690 else
1691 #endif
1692 #ifdef HAVE_call_value
1693 if (HAVE_call_value)
1694 {
1695 rtx valreg = 0;
1696
1697 /* Locate the unique return register. It is not possible to
1698 express a call that sets more than one return register using
1699 call_value; use untyped_call for that. In fact, untyped_call
1700 only needs to save the return registers in the given block. */
1701 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1702 if ((mode = apply_result_mode[regno]) != VOIDmode)
1703 {
1704 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1705
1706 valreg = gen_rtx_REG (mode, regno);
1707 }
1708
1709 emit_call_insn (GEN_CALL_VALUE (valreg,
1710 gen_rtx_MEM (FUNCTION_MODE, function),
1711 const0_rtx, NULL_RTX, const0_rtx));
1712
1713 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1714 }
1715 else
1716 #endif
1717 gcc_unreachable ();
1718
1719 /* Find the CALL insn we just emitted, and attach the register usage
1720 information. */
1721 call_insn = last_call_insn ();
1722 add_function_usage_to (call_insn, call_fusage);
1723
1724 /* Restore the stack. */
1725 #ifdef HAVE_save_stack_nonlocal
1726 if (HAVE_save_stack_nonlocal)
1727 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1728 else
1729 #endif
1730 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1731 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1732
1733 OK_DEFER_POP;
1734
1735 /* Return the address of the result block. */
1736 result = copy_addr_to_reg (XEXP (result, 0));
1737 return convert_memory_address (ptr_mode, result);
1738 }
1739
1740 /* Perform an untyped return. */
1741
1742 static void
1743 expand_builtin_return (rtx result)
1744 {
1745 int size, align, regno;
1746 enum machine_mode mode;
1747 rtx reg;
1748 rtx_insn *call_fusage = 0;
1749
1750 result = convert_memory_address (Pmode, result);
1751
1752 apply_result_size ();
1753 result = gen_rtx_MEM (BLKmode, result);
1754
1755 #ifdef HAVE_untyped_return
1756 if (HAVE_untyped_return)
1757 {
1758 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1759 emit_barrier ();
1760 return;
1761 }
1762 #endif
1763
1764 /* Restore the return value and note that each value is used. */
1765 size = 0;
1766 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1767 if ((mode = apply_result_mode[regno]) != VOIDmode)
1768 {
1769 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1770 if (size % align != 0)
1771 size = CEIL (size, align) * align;
1772 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1773 emit_move_insn (reg, adjust_address (result, mode, size));
1774
1775 push_to_sequence (call_fusage);
1776 emit_use (reg);
1777 call_fusage = get_insns ();
1778 end_sequence ();
1779 size += GET_MODE_SIZE (mode);
1780 }
1781
1782 /* Put the USE insns before the return. */
1783 emit_insn (call_fusage);
1784
1785 /* Return whatever values were restored by jumping directly to the end
1786 of the function. */
1787 expand_naked_return ();
1788 }
1789
1790 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1791
1792 static enum type_class
1793 type_to_class (tree type)
1794 {
1795 switch (TREE_CODE (type))
1796 {
1797 case VOID_TYPE: return void_type_class;
1798 case INTEGER_TYPE: return integer_type_class;
1799 case ENUMERAL_TYPE: return enumeral_type_class;
1800 case BOOLEAN_TYPE: return boolean_type_class;
1801 case POINTER_TYPE: return pointer_type_class;
1802 case REFERENCE_TYPE: return reference_type_class;
1803 case OFFSET_TYPE: return offset_type_class;
1804 case REAL_TYPE: return real_type_class;
1805 case COMPLEX_TYPE: return complex_type_class;
1806 case FUNCTION_TYPE: return function_type_class;
1807 case METHOD_TYPE: return method_type_class;
1808 case RECORD_TYPE: return record_type_class;
1809 case UNION_TYPE:
1810 case QUAL_UNION_TYPE: return union_type_class;
1811 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1812 ? string_type_class : array_type_class);
1813 case LANG_TYPE: return lang_type_class;
1814 default: return no_type_class;
1815 }
1816 }
1817
1818 /* Expand a call EXP to __builtin_classify_type. */
1819
1820 static rtx
1821 expand_builtin_classify_type (tree exp)
1822 {
1823 if (call_expr_nargs (exp))
1824 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1825 return GEN_INT (no_type_class);
1826 }
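/* For example, __builtin_classify_type (0) expands to the constant
   integer_type_class, and __builtin_classify_type ((char *) 0) to
   pointer_type_class, following the mapping in type_to_class above. */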
1827
1828 /* This helper macro, meant to be used in mathfn_built_in below,
1829 determines which among a set of three builtin math functions is
1830 appropriate for a given type mode. The `F' and `L' cases are
1831 automatically generated from the `double' case. */
1832 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1833 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1834 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1835 fcodel = BUILT_IN_MATHFN##L ; break;
1836 /* Similar to above, but appends _R after any F/L suffix. */
1837 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1838 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1839 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1840 fcodel = BUILT_IN_MATHFN##L_R ; break;
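/* For instance, CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;
*/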
1841
1842 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1843 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1844 otherwise use the explicit declaration. If we can't do the conversion,
1845 return zero. */
1846
1847 static tree
1848 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1849 {
1850 enum built_in_function fcode, fcodef, fcodel, fcode2;
1851
1852 switch (fn)
1853 {
1854 CASE_MATHFN (BUILT_IN_ACOS)
1855 CASE_MATHFN (BUILT_IN_ACOSH)
1856 CASE_MATHFN (BUILT_IN_ASIN)
1857 CASE_MATHFN (BUILT_IN_ASINH)
1858 CASE_MATHFN (BUILT_IN_ATAN)
1859 CASE_MATHFN (BUILT_IN_ATAN2)
1860 CASE_MATHFN (BUILT_IN_ATANH)
1861 CASE_MATHFN (BUILT_IN_CBRT)
1862 CASE_MATHFN (BUILT_IN_CEIL)
1863 CASE_MATHFN (BUILT_IN_CEXPI)
1864 CASE_MATHFN (BUILT_IN_COPYSIGN)
1865 CASE_MATHFN (BUILT_IN_COS)
1866 CASE_MATHFN (BUILT_IN_COSH)
1867 CASE_MATHFN (BUILT_IN_DREM)
1868 CASE_MATHFN (BUILT_IN_ERF)
1869 CASE_MATHFN (BUILT_IN_ERFC)
1870 CASE_MATHFN (BUILT_IN_EXP)
1871 CASE_MATHFN (BUILT_IN_EXP10)
1872 CASE_MATHFN (BUILT_IN_EXP2)
1873 CASE_MATHFN (BUILT_IN_EXPM1)
1874 CASE_MATHFN (BUILT_IN_FABS)
1875 CASE_MATHFN (BUILT_IN_FDIM)
1876 CASE_MATHFN (BUILT_IN_FLOOR)
1877 CASE_MATHFN (BUILT_IN_FMA)
1878 CASE_MATHFN (BUILT_IN_FMAX)
1879 CASE_MATHFN (BUILT_IN_FMIN)
1880 CASE_MATHFN (BUILT_IN_FMOD)
1881 CASE_MATHFN (BUILT_IN_FREXP)
1882 CASE_MATHFN (BUILT_IN_GAMMA)
1883 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1884 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1885 CASE_MATHFN (BUILT_IN_HYPOT)
1886 CASE_MATHFN (BUILT_IN_ILOGB)
1887 CASE_MATHFN (BUILT_IN_ICEIL)
1888 CASE_MATHFN (BUILT_IN_IFLOOR)
1889 CASE_MATHFN (BUILT_IN_INF)
1890 CASE_MATHFN (BUILT_IN_IRINT)
1891 CASE_MATHFN (BUILT_IN_IROUND)
1892 CASE_MATHFN (BUILT_IN_ISINF)
1893 CASE_MATHFN (BUILT_IN_J0)
1894 CASE_MATHFN (BUILT_IN_J1)
1895 CASE_MATHFN (BUILT_IN_JN)
1896 CASE_MATHFN (BUILT_IN_LCEIL)
1897 CASE_MATHFN (BUILT_IN_LDEXP)
1898 CASE_MATHFN (BUILT_IN_LFLOOR)
1899 CASE_MATHFN (BUILT_IN_LGAMMA)
1900 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1901 CASE_MATHFN (BUILT_IN_LLCEIL)
1902 CASE_MATHFN (BUILT_IN_LLFLOOR)
1903 CASE_MATHFN (BUILT_IN_LLRINT)
1904 CASE_MATHFN (BUILT_IN_LLROUND)
1905 CASE_MATHFN (BUILT_IN_LOG)
1906 CASE_MATHFN (BUILT_IN_LOG10)
1907 CASE_MATHFN (BUILT_IN_LOG1P)
1908 CASE_MATHFN (BUILT_IN_LOG2)
1909 CASE_MATHFN (BUILT_IN_LOGB)
1910 CASE_MATHFN (BUILT_IN_LRINT)
1911 CASE_MATHFN (BUILT_IN_LROUND)
1912 CASE_MATHFN (BUILT_IN_MODF)
1913 CASE_MATHFN (BUILT_IN_NAN)
1914 CASE_MATHFN (BUILT_IN_NANS)
1915 CASE_MATHFN (BUILT_IN_NEARBYINT)
1916 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1917 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1918 CASE_MATHFN (BUILT_IN_POW)
1919 CASE_MATHFN (BUILT_IN_POWI)
1920 CASE_MATHFN (BUILT_IN_POW10)
1921 CASE_MATHFN (BUILT_IN_REMAINDER)
1922 CASE_MATHFN (BUILT_IN_REMQUO)
1923 CASE_MATHFN (BUILT_IN_RINT)
1924 CASE_MATHFN (BUILT_IN_ROUND)
1925 CASE_MATHFN (BUILT_IN_SCALB)
1926 CASE_MATHFN (BUILT_IN_SCALBLN)
1927 CASE_MATHFN (BUILT_IN_SCALBN)
1928 CASE_MATHFN (BUILT_IN_SIGNBIT)
1929 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1930 CASE_MATHFN (BUILT_IN_SIN)
1931 CASE_MATHFN (BUILT_IN_SINCOS)
1932 CASE_MATHFN (BUILT_IN_SINH)
1933 CASE_MATHFN (BUILT_IN_SQRT)
1934 CASE_MATHFN (BUILT_IN_TAN)
1935 CASE_MATHFN (BUILT_IN_TANH)
1936 CASE_MATHFN (BUILT_IN_TGAMMA)
1937 CASE_MATHFN (BUILT_IN_TRUNC)
1938 CASE_MATHFN (BUILT_IN_Y0)
1939 CASE_MATHFN (BUILT_IN_Y1)
1940 CASE_MATHFN (BUILT_IN_YN)
1941
1942 default:
1943 return NULL_TREE;
1944 }
1945
1946 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1947 fcode2 = fcode;
1948 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1949 fcode2 = fcodef;
1950 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1951 fcode2 = fcodel;
1952 else
1953 return NULL_TREE;
1954
1955 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1956 return NULL_TREE;
1957
1958 return builtin_decl_explicit (fcode2);
1959 }
1960
1961 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1962
1963 tree
1964 mathfn_built_in (tree type, enum built_in_function fn)
1965 {
1966 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1967 }
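/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) yields the
   decl for sinf when that builtin may be used implicitly, and NULL_TREE
   otherwise. */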
1968
1969 /* If errno must be maintained, expand the RTL to check if the result,
1970 TARGET, of a built-in function call, EXP, is NaN, and if so set
1971 errno to EDOM. */
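/* Conceptually the emitted sequence amounts to the following sketch,
   where the self-comparison stands in for an isnan test:

     result = <builtin insn> (x);
     if (result == result)    <- not NaN; skip the errno store
       goto lab;
     errno = EDOM;
   lab:;
*/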
1972
1973 static void
1974 expand_errno_check (tree exp, rtx target)
1975 {
1976 rtx_code_label *lab = gen_label_rtx ();
1977
1978 /* Test the result; if it is NaN, set errno=EDOM because
1979 the argument was not in the domain. */
1980 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1981 NULL_RTX, NULL_RTX, lab,
1982 /* The jump is very likely. */
1983 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1984
1985 #ifdef TARGET_EDOM
1986 /* If this built-in doesn't throw an exception, set errno directly. */
1987 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1988 {
1989 #ifdef GEN_ERRNO_RTX
1990 rtx errno_rtx = GEN_ERRNO_RTX;
1991 #else
1992 rtx errno_rtx
1993 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1994 #endif
1995 emit_move_insn (errno_rtx,
1996 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1997 emit_label (lab);
1998 return;
1999 }
2000 #endif
2001
2002 /* Make sure the library call isn't expanded as a tail call. */
2003 CALL_EXPR_TAILCALL (exp) = 0;
2004
2005 /* We can't set errno=EDOM directly; let the library call do it.
2006 Pop the arguments right away in case the call gets deleted. */
2007 NO_DEFER_POP;
2008 expand_call (exp, target, 0);
2009 OK_DEFER_POP;
2010 emit_label (lab);
2011 }
2012
2013 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2014 Return NULL_RTX if a normal call should be emitted rather than expanding
2015 the function in-line. EXP is the expression that is a call to the builtin
2016 function; if convenient, the result should be placed in TARGET.
2017 SUBTARGET may be used as the target for computing one of EXP's operands. */
2018
2019 static rtx
2020 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2021 {
2022 optab builtin_optab;
2023 rtx op0;
2024 rtx_insn *insns;
2025 tree fndecl = get_callee_fndecl (exp);
2026 enum machine_mode mode;
2027 bool errno_set = false;
2028 bool try_widening = false;
2029 tree arg;
2030
2031 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2032 return NULL_RTX;
2033
2034 arg = CALL_EXPR_ARG (exp, 0);
2035
2036 switch (DECL_FUNCTION_CODE (fndecl))
2037 {
2038 CASE_FLT_FN (BUILT_IN_SQRT):
2039 errno_set = ! tree_expr_nonnegative_p (arg);
2040 try_widening = true;
2041 builtin_optab = sqrt_optab;
2042 break;
2043 CASE_FLT_FN (BUILT_IN_EXP):
2044 errno_set = true; builtin_optab = exp_optab; break;
2045 CASE_FLT_FN (BUILT_IN_EXP10):
2046 CASE_FLT_FN (BUILT_IN_POW10):
2047 errno_set = true; builtin_optab = exp10_optab; break;
2048 CASE_FLT_FN (BUILT_IN_EXP2):
2049 errno_set = true; builtin_optab = exp2_optab; break;
2050 CASE_FLT_FN (BUILT_IN_EXPM1):
2051 errno_set = true; builtin_optab = expm1_optab; break;
2052 CASE_FLT_FN (BUILT_IN_LOGB):
2053 errno_set = true; builtin_optab = logb_optab; break;
2054 CASE_FLT_FN (BUILT_IN_LOG):
2055 errno_set = true; builtin_optab = log_optab; break;
2056 CASE_FLT_FN (BUILT_IN_LOG10):
2057 errno_set = true; builtin_optab = log10_optab; break;
2058 CASE_FLT_FN (BUILT_IN_LOG2):
2059 errno_set = true; builtin_optab = log2_optab; break;
2060 CASE_FLT_FN (BUILT_IN_LOG1P):
2061 errno_set = true; builtin_optab = log1p_optab; break;
2062 CASE_FLT_FN (BUILT_IN_ASIN):
2063 builtin_optab = asin_optab; break;
2064 CASE_FLT_FN (BUILT_IN_ACOS):
2065 builtin_optab = acos_optab; break;
2066 CASE_FLT_FN (BUILT_IN_TAN):
2067 builtin_optab = tan_optab; break;
2068 CASE_FLT_FN (BUILT_IN_ATAN):
2069 builtin_optab = atan_optab; break;
2070 CASE_FLT_FN (BUILT_IN_FLOOR):
2071 builtin_optab = floor_optab; break;
2072 CASE_FLT_FN (BUILT_IN_CEIL):
2073 builtin_optab = ceil_optab; break;
2074 CASE_FLT_FN (BUILT_IN_TRUNC):
2075 builtin_optab = btrunc_optab; break;
2076 CASE_FLT_FN (BUILT_IN_ROUND):
2077 builtin_optab = round_optab; break;
2078 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2079 builtin_optab = nearbyint_optab;
2080 if (flag_trapping_math)
2081 break;
2082 /* Else fallthrough and expand as rint. */
2083 CASE_FLT_FN (BUILT_IN_RINT):
2084 builtin_optab = rint_optab; break;
2085 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2086 builtin_optab = significand_optab; break;
2087 default:
2088 gcc_unreachable ();
2089 }
2090
2091 /* Make a suitable register to place result in. */
2092 mode = TYPE_MODE (TREE_TYPE (exp));
2093
2094 if (! flag_errno_math || ! HONOR_NANS (mode))
2095 errno_set = false;
2096
2097 /* Before working hard, check whether the instruction is available, but try
2098 to widen the mode for specific operations. */
2099 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2100 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2101 && (!errno_set || !optimize_insn_for_size_p ()))
2102 {
2103 rtx result = gen_reg_rtx (mode);
2104
2105 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2106 need to expand the argument again. This way, we will not perform
2107 side-effects more than once. */
2108 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2109
2110 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2111
2112 start_sequence ();
2113
2114 /* Compute into RESULT.
2115 Set RESULT to wherever the result comes back. */
2116 result = expand_unop (mode, builtin_optab, op0, result, 0);
2117
2118 if (result != 0)
2119 {
2120 if (errno_set)
2121 expand_errno_check (exp, result);
2122
2123 /* Output the entire sequence. */
2124 insns = get_insns ();
2125 end_sequence ();
2126 emit_insn (insns);
2127 return result;
2128 }
2129
2130 /* If we were unable to expand via the builtin, stop the sequence
2131 (without outputting the insns) and call the library function
2132 with the stabilized argument list. */
2133 end_sequence ();
2134 }
2135
2136 return expand_call (exp, target, target == const0_rtx);
2137 }
2138
2139 /* Expand a call to the builtin binary math functions (pow and atan2).
2140 Return NULL_RTX if a normal call should be emitted rather than expanding the
2141 function in-line. EXP is the expression that is a call to the builtin
2142 function; if convenient, the result should be placed in TARGET.
2143 SUBTARGET may be used as the target for computing one of EXP's
2144 operands. */
2145
2146 static rtx
2147 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2148 {
2149 optab builtin_optab;
2150 rtx op0, op1, result;
2151 rtx_insn *insns;
2152 int op1_type = REAL_TYPE;
2153 tree fndecl = get_callee_fndecl (exp);
2154 tree arg0, arg1;
2155 enum machine_mode mode;
2156 bool errno_set = true;
2157
2158 switch (DECL_FUNCTION_CODE (fndecl))
2159 {
2160 CASE_FLT_FN (BUILT_IN_SCALBN):
2161 CASE_FLT_FN (BUILT_IN_SCALBLN):
2162 CASE_FLT_FN (BUILT_IN_LDEXP):
2163 op1_type = INTEGER_TYPE;
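/* FALLTHRU */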
2164 default:
2165 break;
2166 }
2167
2168 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2169 return NULL_RTX;
2170
2171 arg0 = CALL_EXPR_ARG (exp, 0);
2172 arg1 = CALL_EXPR_ARG (exp, 1);
2173
2174 switch (DECL_FUNCTION_CODE (fndecl))
2175 {
2176 CASE_FLT_FN (BUILT_IN_POW):
2177 builtin_optab = pow_optab; break;
2178 CASE_FLT_FN (BUILT_IN_ATAN2):
2179 builtin_optab = atan2_optab; break;
2180 CASE_FLT_FN (BUILT_IN_SCALB):
2181 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2182 return 0;
2183 builtin_optab = scalb_optab; break;
2184 CASE_FLT_FN (BUILT_IN_SCALBN):
2185 CASE_FLT_FN (BUILT_IN_SCALBLN):
2186 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2187 return 0;
2188 /* Fall through... */
2189 CASE_FLT_FN (BUILT_IN_LDEXP):
2190 builtin_optab = ldexp_optab; break;
2191 CASE_FLT_FN (BUILT_IN_FMOD):
2192 builtin_optab = fmod_optab; break;
2193 CASE_FLT_FN (BUILT_IN_REMAINDER):
2194 CASE_FLT_FN (BUILT_IN_DREM):
2195 builtin_optab = remainder_optab; break;
2196 default:
2197 gcc_unreachable ();
2198 }
2199
2200 /* Make a suitable register to place result in. */
2201 mode = TYPE_MODE (TREE_TYPE (exp));
2202
2203 /* Before working hard, check whether the instruction is available. */
2204 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2205 return NULL_RTX;
2206
2207 result = gen_reg_rtx (mode);
2208
2209 if (! flag_errno_math || ! HONOR_NANS (mode))
2210 errno_set = false;
2211
2212 if (errno_set && optimize_insn_for_size_p ())
2213 return 0;
2214
2215 /* Always stabilize the argument list. */
2216 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2217 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2218
2219 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2220 op1 = expand_normal (arg1);
2221
2222 start_sequence ();
2223
2224 /* Compute into RESULT.
2225 Set RESULT to wherever the result comes back. */
2226 result = expand_binop (mode, builtin_optab, op0, op1,
2227 result, 0, OPTAB_DIRECT);
2228
2229 /* If we were unable to expand via the builtin, stop the sequence
2230 (without outputting the insns) and call the library function
2231 with the stabilized argument list. */
2232 if (result == 0)
2233 {
2234 end_sequence ();
2235 return expand_call (exp, target, target == const0_rtx);
2236 }
2237
2238 if (errno_set)
2239 expand_errno_check (exp, result);
2240
2241 /* Output the entire sequence. */
2242 insns = get_insns ();
2243 end_sequence ();
2244 emit_insn (insns);
2245
2246 return result;
2247 }
2248
2249 /* Expand a call to the builtin trinary math functions (fma).
2250 Return NULL_RTX if a normal call should be emitted rather than expanding the
2251 function in-line. EXP is the expression that is a call to the builtin
2252 function; if convenient, the result should be placed in TARGET.
2253 SUBTARGET may be used as the target for computing one of EXP's
2254 operands. */
2255
2256 static rtx
2257 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2258 {
2259 optab builtin_optab;
2260 rtx op0, op1, op2, result;
2261 rtx_insn *insns;
2262 tree fndecl = get_callee_fndecl (exp);
2263 tree arg0, arg1, arg2;
2264 enum machine_mode mode;
2265
2266 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2267 return NULL_RTX;
2268
2269 arg0 = CALL_EXPR_ARG (exp, 0);
2270 arg1 = CALL_EXPR_ARG (exp, 1);
2271 arg2 = CALL_EXPR_ARG (exp, 2);
2272
2273 switch (DECL_FUNCTION_CODE (fndecl))
2274 {
2275 CASE_FLT_FN (BUILT_IN_FMA):
2276 builtin_optab = fma_optab; break;
2277 default:
2278 gcc_unreachable ();
2279 }
2280
2281 /* Make a suitable register to place result in. */
2282 mode = TYPE_MODE (TREE_TYPE (exp));
2283
2284 /* Before working hard, check whether the instruction is available. */
2285 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2286 return NULL_RTX;
2287
2288 result = gen_reg_rtx (mode);
2289
2290 /* Always stabilize the argument list. */
2291 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2292 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2293 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2294
2295 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2296 op1 = expand_normal (arg1);
2297 op2 = expand_normal (arg2);
2298
2299 start_sequence ();
2300
2301 /* Compute into RESULT.
2302 Set RESULT to wherever the result comes back. */
2303 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2304 result, 0);
2305
2306 /* If we were unable to expand via the builtin, stop the sequence
2307 (without outputting the insns) and call the library function
2308 with the stabilized argument list. */
2309 if (result == 0)
2310 {
2311 end_sequence ();
2312 return expand_call (exp, target, target == const0_rtx);
2313 }
2314
2315 /* Output the entire sequence. */
2316 insns = get_insns ();
2317 end_sequence ();
2318 emit_insn (insns);
2319
2320 return result;
2321 }
2322
2323 /* Expand a call to the builtin sin and cos math functions.
2324 Return NULL_RTX if a normal call should be emitted rather than expanding the
2325 function in-line. EXP is the expression that is a call to the builtin
2326 function; if convenient, the result should be placed in TARGET.
2327 SUBTARGET may be used as the target for computing one of EXP's
2328 operands. */
2329
2330 static rtx
2331 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2332 {
2333 optab builtin_optab;
2334 rtx op0;
2335 rtx_insn *insns;
2336 tree fndecl = get_callee_fndecl (exp);
2337 enum machine_mode mode;
2338 tree arg;
2339
2340 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2341 return NULL_RTX;
2342
2343 arg = CALL_EXPR_ARG (exp, 0);
2344
2345 switch (DECL_FUNCTION_CODE (fndecl))
2346 {
2347 CASE_FLT_FN (BUILT_IN_SIN):
2348 CASE_FLT_FN (BUILT_IN_COS):
2349 builtin_optab = sincos_optab; break;
2350 default:
2351 gcc_unreachable ();
2352 }
2353
2354 /* Make a suitable register to place result in. */
2355 mode = TYPE_MODE (TREE_TYPE (exp));
2356
2357 /* Check if the sincos insn is available; otherwise fall back
2358 to the sin or cos insn. */
2359 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2360 switch (DECL_FUNCTION_CODE (fndecl))
2361 {
2362 CASE_FLT_FN (BUILT_IN_SIN):
2363 builtin_optab = sin_optab; break;
2364 CASE_FLT_FN (BUILT_IN_COS):
2365 builtin_optab = cos_optab; break;
2366 default:
2367 gcc_unreachable ();
2368 }
2369
2370 /* Before working hard, check whether the instruction is available. */
2371 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2372 {
2373 rtx result = gen_reg_rtx (mode);
2374
2375 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2376 need to expand the argument again. This way, we will not perform
2377 side-effects more than once. */
2378 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2379
2380 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2381
2382 start_sequence ();
2383
2384 /* Compute into RESULT.
2385 Set RESULT to wherever the result comes back. */
2386 if (builtin_optab == sincos_optab)
2387 {
2388 int ok;
2389
2390 switch (DECL_FUNCTION_CODE (fndecl))
2391 {
2392 CASE_FLT_FN (BUILT_IN_SIN):
2393 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2394 break;
2395 CASE_FLT_FN (BUILT_IN_COS):
2396 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2397 break;
2398 default:
2399 gcc_unreachable ();
2400 }
2401 gcc_assert (ok);
2402 }
2403 else
2404 result = expand_unop (mode, builtin_optab, op0, result, 0);
2405
2406 if (result != 0)
2407 {
2408 /* Output the entire sequence. */
2409 insns = get_insns ();
2410 end_sequence ();
2411 emit_insn (insns);
2412 return result;
2413 }
2414
2415 /* If we were unable to expand via the builtin, stop the sequence
2416 (without outputting the insns) and call the library function
2417 with the stabilized argument list. */
2418 end_sequence ();
2419 }
2420
2421 return expand_call (exp, target, target == const0_rtx);
2422 }
2423
2424 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2425 return an RTL instruction code that implements the functionality.
2426 If that isn't possible or available return CODE_FOR_nothing. */
2427
2428 static enum insn_code
2429 interclass_mathfn_icode (tree arg, tree fndecl)
2430 {
2431 bool errno_set = false;
2432 optab builtin_optab = unknown_optab;
2433 enum machine_mode mode;
2434
2435 switch (DECL_FUNCTION_CODE (fndecl))
2436 {
2437 CASE_FLT_FN (BUILT_IN_ILOGB):
2438 errno_set = true; builtin_optab = ilogb_optab; break;
2439 CASE_FLT_FN (BUILT_IN_ISINF):
2440 builtin_optab = isinf_optab; break;
2441 case BUILT_IN_ISNORMAL:
2442 case BUILT_IN_ISFINITE:
2443 CASE_FLT_FN (BUILT_IN_FINITE):
2444 case BUILT_IN_FINITED32:
2445 case BUILT_IN_FINITED64:
2446 case BUILT_IN_FINITED128:
2447 case BUILT_IN_ISINFD32:
2448 case BUILT_IN_ISINFD64:
2449 case BUILT_IN_ISINFD128:
2450 /* These builtins have no optabs (yet). */
2451 break;
2452 default:
2453 gcc_unreachable ();
2454 }
2455
2456 /* There's no easy way to detect the case we need to set EDOM. */
2457 if (flag_errno_math && errno_set)
2458 return CODE_FOR_nothing;
2459
2460 /* Optab mode depends on the mode of the input argument. */
2461 mode = TYPE_MODE (TREE_TYPE (arg));
2462
2463 if (builtin_optab)
2464 return optab_handler (builtin_optab, mode);
2465 return CODE_FOR_nothing;
2466 }
2467
2468 /* Expand a call to one of the builtin math functions that operate on
2469 a floating point argument and output an integer result (ilogb, isinf,
2470 isnan, etc).
2471 Return 0 if a normal call should be emitted rather than expanding the
2472 function in-line. EXP is the expression that is a call to the builtin
2473 function; if convenient, the result should be placed in TARGET. */
2474
2475 static rtx
2476 expand_builtin_interclass_mathfn (tree exp, rtx target)
2477 {
2478 enum insn_code icode = CODE_FOR_nothing;
2479 rtx op0;
2480 tree fndecl = get_callee_fndecl (exp);
2481 enum machine_mode mode;
2482 tree arg;
2483
2484 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2485 return NULL_RTX;
2486
2487 arg = CALL_EXPR_ARG (exp, 0);
2488 icode = interclass_mathfn_icode (arg, fndecl);
2489 mode = TYPE_MODE (TREE_TYPE (arg));
2490
2491 if (icode != CODE_FOR_nothing)
2492 {
2493 struct expand_operand ops[1];
2494 rtx_insn *last = get_last_insn ();
2495 tree orig_arg = arg;
2496
2497 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2498 need to expand the argument again. This way, we will not perform
2499 side-effects more than once. */
2500 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2501
2502 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2503
2504 if (mode != GET_MODE (op0))
2505 op0 = convert_to_mode (mode, op0, 0);
2506
2507 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2508 if (maybe_legitimize_operands (icode, 0, 1, ops)
2509 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2510 return ops[0].value;
2511
2512 delete_insns_since (last);
2513 CALL_EXPR_ARG (exp, 0) = orig_arg;
2514 }
2515
2516 return NULL_RTX;
2517 }
2518
2519 /* Expand a call to the builtin sincos math function.
2520 Return NULL_RTX if a normal call should be emitted rather than expanding the
2521 function in-line. EXP is the expression that is a call to the builtin
2522 function. */
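/* For example, a user call

     sincos (x, &s, &c);

   is expanded here into a single two-output sincos instruction when the
   target provides one; otherwise NULL_RTX is returned and a normal call
   is emitted. */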
2523
2524 static rtx
2525 expand_builtin_sincos (tree exp)
2526 {
2527 rtx op0, op1, op2, target1, target2;
2528 enum machine_mode mode;
2529 tree arg, sinp, cosp;
2530 int result;
2531 location_t loc = EXPR_LOCATION (exp);
2532 tree alias_type, alias_off;
2533
2534 if (!validate_arglist (exp, REAL_TYPE,
2535 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2536 return NULL_RTX;
2537
2538 arg = CALL_EXPR_ARG (exp, 0);
2539 sinp = CALL_EXPR_ARG (exp, 1);
2540 cosp = CALL_EXPR_ARG (exp, 2);
2541
2542 /* Make a suitable register to place result in. */
2543 mode = TYPE_MODE (TREE_TYPE (arg));
2544
2545 /* Check if sincos insn is available, otherwise emit the call. */
2546 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2547 return NULL_RTX;
2548
2549 target1 = gen_reg_rtx (mode);
2550 target2 = gen_reg_rtx (mode);
2551
2552 op0 = expand_normal (arg);
2553 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2554 alias_off = build_int_cst (alias_type, 0);
2555 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2556 sinp, alias_off));
2557 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2558 cosp, alias_off));
2559
2560 /* Compute into target1 and target2.
2561 Set them to wherever the results come back. */
2562 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2563 gcc_assert (result);
2564
2565 /* Move target1 and target2 to the memory locations indicated
2566 by op1 and op2. */
2567 emit_move_insn (op1, target1);
2568 emit_move_insn (op2, target2);
2569
2570 return const0_rtx;
2571 }
2572
2573 /* Expand a call to the internal cexpi builtin to the sincos math function.
2574 EXP is the expression that is a call to the builtin function; if convenient,
2575 the result should be placed in TARGET. */
2576
2577 static rtx
2578 expand_builtin_cexpi (tree exp, rtx target)
2579 {
2580 tree fndecl = get_callee_fndecl (exp);
2581 tree arg, type;
2582 enum machine_mode mode;
2583 rtx op0, op1, op2;
2584 location_t loc = EXPR_LOCATION (exp);
2585
2586 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2587 return NULL_RTX;
2588
2589 arg = CALL_EXPR_ARG (exp, 0);
2590 type = TREE_TYPE (arg);
2591 mode = TYPE_MODE (TREE_TYPE (arg));
2592
2593 /* Try expanding via a sincos optab; fall back to emitting a libcall
2594 to sincos or cexp. One of those must be available, because cexpi
2595 is only generated when we have sincos or cexp. */
2596 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2597 {
2598 op1 = gen_reg_rtx (mode);
2599 op2 = gen_reg_rtx (mode);
2600
2601 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2602
2603 /* Compute into op1 and op2. */
2604 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2605 }
2606 else if (targetm.libc_has_function (function_sincos))
2607 {
2608 tree call, fn = NULL_TREE;
2609 tree top1, top2;
2610 rtx op1a, op2a;
2611
2612 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2613 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2614 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2615 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2616 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2617 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2618 else
2619 gcc_unreachable ();
2620
2621 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2622 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2623 op1a = copy_addr_to_reg (XEXP (op1, 0));
2624 op2a = copy_addr_to_reg (XEXP (op2, 0));
2625 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2626 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2627
2628 /* Make sure not to fold the sincos call again. */
2629 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2630 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2631 call, 3, arg, top1, top2));
2632 }
2633 else
2634 {
2635 tree call, fn = NULL_TREE, narg;
2636 tree ctype = build_complex_type (type);
2637
2638 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2639 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2640 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2641 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2642 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2643 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2644 else
2645 gcc_unreachable ();
2646
2647 /* If we don't have a decl for cexp, create one. This is the
2648 friendliest fallback if the user calls __builtin_cexpi
2649 without full target C99 function support. */
2650 if (fn == NULL_TREE)
2651 {
2652 tree fntype;
2653 const char *name = NULL;
2654
2655 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2656 name = "cexpf";
2657 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2658 name = "cexp";
2659 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2660 name = "cexpl";
2661
2662 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2663 fn = build_fn_decl (name, fntype);
2664 }
2665
2666 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2667 build_real (type, dconst0), arg);
2668
2669 /* Make sure not to fold the cexp call again. */
2670 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2671 return expand_expr (build_call_nary (ctype, call, 1, narg),
2672 target, VOIDmode, EXPAND_NORMAL);
2673 }
2674
2675 /* Now build the proper return type. */
2676 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2677 make_tree (TREE_TYPE (arg), op2),
2678 make_tree (TREE_TYPE (arg), op1)),
2679 target, VOIDmode, EXPAND_NORMAL);
2680 }
2681
2682 /* Conveniently construct a function call expression. FNDECL names the
2683 function to be called, N is the number of arguments, and the "..."
2684 parameters are the argument expressions. Unlike build_call_expr
2685 this doesn't fold the call, hence it will always return a CALL_EXPR. */
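/* A typical use, as in the rounding fallbacks below:

     exp = build_call_nofold_loc (loc, fallback_fndecl, 1, arg);

   which yields an unfolded CALL_EXPR invoking FALLBACK_FNDECL on ARG. */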
2686
2687 static tree
2688 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2689 {
2690 va_list ap;
2691 tree fntype = TREE_TYPE (fndecl);
2692 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2693
2694 va_start (ap, n);
2695 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2696 va_end (ap);
2697 SET_EXPR_LOCATION (fn, loc);
2698 return fn;
2699 }
2700
2701 /* Expand a call to one of the builtin rounding functions gcc defines
2702 as an extension (lfloor and lceil). As these are gcc extensions we
2703 do not need to worry about setting errno to EDOM.
2704 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2705 EXP is the expression that is a call to the builtin function;
2706 if convenient, the result should be placed in TARGET. */
2707
2708 static rtx
2709 expand_builtin_int_roundingfn (tree exp, rtx target)
2710 {
2711 convert_optab builtin_optab;
2712 rtx op0, tmp;
2713 rtx_insn *insns;
2714 tree fndecl = get_callee_fndecl (exp);
2715 enum built_in_function fallback_fn;
2716 tree fallback_fndecl;
2717 enum machine_mode mode;
2718 tree arg;
2719
2720 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2721 gcc_unreachable ();
2722
2723 arg = CALL_EXPR_ARG (exp, 0);
2724
2725 switch (DECL_FUNCTION_CODE (fndecl))
2726 {
2727 CASE_FLT_FN (BUILT_IN_ICEIL):
2728 CASE_FLT_FN (BUILT_IN_LCEIL):
2729 CASE_FLT_FN (BUILT_IN_LLCEIL):
2730 builtin_optab = lceil_optab;
2731 fallback_fn = BUILT_IN_CEIL;
2732 break;
2733
2734 CASE_FLT_FN (BUILT_IN_IFLOOR):
2735 CASE_FLT_FN (BUILT_IN_LFLOOR):
2736 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2737 builtin_optab = lfloor_optab;
2738 fallback_fn = BUILT_IN_FLOOR;
2739 break;
2740
2741 default:
2742 gcc_unreachable ();
2743 }
2744
2745 /* Make a suitable register to place result in. */
2746 mode = TYPE_MODE (TREE_TYPE (exp));
2747
2748 target = gen_reg_rtx (mode);
2749
2750 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2751 need to expand the argument again. This way, we will not perform
2752 side-effects more than once. */
2753 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2754
2755 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2756
2757 start_sequence ();
2758
2759 /* Compute into TARGET. */
2760 if (expand_sfix_optab (target, op0, builtin_optab))
2761 {
2762 /* Output the entire sequence. */
2763 insns = get_insns ();
2764 end_sequence ();
2765 emit_insn (insns);
2766 return target;
2767 }
2768
2769 /* If we were unable to expand via the builtin, stop the sequence
2770 (without outputting the insns). */
2771 end_sequence ();
2772
2773 /* Fall back to floating point rounding optab. */
2774 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2775
2776 /* For non-C99 targets we may end up without a fallback fndecl here
2777 if the user called __builtin_lfloor directly. In this case emit
2778 a call to the floor/ceil variants nevertheless. This should result
2779 in the best user experience on targets without full C99 support. */
2780 if (fallback_fndecl == NULL_TREE)
2781 {
2782 tree fntype;
2783 const char *name = NULL;
2784
2785 switch (DECL_FUNCTION_CODE (fndecl))
2786 {
2787 case BUILT_IN_ICEIL:
2788 case BUILT_IN_LCEIL:
2789 case BUILT_IN_LLCEIL:
2790 name = "ceil";
2791 break;
2792 case BUILT_IN_ICEILF:
2793 case BUILT_IN_LCEILF:
2794 case BUILT_IN_LLCEILF:
2795 name = "ceilf";
2796 break;
2797 case BUILT_IN_ICEILL:
2798 case BUILT_IN_LCEILL:
2799 case BUILT_IN_LLCEILL:
2800 name = "ceill";
2801 break;
2802 case BUILT_IN_IFLOOR:
2803 case BUILT_IN_LFLOOR:
2804 case BUILT_IN_LLFLOOR:
2805 name = "floor";
2806 break;
2807 case BUILT_IN_IFLOORF:
2808 case BUILT_IN_LFLOORF:
2809 case BUILT_IN_LLFLOORF:
2810 name = "floorf";
2811 break;
2812 case BUILT_IN_IFLOORL:
2813 case BUILT_IN_LFLOORL:
2814 case BUILT_IN_LLFLOORL:
2815 name = "floorl";
2816 break;
2817 default:
2818 gcc_unreachable ();
2819 }
2820
2821 fntype = build_function_type_list (TREE_TYPE (arg),
2822 TREE_TYPE (arg), NULL_TREE);
2823 fallback_fndecl = build_fn_decl (name, fntype);
2824 }
2825
2826 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2827
2828 tmp = expand_normal (exp);
2829 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2830
2831 /* Truncate the result of floating point optab to integer
2832 via expand_fix (). */
2833 target = gen_reg_rtx (mode);
2834 expand_fix (target, tmp, 0);
2835
2836 return target;
2837 }
2838
2839 /* Expand a call to one of the builtin math functions doing integer
2840 conversion (lrint).
2841 Return 0 if a normal call should be emitted rather than expanding the
2842 function in-line. EXP is the expression that is a call to the builtin
2843 function; if convenient, the result should be placed in TARGET. */
2844
2845 static rtx
2846 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2847 {
2848 convert_optab builtin_optab;
2849 rtx op0;
2850 rtx_insn *insns;
2851 tree fndecl = get_callee_fndecl (exp);
2852 tree arg;
2853 enum machine_mode mode;
2854 enum built_in_function fallback_fn = BUILT_IN_NONE;
2855
2856 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2857 gcc_unreachable ();
2858
2859 arg = CALL_EXPR_ARG (exp, 0);
2860
2861 switch (DECL_FUNCTION_CODE (fndecl))
2862 {
2863 CASE_FLT_FN (BUILT_IN_IRINT):
2864 fallback_fn = BUILT_IN_LRINT;
2865 /* FALLTHRU */
2866 CASE_FLT_FN (BUILT_IN_LRINT):
2867 CASE_FLT_FN (BUILT_IN_LLRINT):
2868 builtin_optab = lrint_optab;
2869 break;
2870
2871 CASE_FLT_FN (BUILT_IN_IROUND):
2872 fallback_fn = BUILT_IN_LROUND;
2873 /* FALLTHRU */
2874 CASE_FLT_FN (BUILT_IN_LROUND):
2875 CASE_FLT_FN (BUILT_IN_LLROUND):
2876 builtin_optab = lround_optab;
2877 break;
2878
2879 default:
2880 gcc_unreachable ();
2881 }
2882
2883 /* There's no easy way to detect the case we need to set EDOM. */
2884 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2885 return NULL_RTX;
2886
2887 /* Make a suitable register to place result in. */
2888 mode = TYPE_MODE (TREE_TYPE (exp));
2889
2890 /* When errno does not need to be maintained, try expanding inline. */
2891 if (!flag_errno_math)
2892 {
2893 rtx result = gen_reg_rtx (mode);
2894
2895 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2896 need to expand the argument again. This way, we will not perform
2897 side-effects more than once. */
2898 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2899
2900 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2901
2902 start_sequence ();
2903
2904 if (expand_sfix_optab (result, op0, builtin_optab))
2905 {
2906 /* Output the entire sequence. */
2907 insns = get_insns ();
2908 end_sequence ();
2909 emit_insn (insns);
2910 return result;
2911 }
2912
2913 /* If we were unable to expand via the builtin, stop the sequence
2914 (without outputting the insns) and call the library function
2915 with the stabilized argument list. */
2916 end_sequence ();
2917 }
2918
2919 if (fallback_fn != BUILT_IN_NONE)
2920 {
2921 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2922 targets, (int) round (x) should never be transformed into
2923 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2924 a call to lround in the hope that the target provides at least some
2925 C99 functions. This should result in the best user experience on
2926 targets without full C99 support. */
2927 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2928 fallback_fn, 0);
2929
2930 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2931 fallback_fndecl, 1, arg);
2932
2933 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2934 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2935 return convert_to_mode (mode, target, 0);
2936 }
2937
2938 return expand_call (exp, target, target == const0_rtx);
2939 }
2940
2941 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2942 a normal call should be emitted rather than expanding the function
2943 in-line. EXP is the expression that is a call to the builtin
2944 function; if convenient, the result should be placed in TARGET. */
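/* For example, __builtin_powi (x, n) on a double X becomes a libgcc
   call roughly equivalent to

     double __powidf2 (double x, int n);

   through the powi_optab libfunc used below. */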
2945
2946 static rtx
2947 expand_builtin_powi (tree exp, rtx target)
2948 {
2949 tree arg0, arg1;
2950 rtx op0, op1;
2951 enum machine_mode mode;
2952 enum machine_mode mode2;
2953
2954 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2955 return NULL_RTX;
2956
2957 arg0 = CALL_EXPR_ARG (exp, 0);
2958 arg1 = CALL_EXPR_ARG (exp, 1);
2959 mode = TYPE_MODE (TREE_TYPE (exp));
2960
2961 /* Emit a libcall to libgcc. */
2962
2963 /* Mode of the 2nd argument must match that of an int. */
2964 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2965
2966 if (target == NULL_RTX)
2967 target = gen_reg_rtx (mode);
2968
2969 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2970 if (GET_MODE (op0) != mode)
2971 op0 = convert_to_mode (mode, op0, 0);
2972 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2973 if (GET_MODE (op1) != mode2)
2974 op1 = convert_to_mode (mode2, op1, 0);
2975
2976 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2977 target, LCT_CONST, mode, 2,
2978 op0, mode, op1, mode2);
2979
2980 return target;
2981 }
2982
2983 /* Expand expression EXP, which is a call to the strlen builtin. Return
2984 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
2985 try to get the result in TARGET, if convenient. */
2986
2987 static rtx
2988 expand_builtin_strlen (tree exp, rtx target,
2989 enum machine_mode target_mode)
2990 {
2991 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2992 return NULL_RTX;
2993 else
2994 {
2995 struct expand_operand ops[4];
2996 rtx pat;
2997 tree len;
2998 tree src = CALL_EXPR_ARG (exp, 0);
2999 rtx src_reg;
3000 rtx_insn *before_strlen;
3001 enum machine_mode insn_mode = target_mode;
3002 enum insn_code icode = CODE_FOR_nothing;
3003 unsigned int align;
3004
3005 /* If the length can be computed at compile-time, return it. */
3006 len = c_strlen (src, 0);
3007 if (len)
3008 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3009
3010 /* If the length can be computed at compile-time and is a constant
3011 integer, but there are side-effects in src, evaluate
3012 src for side-effects, then return len.
3013 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3014 can be optimized into: i++; x = 3; */
3015 len = c_strlen (src, 1);
3016 if (len && TREE_CODE (len) == INTEGER_CST)
3017 {
3018 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3019 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3020 }
3021
3022 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3023
3024 /* If SRC is not a pointer type, don't do this operation inline. */
3025 if (align == 0)
3026 return NULL_RTX;
3027
3028 /* Bail out if we can't compute strlen in the right mode. */
3029 while (insn_mode != VOIDmode)
3030 {
3031 icode = optab_handler (strlen_optab, insn_mode);
3032 if (icode != CODE_FOR_nothing)
3033 break;
3034
3035 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3036 }
3037 if (insn_mode == VOIDmode)
3038 return NULL_RTX;
3039
3040 /* Make a place to hold the source address. We will not expand
3041 the actual source until we are sure that the expansion will
3042 not fail -- there are trees that cannot be expanded twice. */
3043 src_reg = gen_reg_rtx (Pmode);
3044
3045 /* Mark the beginning of the strlen sequence so we can emit the
3046 source operand later. */
3047 before_strlen = get_last_insn ();
3048
3049 create_output_operand (&ops[0], target, insn_mode);
3050 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3051 create_integer_operand (&ops[2], 0);
3052 create_integer_operand (&ops[3], align);
3053 if (!maybe_expand_insn (icode, 4, ops))
3054 return NULL_RTX;
3055
3056 /* Now that we are assured of success, expand the source. */
3057 start_sequence ();
3058 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3059 if (pat != src_reg)
3060 {
3061 #ifdef POINTERS_EXTEND_UNSIGNED
3062 if (GET_MODE (pat) != Pmode)
3063 pat = convert_to_mode (Pmode, pat,
3064 POINTERS_EXTEND_UNSIGNED);
3065 #endif
3066 emit_move_insn (src_reg, pat);
3067 }
3068 pat = get_insns ();
3069 end_sequence ();
3070
3071 if (before_strlen)
3072 emit_insn_after (pat, before_strlen);
3073 else
3074 emit_insn_before (pat, get_insns ());
3075
3076 /* Return the value in the proper mode for this function. */
3077 if (GET_MODE (ops[0].value) == target_mode)
3078 target = ops[0].value;
3079 else if (target != 0)
3080 convert_move (target, ops[0].value, 0);
3081 else
3082 target = convert_to_mode (target_mode, ops[0].value, 0);
3083
3084 return target;
3085 }
3086 }
3087
3088 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3089 bytes from constant string DATA + OFFSET and return it as target
3090 constant. */
3091
3092 static rtx
3093 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3094 enum machine_mode mode)
3095 {
3096 const char *str = (const char *) data;
3097
3098 gcc_assert (offset >= 0
3099 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3100 <= strlen (str) + 1));
3101
3102 return c_readstr (str + offset, mode);
3103 }
3104
3105 /* LEN specifies the length of the block for a memcpy/memset operation.
3106 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3107 In some cases we can make a very likely guess on the max size, which we
3108 then set into PROBABLE_MAX_SIZE. */
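/* For instance, given

     if (n < 100)
       memcpy (a, b, n);

   with unsigned N, value range propagation may bound N to [0, 99], so
   MIN_SIZE becomes 0 and MAX_SIZE/PROBABLE_MAX_SIZE become 99. */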
3109
3110 static void
3111 determine_block_size (tree len, rtx len_rtx,
3112 unsigned HOST_WIDE_INT *min_size,
3113 unsigned HOST_WIDE_INT *max_size,
3114 unsigned HOST_WIDE_INT *probable_max_size)
3115 {
3116 if (CONST_INT_P (len_rtx))
3117 {
3118 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3119 return;
3120 }
3121 else
3122 {
3123 wide_int min, max;
3124 enum value_range_type range_type = VR_UNDEFINED;
3125
3126 /* Determine bounds from the type. */
3127 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3128 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3129 else
3130 *min_size = 0;
3131 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3132 *probable_max_size = *max_size
3133 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3134 else
3135 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3136
3137 if (TREE_CODE (len) == SSA_NAME)
3138 range_type = get_range_info (len, &min, &max);
3139 if (range_type == VR_RANGE)
3140 {
3141 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3142 *min_size = min.to_uhwi ();
3143 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3144 *probable_max_size = *max_size = max.to_uhwi ();
3145 }
3146 else if (range_type == VR_ANTI_RANGE)
3147 {
3148 /* An anti range 0...N lets us determine the minimal size as N+1. */
3149 if (min == 0)
3150 {
3151 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3152 *min_size = max.to_uhwi () + 1;
3153 }
3154 /* Code like
3155
3156 int n;
3157 if (n < 100)
3158 memcpy (a, b, n)
3159
3160 produces an anti range allowing negative values of N. We can still
3161 use the information to guess that N is not negative.
3162 */
3163 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3164 *probable_max_size = min.to_uhwi () - 1;
3165 }
3166 }
3167 gcc_checking_assert (*max_size <=
3168 (unsigned HOST_WIDE_INT)
3169 GET_MODE_MASK (GET_MODE (len_rtx)));
3170 }
3171
3172 /* Expand a call EXP to the memcpy builtin.
3173 Return NULL_RTX if we failed; the caller should then emit a normal call.
3174 Otherwise try to get the result in TARGET, if convenient (and in
3175 mode MODE if that's convenient). */
3176
3177 static rtx
3178 expand_builtin_memcpy (tree exp, rtx target)
3179 {
3180 if (!validate_arglist (exp,
3181 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3182 return NULL_RTX;
3183 else
3184 {
3185 tree dest = CALL_EXPR_ARG (exp, 0);
3186 tree src = CALL_EXPR_ARG (exp, 1);
3187 tree len = CALL_EXPR_ARG (exp, 2);
3188 const char *src_str;
3189 unsigned int src_align = get_pointer_alignment (src);
3190 unsigned int dest_align = get_pointer_alignment (dest);
3191 rtx dest_mem, src_mem, dest_addr, len_rtx;
3192 HOST_WIDE_INT expected_size = -1;
3193 unsigned int expected_align = 0;
3194 unsigned HOST_WIDE_INT min_size;
3195 unsigned HOST_WIDE_INT max_size;
3196 unsigned HOST_WIDE_INT probable_max_size;
3197
3198 /* If DEST is not a pointer type, call the normal function. */
3199 if (dest_align == 0)
3200 return NULL_RTX;
3201
3202 /* If SRC is not a pointer type, don't do this
3203 operation in-line. */
3204 if (src_align == 0)
3205 return NULL_RTX;
3206
3207 if (currently_expanding_gimple_stmt)
3208 stringop_block_profile (currently_expanding_gimple_stmt,
3209 &expected_align, &expected_size);
3210
3211 if (expected_align < dest_align)
3212 expected_align = dest_align;
3213 dest_mem = get_memory_rtx (dest, len);
3214 set_mem_align (dest_mem, dest_align);
3215 len_rtx = expand_normal (len);
3216 determine_block_size (len, len_rtx, &min_size, &max_size,
3217 &probable_max_size);
3218 src_str = c_getstr (src);
3219
3220 /* If SRC is a string constant and block move would be done
3221 by pieces, we can avoid loading the string from memory
3222 and only store the computed constants. */
3223 if (src_str
3224 && CONST_INT_P (len_rtx)
3225 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3226 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3227 CONST_CAST (char *, src_str),
3228 dest_align, false))
3229 {
3230 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3231 builtin_memcpy_read_str,
3232 CONST_CAST (char *, src_str),
3233 dest_align, false, 0);
3234 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3235 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3236 return dest_mem;
3237 }
3238
3239 src_mem = get_memory_rtx (src, len);
3240 set_mem_align (src_mem, src_align);
3241
3242 /* Copy word part most expediently. */
3243 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3244 CALL_EXPR_TAILCALL (exp)
3245 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3246 expected_align, expected_size,
3247 min_size, max_size, probable_max_size);
3248
3249 if (dest_addr == 0)
3250 {
3251 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3252 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3253 }
3254 return dest_addr;
3255 }
3256 }
3257
3258 /* Expand a call EXP to the mempcpy builtin.
3259 Return NULL_RTX if we failed; the caller should emit a normal call,
3260 otherwise try to get the result in TARGET, if convenient (and in
3261 mode MODE if that's convenient). If ENDP is 0 return the
3262 destination pointer, if ENDP is 1 return the end pointer ala
3263 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3264 stpcpy. */
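/* For instance, for a 3-byte copy to DEST, ENDP == 0 yields DEST,
   ENDP == 1 yields DEST + 3 (mempcpy), and ENDP == 2 yields DEST + 2
   (stpcpy, pointing at the copied NUL). */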
3265
3266 static rtx
3267 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3268 {
3269 if (!validate_arglist (exp,
3270 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3271 return NULL_RTX;
3272 else
3273 {
3274 tree dest = CALL_EXPR_ARG (exp, 0);
3275 tree src = CALL_EXPR_ARG (exp, 1);
3276 tree len = CALL_EXPR_ARG (exp, 2);
3277 return expand_builtin_mempcpy_args (dest, src, len,
3278 target, mode, /*endp=*/ 1);
3279 }
3280 }
3281
3282 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3283 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3284 so that this can also be called without constructing an actual CALL_EXPR.
3285 The other arguments and return value are the same as for
3286 expand_builtin_mempcpy. */
3287
3288 static rtx
3289 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3290 rtx target, enum machine_mode mode, int endp)
3291 {
3292 /* If return value is ignored, transform mempcpy into memcpy. */
3293 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3294 {
3295 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3296 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3297 dest, src, len);
3298 return expand_expr (result, target, mode, EXPAND_NORMAL);
3299 }
3300 else
3301 {
3302 const char *src_str;
3303 unsigned int src_align = get_pointer_alignment (src);
3304 unsigned int dest_align = get_pointer_alignment (dest);
3305 rtx dest_mem, src_mem, len_rtx;
3306
3307 /* If either SRC or DEST is not a pointer type, don't do this
3308 operation in-line. */
3309 if (dest_align == 0 || src_align == 0)
3310 return NULL_RTX;
3311
3312 /* If LEN is not constant, call the normal function. */
3313 if (! tree_fits_uhwi_p (len))
3314 return NULL_RTX;
3315
3316 len_rtx = expand_normal (len);
3317 src_str = c_getstr (src);
3318
3319 /* If SRC is a string constant and block move would be done
3320 by pieces, we can avoid loading the string from memory
3321 and only store the computed constants. */
3322 if (src_str
3323 && CONST_INT_P (len_rtx)
3324 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3325 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3326 CONST_CAST (char *, src_str),
3327 dest_align, false))
3328 {
3329 dest_mem = get_memory_rtx (dest, len);
3330 set_mem_align (dest_mem, dest_align);
3331 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3332 builtin_memcpy_read_str,
3333 CONST_CAST (char *, src_str),
3334 dest_align, false, endp);
3335 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3336 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3337 return dest_mem;
3338 }
3339
3340 if (CONST_INT_P (len_rtx)
3341 && can_move_by_pieces (INTVAL (len_rtx),
3342 MIN (dest_align, src_align)))
3343 {
3344 dest_mem = get_memory_rtx (dest, len);
3345 set_mem_align (dest_mem, dest_align);
3346 src_mem = get_memory_rtx (src, len);
3347 set_mem_align (src_mem, src_align);
3348 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3349 MIN (dest_align, src_align), endp);
3350 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3351 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3352 return dest_mem;
3353 }
3354
3355 return NULL_RTX;
3356 }
3357 }
3358
3359 #ifndef HAVE_movstr
3360 # define HAVE_movstr 0
3361 # define CODE_FOR_movstr CODE_FOR_nothing
3362 #endif
3363
3364 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3365 we failed; the caller should emit a normal call, otherwise try to
3366 get the result in TARGET, if convenient. If ENDP is 0 return the
3367 destination pointer, if ENDP is 1 return the end pointer ala
3368 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3369 stpcpy. */
3370
3371 static rtx
3372 expand_movstr (tree dest, tree src, rtx target, int endp)
3373 {
3374 struct expand_operand ops[3];
3375 rtx dest_mem;
3376 rtx src_mem;
3377
3378 if (!HAVE_movstr)
3379 return NULL_RTX;
3380
3381 dest_mem = get_memory_rtx (dest, NULL);
3382 src_mem = get_memory_rtx (src, NULL);
3383 if (!endp)
3384 {
3385 target = force_reg (Pmode, XEXP (dest_mem, 0));
3386 dest_mem = replace_equiv_address (dest_mem, target);
3387 }
3388
3389 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3390 create_fixed_operand (&ops[1], dest_mem);
3391 create_fixed_operand (&ops[2], src_mem);
3392 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3393 return NULL_RTX;
3394
3395 if (endp && target != const0_rtx)
3396 {
3397 target = ops[0].value;
3398 /* movstr is supposed to set end to the address of the NUL
3399 terminator. If the caller requested a mempcpy-like return value,
3400 adjust it. */
3401 if (endp == 1)
3402 {
3403 rtx tem = plus_constant (GET_MODE (target),
3404 gen_lowpart (GET_MODE (target), target), 1);
3405 emit_move_insn (target, force_operand (tem, NULL_RTX));
3406 }
3407 }
3408 return target;
3409 }
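
/* A worked example (illustrative, not from the original code): copying
   SRC == "hi" leaves the movstr end pointer at &DEST[2], the NUL.  A
   mempcpy-style caller (ENDP == 1) expects &DEST[3], one past the NUL,
   since strlen ("hi") + 1 == 3 bytes were copied; hence the
   plus_constant adjustment by 1 above.  */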
3410
3411 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3412 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3413 try to get the result in TARGET, if convenient (and in mode MODE if that's
3414 convenient). */
3415
3416 static rtx
3417 expand_builtin_strcpy (tree exp, rtx target)
3418 {
3419 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3420 {
3421 tree dest = CALL_EXPR_ARG (exp, 0);
3422 tree src = CALL_EXPR_ARG (exp, 1);
3423 return expand_builtin_strcpy_args (dest, src, target);
3424 }
3425 return NULL_RTX;
3426 }
3427
3428 /* Helper function to do the actual work for expand_builtin_strcpy. The
3429 arguments to the builtin_strcpy call DEST and SRC are broken out
3430 so that this can also be called without constructing an actual CALL_EXPR.
3431 The other arguments and return value are the same as for
3432 expand_builtin_strcpy. */
3433
3434 static rtx
3435 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3436 {
3437 return expand_movstr (dest, src, target, /*endp=*/0);
3438 }
3439
3440 /* Expand a call EXP to the stpcpy builtin.
3441 Return NULL_RTX if we failed; the caller should emit a normal call,
3442 otherwise try to get the result in TARGET, if convenient (and in
3443 mode MODE if that's convenient). */
3444
3445 static rtx
3446 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3447 {
3448 tree dst, src;
3449 location_t loc = EXPR_LOCATION (exp);
3450
3451 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3452 return NULL_RTX;
3453
3454 dst = CALL_EXPR_ARG (exp, 0);
3455 src = CALL_EXPR_ARG (exp, 1);
3456
3457 /* If return value is ignored, transform stpcpy into strcpy. */
3458 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3459 {
3460 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3461 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3462 return expand_expr (result, target, mode, EXPAND_NORMAL);
3463 }
3464 else
3465 {
3466 tree len, lenp1;
3467 rtx ret;
3468
3469 /* Ensure we get an actual string whose length can be evaluated at
3470 compile-time, not an expression containing a string. This is
3471 because the latter will potentially produce pessimized code
3472 when used to produce the return value. */
3473 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3474 return expand_movstr (dst, src, target, /*endp=*/2);
3475
3476 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3477 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3478 target, mode, /*endp=*/2);
3479
3480 if (ret)
3481 return ret;
3482
3483 if (TREE_CODE (len) == INTEGER_CST)
3484 {
3485 rtx len_rtx = expand_normal (len);
3486
3487 if (CONST_INT_P (len_rtx))
3488 {
3489 ret = expand_builtin_strcpy_args (dst, src, target);
3490
3491 if (ret)
3492 {
3493 if (! target)
3494 {
3495 if (mode != VOIDmode)
3496 target = gen_reg_rtx (mode);
3497 else
3498 target = gen_reg_rtx (GET_MODE (ret));
3499 }
3500 if (GET_MODE (target) != GET_MODE (ret))
3501 ret = gen_lowpart (GET_MODE (target), ret);
3502
3503 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3504 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3505 gcc_assert (ret);
3506
3507 return target;
3508 }
3509 }
3510 }
3511
3512 return expand_movstr (dst, src, target, /*endp=*/2);
3513 }
3514 }
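
/* Illustrative sketch (not part of the original code): when the source is
   a known string,

     char *q = stpcpy (dst, "abc");

   is expanded as a mempcpy of strlen ("abc") + 1 == 4 bytes with
   ENDP == 2, so Q becomes DST + 3, the address of the copied NUL.  */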
3515
3516 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3517 bytes from constant string DATA + OFFSET and return it as target
3518 constant. */
3519
3520 rtx
3521 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3522 enum machine_mode mode)
3523 {
3524 const char *str = (const char *) data;
3525
3526 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3527 return const0_rtx;
3528
3529 return c_readstr (str + offset, mode);
3530 }
3531
3532 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3533 NULL_RTX if we failed; the caller should emit a normal call. */
3534
3535 static rtx
3536 expand_builtin_strncpy (tree exp, rtx target)
3537 {
3538 location_t loc = EXPR_LOCATION (exp);
3539
3540 if (validate_arglist (exp,
3541 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3542 {
3543 tree dest = CALL_EXPR_ARG (exp, 0);
3544 tree src = CALL_EXPR_ARG (exp, 1);
3545 tree len = CALL_EXPR_ARG (exp, 2);
3546 tree slen = c_strlen (src, 1);
3547
3548 /* We must be passed a constant len and src parameter. */
3549 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3550 return NULL_RTX;
3551
3552 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3553
3554 /* We're required to pad with trailing zeros if the requested
3555 len is greater than strlen(s2)+1. In that case try to
3556 use store_by_pieces; if it fails, punt. */
3557 if (tree_int_cst_lt (slen, len))
3558 {
3559 unsigned int dest_align = get_pointer_alignment (dest);
3560 const char *p = c_getstr (src);
3561 rtx dest_mem;
3562
3563 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3564 || !can_store_by_pieces (tree_to_uhwi (len),
3565 builtin_strncpy_read_str,
3566 CONST_CAST (char *, p),
3567 dest_align, false))
3568 return NULL_RTX;
3569
3570 dest_mem = get_memory_rtx (dest, len);
3571 store_by_pieces (dest_mem, tree_to_uhwi (len),
3572 builtin_strncpy_read_str,
3573 CONST_CAST (char *, p), dest_align, false, 0);
3574 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3575 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3576 return dest_mem;
3577 }
3578 }
3579 return NULL_RTX;
3580 }
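
/* For illustration (not part of the original code): strncpy (buf, "ab", 8)
   must store 'a', 'b' and then six NUL bytes.  Because the read callback
   above returns const0_rtx for offsets past the end of "ab",
   store_by_pieces emits that padded image directly, without loading the
   string from memory at run time.  */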
3581
3582 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3583 bytes from constant string DATA + OFFSET and return it as target
3584 constant. */
3585
3586 rtx
3587 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3588 enum machine_mode mode)
3589 {
3590 const char *c = (const char *) data;
3591 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3592
3593 memset (p, *c, GET_MODE_SIZE (mode));
3594
3595 return c_readstr (p, mode);
3596 }
3597
3598 /* Callback routine for store_by_pieces. Return the RTL of a register
3599 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3600 char value given in the RTL register data. For example, if mode is
3601 4 bytes wide, return the RTL for 0x01010101*data. */
3602
3603 static rtx
3604 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3605 enum machine_mode mode)
3606 {
3607 rtx target, coeff;
3608 size_t size;
3609 char *p;
3610
3611 size = GET_MODE_SIZE (mode);
3612 if (size == 1)
3613 return (rtx) data;
3614
3615 p = XALLOCAVEC (char, size);
3616 memset (p, 1, size);
3617 coeff = c_readstr (p, mode);
3618
3619 target = convert_to_mode (mode, (rtx) data, 1);
3620 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3621 return force_reg (mode, target);
3622 }
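
/* The multiply trick above, as plain C (an illustrative sketch assuming a
   32-bit mode and <stdint.h> types; splat4 is a hypothetical name):

     uint32_t
     splat4 (uint8_t c)
     {
       return (uint32_t) c * 0x01010101u;
     }

   so a byte of 0xAB becomes 0xABABABAB.  */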
3623
3624 /* Expand expression EXP, which is a call to the memset builtin. Return
3625 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3626 try to get the result in TARGET, if convenient (and in mode MODE if that's
3627 convenient). */
3628
3629 static rtx
3630 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3631 {
3632 if (!validate_arglist (exp,
3633 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3634 return NULL_RTX;
3635 else
3636 {
3637 tree dest = CALL_EXPR_ARG (exp, 0);
3638 tree val = CALL_EXPR_ARG (exp, 1);
3639 tree len = CALL_EXPR_ARG (exp, 2);
3640 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3641 }
3642 }
3643
3644 /* Helper function to do the actual work for expand_builtin_memset. The
3645 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3646 so that this can also be called without constructing an actual CALL_EXPR.
3647 The other arguments and return value are the same as for
3648 expand_builtin_memset. */
3649
3650 static rtx
3651 expand_builtin_memset_args (tree dest, tree val, tree len,
3652 rtx target, enum machine_mode mode, tree orig_exp)
3653 {
3654 tree fndecl, fn;
3655 enum built_in_function fcode;
3656 enum machine_mode val_mode;
3657 char c;
3658 unsigned int dest_align;
3659 rtx dest_mem, dest_addr, len_rtx;
3660 HOST_WIDE_INT expected_size = -1;
3661 unsigned int expected_align = 0;
3662 unsigned HOST_WIDE_INT min_size;
3663 unsigned HOST_WIDE_INT max_size;
3664 unsigned HOST_WIDE_INT probable_max_size;
3665
3666 dest_align = get_pointer_alignment (dest);
3667
3668 /* If DEST is not a pointer type, don't do this operation in-line. */
3669 if (dest_align == 0)
3670 return NULL_RTX;
3671
3672 if (currently_expanding_gimple_stmt)
3673 stringop_block_profile (currently_expanding_gimple_stmt,
3674 &expected_align, &expected_size);
3675
3676 if (expected_align < dest_align)
3677 expected_align = dest_align;
3678
3679 /* If the LEN parameter is zero, return DEST. */
3680 if (integer_zerop (len))
3681 {
3682 /* Evaluate and ignore VAL in case it has side-effects. */
3683 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3684 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3685 }
3686
3687 /* Stabilize the arguments in case we fail. */
3688 dest = builtin_save_expr (dest);
3689 val = builtin_save_expr (val);
3690 len = builtin_save_expr (len);
3691
3692 len_rtx = expand_normal (len);
3693 determine_block_size (len, len_rtx, &min_size, &max_size,
3694 &probable_max_size);
3695 dest_mem = get_memory_rtx (dest, len);
3696 val_mode = TYPE_MODE (unsigned_char_type_node);
3697
3698 if (TREE_CODE (val) != INTEGER_CST)
3699 {
3700 rtx val_rtx;
3701
3702 val_rtx = expand_normal (val);
3703 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3704
3705 /* Assume that we can memset by pieces if we can store
3706 the coefficients by pieces (in the required modes).
3707 We can't pass builtin_memset_gen_str as that emits RTL. */
3708 c = 1;
3709 if (tree_fits_uhwi_p (len)
3710 && can_store_by_pieces (tree_to_uhwi (len),
3711 builtin_memset_read_str, &c, dest_align,
3712 true))
3713 {
3714 val_rtx = force_reg (val_mode, val_rtx);
3715 store_by_pieces (dest_mem, tree_to_uhwi (len),
3716 builtin_memset_gen_str, val_rtx, dest_align,
3717 true, 0);
3718 }
3719 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3720 dest_align, expected_align,
3721 expected_size, min_size, max_size,
3722 probable_max_size))
3723 goto do_libcall;
3724
3725 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3726 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3727 return dest_mem;
3728 }
3729
3730 if (target_char_cast (val, &c))
3731 goto do_libcall;
3732
3733 if (c)
3734 {
3735 if (tree_fits_uhwi_p (len)
3736 && can_store_by_pieces (tree_to_uhwi (len),
3737 builtin_memset_read_str, &c, dest_align,
3738 true))
3739 store_by_pieces (dest_mem, tree_to_uhwi (len),
3740 builtin_memset_read_str, &c, dest_align, true, 0);
3741 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3742 gen_int_mode (c, val_mode),
3743 dest_align, expected_align,
3744 expected_size, min_size, max_size,
3745 probable_max_size))
3746 goto do_libcall;
3747
3748 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3749 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3750 return dest_mem;
3751 }
3752
3753 set_mem_align (dest_mem, dest_align);
3754 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3755 CALL_EXPR_TAILCALL (orig_exp)
3756 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3757 expected_align, expected_size,
3758 min_size, max_size,
3759 probable_max_size);
3760
3761 if (dest_addr == 0)
3762 {
3763 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3764 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3765 }
3766
3767 return dest_addr;
3768
3769 do_libcall:
3770 fndecl = get_callee_fndecl (orig_exp);
3771 fcode = DECL_FUNCTION_CODE (fndecl);
3772 if (fcode == BUILT_IN_MEMSET)
3773 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3774 dest, val, len);
3775 else if (fcode == BUILT_IN_BZERO)
3776 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3777 dest, len);
3778 else
3779 gcc_unreachable ();
3780 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3781 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3782 return expand_call (fn, target, target == const0_rtx);
3783 }
3784
3785 /* Expand expression EXP, which is a call to the bzero builtin. Return
3786 NULL_RTX if we failed; the caller should emit a normal call. */
3787
3788 static rtx
3789 expand_builtin_bzero (tree exp)
3790 {
3791 tree dest, size;
3792 location_t loc = EXPR_LOCATION (exp);
3793
3794 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3795 return NULL_RTX;
3796
3797 dest = CALL_EXPR_ARG (exp, 0);
3798 size = CALL_EXPR_ARG (exp, 1);
3799
3800 /* New argument list transforming bzero(ptr x, int y) to
3801 memset(ptr x, int 0, size_t y). This is done this way
3802 so that if it isn't expanded inline, we fall back to
3803 calling bzero instead of memset. */
3804
3805 return expand_builtin_memset_args (dest, integer_zero_node,
3806 fold_convert_loc (loc,
3807 size_type_node, size),
3808 const0_rtx, VOIDmode, exp);
3809 }
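
/* Illustrative sketch (not part of the original code): under this
   transformation

     bzero (p, n);

   expands exactly as memset (p, 0, (size_t) n) would, except that the
   fallback library call, if one turns out to be needed, still targets
   bzero rather than memset.  */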
3810
3811 /* Expand expression EXP, which is a call to the memcmp built-in function.
3812 Return NULL_RTX if we failed and the caller should emit a normal call,
3813 otherwise try to get the result in TARGET, if convenient (and in mode
3814 MODE, if that's convenient). */
3815
3816 static rtx
3817 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3818 ATTRIBUTE_UNUSED enum machine_mode mode)
3819 {
3820 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3821
3822 if (!validate_arglist (exp,
3823 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3824 return NULL_RTX;
3825
3826 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3827 implementing memcmp because it will stop if it encounters a zero
3828 byte in both strings, whereas memcmp must compare all LEN bytes. */
3829 #if defined HAVE_cmpmemsi
3830 {
3831 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3832 rtx result;
3833 rtx insn;
3834 tree arg1 = CALL_EXPR_ARG (exp, 0);
3835 tree arg2 = CALL_EXPR_ARG (exp, 1);
3836 tree len = CALL_EXPR_ARG (exp, 2);
3837
3838 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3839 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3840 enum machine_mode insn_mode;
3841
3842 if (HAVE_cmpmemsi)
3843 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3844 else
3845 return NULL_RTX;
3846
3847 /* If we don't have POINTER_TYPE, call the function. */
3848 if (arg1_align == 0 || arg2_align == 0)
3849 return NULL_RTX;
3850
3851 /* Make a place to write the result of the instruction. */
3852 result = target;
3853 if (! (result != 0
3854 && REG_P (result) && GET_MODE (result) == insn_mode
3855 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3856 result = gen_reg_rtx (insn_mode);
3857
3858 arg1_rtx = get_memory_rtx (arg1, len);
3859 arg2_rtx = get_memory_rtx (arg2, len);
3860 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3861
3862 /* Set MEM_SIZE as appropriate. */
3863 if (CONST_INT_P (arg3_rtx))
3864 {
3865 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3866 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3867 }
3868
3869 if (HAVE_cmpmemsi)
3870 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3871 GEN_INT (MIN (arg1_align, arg2_align)));
3872 else
3873 gcc_unreachable ();
3874
3875 if (insn)
3876 emit_insn (insn);
3877 else
3878 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3879 TYPE_MODE (integer_type_node), 3,
3880 XEXP (arg1_rtx, 0), Pmode,
3881 XEXP (arg2_rtx, 0), Pmode,
3882 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3883 TYPE_UNSIGNED (sizetype)),
3884 TYPE_MODE (sizetype));
3885
3886 /* Return the value in the proper mode for this function. */
3887 mode = TYPE_MODE (TREE_TYPE (exp));
3888 if (GET_MODE (result) == mode)
3889 return result;
3890 else if (target != 0)
3891 {
3892 convert_move (target, result, 0);
3893 return target;
3894 }
3895 else
3896 return convert_to_mode (mode, result, 0);
3897 }
3898 #endif /* HAVE_cmpmemsi. */
3899
3900 return NULL_RTX;
3901 }
3902
3903 /* Expand expression EXP, which is a call to the strcmp builtin. Return
3904 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3905 try to get the result in TARGET, if convenient. */
3906
3907 static rtx
3908 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3909 {
3910 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3911 return NULL_RTX;
3912
3913 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3914 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3915 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3916 {
3917 rtx arg1_rtx, arg2_rtx;
3918 rtx result, insn = NULL_RTX;
3919 tree fndecl, fn;
3920 tree arg1 = CALL_EXPR_ARG (exp, 0);
3921 tree arg2 = CALL_EXPR_ARG (exp, 1);
3922
3923 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3924 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3925
3926 /* If we don't have POINTER_TYPE, call the function. */
3927 if (arg1_align == 0 || arg2_align == 0)
3928 return NULL_RTX;
3929
3930 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3931 arg1 = builtin_save_expr (arg1);
3932 arg2 = builtin_save_expr (arg2);
3933
3934 arg1_rtx = get_memory_rtx (arg1, NULL);
3935 arg2_rtx = get_memory_rtx (arg2, NULL);
3936
3937 #ifdef HAVE_cmpstrsi
3938 /* Try to call cmpstrsi. */
3939 if (HAVE_cmpstrsi)
3940 {
3941 enum machine_mode insn_mode
3942 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3943
3944 /* Make a place to write the result of the instruction. */
3945 result = target;
3946 if (! (result != 0
3947 && REG_P (result) && GET_MODE (result) == insn_mode
3948 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3949 result = gen_reg_rtx (insn_mode);
3950
3951 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3952 GEN_INT (MIN (arg1_align, arg2_align)));
3953 }
3954 #endif
3955 #ifdef HAVE_cmpstrnsi
3956 /* Try to determine at least one length and call cmpstrnsi. */
3957 if (!insn && HAVE_cmpstrnsi)
3958 {
3959 tree len;
3960 rtx arg3_rtx;
3961
3962 enum machine_mode insn_mode
3963 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3964 tree len1 = c_strlen (arg1, 1);
3965 tree len2 = c_strlen (arg2, 1);
3966
3967 if (len1)
3968 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3969 if (len2)
3970 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3971
3972 /* If we don't have a constant length for the first, use the length
3973 of the second, if we know it. We don't require a constant for
3974 this case; some cost analysis could be done if both are available
3975 but neither is constant. For now, assume they're equally cheap,
3976 unless one has side effects. If both strings have constant lengths,
3977 use the smaller. */
3978
3979 if (!len1)
3980 len = len2;
3981 else if (!len2)
3982 len = len1;
3983 else if (TREE_SIDE_EFFECTS (len1))
3984 len = len2;
3985 else if (TREE_SIDE_EFFECTS (len2))
3986 len = len1;
3987 else if (TREE_CODE (len1) != INTEGER_CST)
3988 len = len2;
3989 else if (TREE_CODE (len2) != INTEGER_CST)
3990 len = len1;
3991 else if (tree_int_cst_lt (len1, len2))
3992 len = len1;
3993 else
3994 len = len2;
3995
3996 /* If both arguments have side effects, we cannot optimize. */
3997 if (!len || TREE_SIDE_EFFECTS (len))
3998 goto do_libcall;
3999
4000 arg3_rtx = expand_normal (len);
4001
4002 /* Make a place to write the result of the instruction. */
4003 result = target;
4004 if (! (result != 0
4005 && REG_P (result) && GET_MODE (result) == insn_mode
4006 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4007 result = gen_reg_rtx (insn_mode);
4008
4009 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4010 GEN_INT (MIN (arg1_align, arg2_align)));
4011 }
4012 #endif
4013
4014 if (insn)
4015 {
4016 enum machine_mode mode;
4017 emit_insn (insn);
4018
4019 /* Return the value in the proper mode for this function. */
4020 mode = TYPE_MODE (TREE_TYPE (exp));
4021 if (GET_MODE (result) == mode)
4022 return result;
4023 if (target == 0)
4024 return convert_to_mode (mode, result, 0);
4025 convert_move (target, result, 0);
4026 return target;
4027 }
4028
4029 /* Expand the library call ourselves using a stabilized argument
4030 list to avoid re-evaluating the function's arguments twice. */
4031 #ifdef HAVE_cmpstrnsi
4032 do_libcall:
4033 #endif
4034 fndecl = get_callee_fndecl (exp);
4035 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4036 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4037 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4038 return expand_call (fn, target, target == const0_rtx);
4039 }
4040 #endif
4041 return NULL_RTX;
4042 }
4043
4044 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4045 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4046 try to get the result in TARGET, if convenient. */
4047
4048 static rtx
4049 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4050 ATTRIBUTE_UNUSED enum machine_mode mode)
4051 {
4052 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4053
4054 if (!validate_arglist (exp,
4055 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4056 return NULL_RTX;
4057
4058 /* If c_strlen can determine an expression for one of the string
4059 lengths, and it doesn't have side effects, then emit cmpstrnsi
4060 using length MIN(strlen(string)+1, arg3). */
4061 #ifdef HAVE_cmpstrnsi
4062 if (HAVE_cmpstrnsi)
4063 {
4064 tree len, len1, len2;
4065 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4066 rtx result, insn;
4067 tree fndecl, fn;
4068 tree arg1 = CALL_EXPR_ARG (exp, 0);
4069 tree arg2 = CALL_EXPR_ARG (exp, 1);
4070 tree arg3 = CALL_EXPR_ARG (exp, 2);
4071
4072 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4073 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4074 enum machine_mode insn_mode
4075 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4076
4077 len1 = c_strlen (arg1, 1);
4078 len2 = c_strlen (arg2, 1);
4079
4080 if (len1)
4081 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4082 if (len2)
4083 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4084
4085 /* If we don't have a constant length for the first, use the length
4086 of the second, if we know it. We don't require a constant for
4087 this case; some cost analysis could be done if both are available
4088 but neither is constant. For now, assume they're equally cheap,
4089 unless one has side effects. If both strings have constant lengths,
4090 use the smaller. */
4091
4092 if (!len1)
4093 len = len2;
4094 else if (!len2)
4095 len = len1;
4096 else if (TREE_SIDE_EFFECTS (len1))
4097 len = len2;
4098 else if (TREE_SIDE_EFFECTS (len2))
4099 len = len1;
4100 else if (TREE_CODE (len1) != INTEGER_CST)
4101 len = len2;
4102 else if (TREE_CODE (len2) != INTEGER_CST)
4103 len = len1;
4104 else if (tree_int_cst_lt (len1, len2))
4105 len = len1;
4106 else
4107 len = len2;
4108
4109 /* If both arguments have side effects, we cannot optimize. */
4110 if (!len || TREE_SIDE_EFFECTS (len))
4111 return NULL_RTX;
4112
4113 /* The actual new length parameter is MIN(len,arg3). */
4114 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4115 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4116
4117 /* If we don't have POINTER_TYPE, call the function. */
4118 if (arg1_align == 0 || arg2_align == 0)
4119 return NULL_RTX;
4120
4121 /* Make a place to write the result of the instruction. */
4122 result = target;
4123 if (! (result != 0
4124 && REG_P (result) && GET_MODE (result) == insn_mode
4125 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4126 result = gen_reg_rtx (insn_mode);
4127
4128 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4129 arg1 = builtin_save_expr (arg1);
4130 arg2 = builtin_save_expr (arg2);
4131 len = builtin_save_expr (len);
4132
4133 arg1_rtx = get_memory_rtx (arg1, len);
4134 arg2_rtx = get_memory_rtx (arg2, len);
4135 arg3_rtx = expand_normal (len);
4136 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4137 GEN_INT (MIN (arg1_align, arg2_align)));
4138 if (insn)
4139 {
4140 emit_insn (insn);
4141
4142 /* Return the value in the proper mode for this function. */
4143 mode = TYPE_MODE (TREE_TYPE (exp));
4144 if (GET_MODE (result) == mode)
4145 return result;
4146 if (target == 0)
4147 return convert_to_mode (mode, result, 0);
4148 convert_move (target, result, 0);
4149 return target;
4150 }
4151
4152 /* Expand the library call ourselves using a stabilized argument
4153 list to avoid re-evaluating the function's arguments twice. */
4154 fndecl = get_callee_fndecl (exp);
4155 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4156 arg1, arg2, len);
4157 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4158 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4159 return expand_call (fn, target, target == const0_rtx);
4160 }
4161 #endif
4162 return NULL_RTX;
4163 }
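
/* A worked example (illustrative, not part of the original code): for

     strncmp (s, "hello", 100)

   c_strlen gives 5 for the literal, so the cmpstrnsi length becomes
   MIN (5 + 1, 100) == 6: enough to cover the terminating NUL, beyond
   which the strings cannot differ.  */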
4164
4165 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4166 if that's convenient. */
4167
4168 rtx
4169 expand_builtin_saveregs (void)
4170 {
4171 rtx val;
4172 rtx_insn *seq;
4173
4174 /* Don't do __builtin_saveregs more than once in a function.
4175 Save the result of the first call and reuse it. */
4176 if (saveregs_value != 0)
4177 return saveregs_value;
4178
4179 /* When this function is called, it means that registers must be
4180 saved on entry to this function. So we migrate the call to the
4181 first insn of this function. */
4182
4183 start_sequence ();
4184
4185 /* Do whatever the machine needs done in this case. */
4186 val = targetm.calls.expand_builtin_saveregs ();
4187
4188 seq = get_insns ();
4189 end_sequence ();
4190
4191 saveregs_value = val;
4192
4193 /* Put the insns after the NOTE that starts the function. If this
4194 is inside a start_sequence, make the outer-level insn chain current, so
4195 the code is placed at the start of the function. */
4196 push_topmost_sequence ();
4197 emit_insn_after (seq, entry_of_function ());
4198 pop_topmost_sequence ();
4199
4200 return val;
4201 }
4202
4203 /* Expand a call to __builtin_next_arg. */
4204
4205 static rtx
4206 expand_builtin_next_arg (void)
4207 {
4208 /* Checking arguments is already done in fold_builtin_next_arg
4209 that must be called before this function. */
4210 return expand_binop (ptr_mode, add_optab,
4211 crtl->args.internal_arg_pointer,
4212 crtl->args.arg_offset_rtx,
4213 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4214 }
4215
4216 /* Make it easier for the backends by protecting the valist argument
4217 from multiple evaluations. */
4218
4219 static tree
4220 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4221 {
4222 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4223
4224 /* The current way of determining the type of valist is completely
4225 bogus. We should have the information on the va builtin instead. */
4226 if (!vatype)
4227 vatype = targetm.fn_abi_va_list (cfun->decl);
4228
4229 if (TREE_CODE (vatype) == ARRAY_TYPE)
4230 {
4231 if (TREE_SIDE_EFFECTS (valist))
4232 valist = save_expr (valist);
4233
4234 /* For this case, the backends will be expecting a pointer to
4235 vatype, but it's possible we've actually been given an array
4236 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4237 So fix it. */
4238 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4239 {
4240 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4241 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4242 }
4243 }
4244 else
4245 {
4246 tree pt = build_pointer_type (vatype);
4247
4248 if (! needs_lvalue)
4249 {
4250 if (! TREE_SIDE_EFFECTS (valist))
4251 return valist;
4252
4253 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4254 TREE_SIDE_EFFECTS (valist) = 1;
4255 }
4256
4257 if (TREE_SIDE_EFFECTS (valist))
4258 valist = save_expr (valist);
4259 valist = fold_build2_loc (loc, MEM_REF,
4260 vatype, valist, build_int_cst (pt, 0));
4261 }
4262
4263 return valist;
4264 }
4265
4266 /* The "standard" definition of va_list is void*. */
4267
4268 tree
4269 std_build_builtin_va_list (void)
4270 {
4271 return ptr_type_node;
4272 }
4273
4274 /* The "standard" abi va_list is va_list_type_node. */
4275
4276 tree
4277 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4278 {
4279 return va_list_type_node;
4280 }
4281
4282 /* The "standard" type of va_list is va_list_type_node. */
4283
4284 tree
4285 std_canonical_va_list_type (tree type)
4286 {
4287 tree wtype, htype;
4288
4289 if (INDIRECT_REF_P (type))
4290 type = TREE_TYPE (type);
4291 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4292 type = TREE_TYPE (type);
4293 wtype = va_list_type_node;
4294 htype = type;
4295 /* Treat structure va_list types. */
4296 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4297 htype = TREE_TYPE (htype);
4298 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4299 {
4300 /* If va_list is an array type, the argument may have decayed
4301 to a pointer type, e.g. by being passed to another function.
4302 In that case, unwrap both types so that we can compare the
4303 underlying records. */
4304 if (TREE_CODE (htype) == ARRAY_TYPE
4305 || POINTER_TYPE_P (htype))
4306 {
4307 wtype = TREE_TYPE (wtype);
4308 htype = TREE_TYPE (htype);
4309 }
4310 }
4311 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4312 return va_list_type_node;
4313
4314 return NULL_TREE;
4315 }
4316
4317 /* The "standard" implementation of va_start: just assign `nextarg' to
4318 the variable. */
4319
4320 void
4321 std_expand_builtin_va_start (tree valist, rtx nextarg)
4322 {
4323 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4324 convert_move (va_r, nextarg, 0);
4325 }
4326
4327 /* Expand EXP, a call to __builtin_va_start. */
4328
4329 static rtx
4330 expand_builtin_va_start (tree exp)
4331 {
4332 rtx nextarg;
4333 tree valist;
4334 location_t loc = EXPR_LOCATION (exp);
4335
4336 if (call_expr_nargs (exp) < 2)
4337 {
4338 error_at (loc, "too few arguments to function %<va_start%>");
4339 return const0_rtx;
4340 }
4341
4342 if (fold_builtin_next_arg (exp, true))
4343 return const0_rtx;
4344
4345 nextarg = expand_builtin_next_arg ();
4346 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4347
4348 if (targetm.expand_builtin_va_start)
4349 targetm.expand_builtin_va_start (valist, nextarg);
4350 else
4351 std_expand_builtin_va_start (valist, nextarg);
4352
4353 return const0_rtx;
4354 }
4355
4356 /* Expand EXP, a call to __builtin_va_end. */
4357
4358 static rtx
4359 expand_builtin_va_end (tree exp)
4360 {
4361 tree valist = CALL_EXPR_ARG (exp, 0);
4362
4363 /* Evaluate for side effects, if needed. I hate macros that don't
4364 do that. */
4365 if (TREE_SIDE_EFFECTS (valist))
4366 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4367
4368 return const0_rtx;
4369 }
4370
4371 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4372 builtin rather than just as an assignment in stdarg.h because of the
4373 nastiness of array-type va_list types. */
4374
4375 static rtx
4376 expand_builtin_va_copy (tree exp)
4377 {
4378 tree dst, src, t;
4379 location_t loc = EXPR_LOCATION (exp);
4380
4381 dst = CALL_EXPR_ARG (exp, 0);
4382 src = CALL_EXPR_ARG (exp, 1);
4383
4384 dst = stabilize_va_list_loc (loc, dst, 1);
4385 src = stabilize_va_list_loc (loc, src, 0);
4386
4387 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4388
4389 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4390 {
4391 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4392 TREE_SIDE_EFFECTS (t) = 1;
4393 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4394 }
4395 else
4396 {
4397 rtx dstb, srcb, size;
4398
4399 /* Evaluate to pointers. */
4400 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4401 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4402 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4403 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4404
4405 dstb = convert_memory_address (Pmode, dstb);
4406 srcb = convert_memory_address (Pmode, srcb);
4407
4408 /* "Dereference" to BLKmode memories. */
4409 dstb = gen_rtx_MEM (BLKmode, dstb);
4410 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4411 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4412 srcb = gen_rtx_MEM (BLKmode, srcb);
4413 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4414 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4415
4416 /* Copy. */
4417 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4418 }
4419
4420 return const0_rtx;
4421 }
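
/* For illustration (not part of the original code): on targets where
   va_list is an array of one structure, as on x86-64, a va_list object
   decays to a pointer when used as an rvalue, so plain assignment cannot
   copy it; the BLKmode block move above copies the underlying structure
   itself.  */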
4422
4423 /* Expand a call to one of the builtin functions __builtin_frame_address or
4424 __builtin_return_address. */
4425
4426 static rtx
4427 expand_builtin_frame_address (tree fndecl, tree exp)
4428 {
4429 /* The argument must be a nonnegative integer constant.
4430 It counts the number of frames to scan up the stack.
4431 The value is the return address or frame address of that frame. */
4432 if (call_expr_nargs (exp) == 0)
4433 /* Warning about missing arg was already issued. */
4434 return const0_rtx;
4435 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4436 {
4437 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4438 error ("invalid argument to %<__builtin_frame_address%>");
4439 else
4440 error ("invalid argument to %<__builtin_return_address%>");
4441 return const0_rtx;
4442 }
4443 else
4444 {
4445 rtx tem
4446 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4447 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4448
4449 /* Some ports cannot access arbitrary stack frames. */
4450 if (tem == NULL)
4451 {
4452 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4453 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4454 else
4455 warning (0, "unsupported argument to %<__builtin_return_address%>");
4456 return const0_rtx;
4457 }
4458
4459 /* For __builtin_frame_address, return what we've got. */
4460 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4461 return tem;
4462
4463 if (!REG_P (tem)
4464 && ! CONSTANT_P (tem))
4465 tem = copy_addr_to_reg (tem);
4466 return tem;
4467 }
4468 }
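
/* Typical usage (illustrative, not part of the original code):

     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (0);

   A zero argument names the current frame; larger constants walk up the
   stack and, as noted above, may be rejected by ports that cannot access
   arbitrary frames.  */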
4469
4470 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4471 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4472 is the same as for allocate_dynamic_stack_space. */
4473
4474 static rtx
4475 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4476 {
4477 rtx op0;
4478 rtx result;
4479 bool valid_arglist;
4480 unsigned int align;
4481 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4482 == BUILT_IN_ALLOCA_WITH_ALIGN);
4483
4484 valid_arglist
4485 = (alloca_with_align
4486 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4487 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4488
4489 if (!valid_arglist)
4490 return NULL_RTX;
4491
4492 /* Compute the argument. */
4493 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4494
4495 /* Compute the alignment. */
4496 align = (alloca_with_align
4497 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4498 : BIGGEST_ALIGNMENT);
4499
4500 /* Allocate the desired space. */
4501 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4502 result = convert_memory_address (ptr_mode, result);
4503
4504 return result;
4505 }
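
/* Illustrative usage (not part of the original code; note that the
   alignment argument is measured in bits):

     void *p = __builtin_alloca_with_align (n, 256);

   requests N bytes aligned to 256 bits (32 bytes); for plain
   __builtin_alloca the expansion above uses BIGGEST_ALIGNMENT instead.  */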
4506
4507 /* Expand a call to bswap builtin in EXP.
4508 Return NULL_RTX if a normal call should be emitted rather than expanding the
4509 function in-line. If convenient, the result should be placed in TARGET.
4510 SUBTARGET may be used as the target for computing one of EXP's operands. */
4511
4512 static rtx
4513 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4514 rtx subtarget)
4515 {
4516 tree arg;
4517 rtx op0;
4518
4519 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4520 return NULL_RTX;
4521
4522 arg = CALL_EXPR_ARG (exp, 0);
4523 op0 = expand_expr (arg,
4524 subtarget && GET_MODE (subtarget) == target_mode
4525 ? subtarget : NULL_RTX,
4526 target_mode, EXPAND_NORMAL);
4527 if (GET_MODE (op0) != target_mode)
4528 op0 = convert_to_mode (target_mode, op0, 1);
4529
4530 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4531
4532 gcc_assert (target);
4533
4534 return convert_to_mode (target_mode, target, 1);
4535 }
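
/* For example (illustrative, not part of the original code):

     __builtin_bswap32 (0x12345678) == 0x78563412

   i.e. the operand's bytes are reversed end for end.  */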
4536
4537 /* Expand a call to a unary builtin in EXP.
4538 Return NULL_RTX if a normal call should be emitted rather than expanding the
4539 function in-line. If convenient, the result should be placed in TARGET.
4540 SUBTARGET may be used as the target for computing one of EXP's operands. */
4541
4542 static rtx
4543 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4544 rtx subtarget, optab op_optab)
4545 {
4546 rtx op0;
4547
4548 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4549 return NULL_RTX;
4550
4551 /* Compute the argument. */
4552 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4553 (subtarget
4554 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4555 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4556 VOIDmode, EXPAND_NORMAL);
4557 /* Compute op, into TARGET if possible.
4558 Set TARGET to wherever the result comes back. */
4559 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4560 op_optab, op0, target, op_optab != clrsb_optab);
4561 gcc_assert (target);
4562
4563 return convert_to_mode (target_mode, target, 0);
4564 }
4565
4566 /* Expand a call to __builtin_expect. We just return our argument
4567 as the builtin_expect semantics should already have been handled by
4568 the tree branch prediction pass. */
4569
4570 static rtx
4571 expand_builtin_expect (tree exp, rtx target)
4572 {
4573 tree arg;
4574
4575 if (call_expr_nargs (exp) < 2)
4576 return const0_rtx;
4577 arg = CALL_EXPR_ARG (exp, 0);
4578
4579 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4580 /* When guessing was done, the hints should be already stripped away. */
4581 gcc_assert (!flag_guess_branch_prob
4582 || optimize == 0 || seen_error ());
4583 return target;
4584 }
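
/* Typical usage (illustrative, not part of the original code;
   handle_error is a hypothetical callee):

     if (__builtin_expect (err != 0, 0))
       handle_error ();

   By expansion time the hint has already steered the tree branch
   prediction pass, so only the first argument is returned here.  */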
4585
4586 /* Expand a call to __builtin_assume_aligned. We just return our first
4587 argument as the builtin_assume_aligned semantics should already have
4588 been handled by CCP. */
4589
4590 static rtx
4591 expand_builtin_assume_aligned (tree exp, rtx target)
4592 {
4593 if (call_expr_nargs (exp) < 2)
4594 return const0_rtx;
4595 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4596 EXPAND_NORMAL);
4597 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4598 && (call_expr_nargs (exp) < 3
4599 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4600 return target;
4601 }
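
/* Typical usage (illustrative, not part of the original code):

     double *q = __builtin_assume_aligned (p, 16);

   The alignment fact itself is consumed by CCP before expansion; here
   only the first argument survives.  */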
4602
4603 void
4604 expand_builtin_trap (void)
4605 {
4606 #ifdef HAVE_trap
4607 if (HAVE_trap)
4608 {
4609 rtx insn = emit_insn (gen_trap ());
4610 /* For trap insns when not accumulating outgoing args force
4611 REG_ARGS_SIZE note to prevent crossjumping of calls with
4612 different args sizes. */
4613 if (!ACCUMULATE_OUTGOING_ARGS)
4614 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4615 }
4616 else
4617 #endif
4618 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4619 emit_barrier ();
4620 }
4621
4622 /* Expand a call to __builtin_unreachable. We do nothing except emit
4623 a barrier saying that control flow will not pass here.
4624
4625 It is the responsibility of the program being compiled to ensure
4626 that control flow never reaches __builtin_unreachable. */
4627 static void
4628 expand_builtin_unreachable (void)
4629 {
4630 emit_barrier ();
4631 }
4632
4633 /* Expand EXP, a call to fabs, fabsf or fabsl.
4634 Return NULL_RTX if a normal call should be emitted rather than expanding
4635 the function inline. If convenient, the result should be placed
4636 in TARGET. SUBTARGET may be used as the target for computing
4637 the operand. */
4638
4639 static rtx
4640 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4641 {
4642 enum machine_mode mode;
4643 tree arg;
4644 rtx op0;
4645
4646 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4647 return NULL_RTX;
4648
4649 arg = CALL_EXPR_ARG (exp, 0);
4650 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4651 mode = TYPE_MODE (TREE_TYPE (arg));
4652 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4653 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4654 }
4655
4656 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4657 Return NULL if a normal call should be emitted rather than expanding the
4658 function inline. If convenient, the result should be placed in TARGET.
4659 SUBTARGET may be used as the target for computing the operand. */
4660
4661 static rtx
4662 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4663 {
4664 rtx op0, op1;
4665 tree arg;
4666
4667 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4668 return NULL_RTX;
4669
4670 arg = CALL_EXPR_ARG (exp, 0);
4671 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4672
4673 arg = CALL_EXPR_ARG (exp, 1);
4674 op1 = expand_normal (arg);
4675
4676 return expand_copysign (op0, op1, target);
4677 }
4678
4679 /* Expand a call to __builtin___clear_cache. */
4680
4681 static rtx
4682 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4683 {
4684 #ifndef HAVE_clear_cache
4685 #ifdef CLEAR_INSN_CACHE
4686 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4687 does something. Just do the default expansion to a call to
4688 __clear_cache(). */
4689 return NULL_RTX;
4690 #else
4691 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4692 does nothing. There is no need to call it. Do nothing. */
4693 return const0_rtx;
4694 #endif /* CLEAR_INSN_CACHE */
4695 #else
4696 /* We have a "clear_cache" insn, and it will handle everything. */
4697 tree begin, end;
4698 rtx begin_rtx, end_rtx;
4699
4700 /* We must not expand to a library call. If we did, any
4701 fallback library function in libgcc that might contain a call to
4702 __builtin___clear_cache() would recurse infinitely. */
4703 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4704 {
4705 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4706 return const0_rtx;
4707 }
4708
4709 if (HAVE_clear_cache)
4710 {
4711 struct expand_operand ops[2];
4712
4713 begin = CALL_EXPR_ARG (exp, 0);
4714 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4715
4716 end = CALL_EXPR_ARG (exp, 1);
4717 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4718
4719 create_address_operand (&ops[0], begin_rtx);
4720 create_address_operand (&ops[1], end_rtx);
4721 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4722 return const0_rtx;
4723 }
4724 return const0_rtx;
4725 #endif /* HAVE_clear_cache */
4726 }
4727
4728 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4729
4730 static rtx
4731 round_trampoline_addr (rtx tramp)
4732 {
4733 rtx temp, addend, mask;
4734
4735 /* If we don't need too much alignment, we'll have been guaranteed
4736 proper alignment by get_trampoline_type. */
4737 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4738 return tramp;
4739
4740 /* Round address up to desired boundary. */
4741 temp = gen_reg_rtx (Pmode);
4742 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4743 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4744
4745 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4746 temp, 0, OPTAB_LIB_WIDEN);
4747 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4748 temp, 0, OPTAB_LIB_WIDEN);
4749
4750 return tramp;
4751 }
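
/* A worked example (illustrative, not part of the original code): with a
   TRAMPOLINE_ALIGNMENT of 64 bits, ADDEND is 8 - 1 == 7 and MASK is -8,
   so an address of 0x1003 rounds up to (0x1003 + 7) & -8 == 0x1008.  */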
4752
4753 static rtx
4754 expand_builtin_init_trampoline (tree exp, bool onstack)
4755 {
4756 tree t_tramp, t_func, t_chain;
4757 rtx m_tramp, r_tramp, r_chain, tmp;
4758
4759 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4760 POINTER_TYPE, VOID_TYPE))
4761 return NULL_RTX;
4762
4763 t_tramp = CALL_EXPR_ARG (exp, 0);
4764 t_func = CALL_EXPR_ARG (exp, 1);
4765 t_chain = CALL_EXPR_ARG (exp, 2);
4766
4767 r_tramp = expand_normal (t_tramp);
4768 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4769 MEM_NOTRAP_P (m_tramp) = 1;
4770
4771 /* If ONSTACK, the TRAMP argument should be the address of a field
4772 within the local function's FRAME decl. Either way, let's see if
4773 we can fill in the MEM_ATTRs for this memory. */
4774 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4775 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4776
4777 /* Creator of a heap trampoline is responsible for making sure the
4778 address is aligned to at least STACK_BOUNDARY. Normally malloc
4779 will ensure this anyhow. */
4780 tmp = round_trampoline_addr (r_tramp);
4781 if (tmp != r_tramp)
4782 {
4783 m_tramp = change_address (m_tramp, BLKmode, tmp);
4784 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4785 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4786 }
4787
4788 /* The FUNC argument should be the address of the nested function.
4789 Extract the actual function decl to pass to the hook. */
4790 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4791 t_func = TREE_OPERAND (t_func, 0);
4792 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4793
4794 r_chain = expand_normal (t_chain);
4795
4796 /* Generate insns to initialize the trampoline. */
4797 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4798
4799 if (onstack)
4800 {
4801 trampolines_created = 1;
4802
4803 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4804 "trampoline generated for nested function %qD", t_func);
4805 }
4806
4807 return const0_rtx;
4808 }
4809
4810 static rtx
4811 expand_builtin_adjust_trampoline (tree exp)
4812 {
4813 rtx tramp;
4814
4815 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4816 return NULL_RTX;
4817
4818 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4819 tramp = round_trampoline_addr (tramp);
4820 if (targetm.calls.trampoline_adjust_address)
4821 tramp = targetm.calls.trampoline_adjust_address (tramp);
4822
4823 return tramp;
4824 }
4825
4826 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4827 function. The function first checks whether the back end provides
4828 an insn to implement signbit for the respective mode. If not, it
4829 checks whether the floating point format of the value is such that
4830 the sign bit can be extracted. If that is not the case, the
4831 function returns NULL_RTX to indicate that a normal call should be
4832 emitted rather than expanding the function in-line. EXP is the
4833 expression that is a call to the builtin function; if convenient,
4834 the result should be placed in TARGET. */
4835 static rtx
4836 expand_builtin_signbit (tree exp, rtx target)
4837 {
4838 const struct real_format *fmt;
4839 enum machine_mode fmode, imode, rmode;
4840 tree arg;
4841 int word, bitpos;
4842 enum insn_code icode;
4843 rtx temp;
4844 location_t loc = EXPR_LOCATION (exp);
4845
4846 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4847 return NULL_RTX;
4848
4849 arg = CALL_EXPR_ARG (exp, 0);
4850 fmode = TYPE_MODE (TREE_TYPE (arg));
4851 rmode = TYPE_MODE (TREE_TYPE (exp));
4852 fmt = REAL_MODE_FORMAT (fmode);
4853
4854 arg = builtin_save_expr (arg);
4855
4856 /* Expand the argument yielding a RTX expression. */
4857 temp = expand_normal (arg);
4858
4859 /* Check if the back end provides an insn that handles signbit for the
4860 argument's mode. */
4861 icode = optab_handler (signbit_optab, fmode);
4862 if (icode != CODE_FOR_nothing)
4863 {
4864 rtx_insn *last = get_last_insn ();
4865 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4866 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4867 return target;
4868 delete_insns_since (last);
4869 }
4870
4871 /* For floating point formats without a sign bit, implement signbit
4872 as "ARG < 0.0". */
4873 bitpos = fmt->signbit_ro;
4874 if (bitpos < 0)
4875 {
4876 /* But we can't do this if the format supports signed zero. */
4877 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4878 return NULL_RTX;
4879
4880 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4881 build_real (TREE_TYPE (arg), dconst0));
4882 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4883 }
4884
4885 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4886 {
4887 imode = int_mode_for_mode (fmode);
4888 if (imode == BLKmode)
4889 return NULL_RTX;
4890 temp = gen_lowpart (imode, temp);
4891 }
4892 else
4893 {
4894 imode = word_mode;
4895 /* Handle targets with different FP word orders. */
4896 if (FLOAT_WORDS_BIG_ENDIAN)
4897 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4898 else
4899 word = bitpos / BITS_PER_WORD;
4900 temp = operand_subword_force (temp, word, fmode);
4901 bitpos = bitpos % BITS_PER_WORD;
4902 }
4903
4904 /* Force the intermediate word_mode (or narrower) result into a
4905 register. This avoids attempting to create paradoxical SUBREGs
4906 of floating point modes below. */
4907 temp = force_reg (imode, temp);
4908
4909 /* If the bitpos is within the "result mode" lowpart, the operation
4910 can be implemented with a single bitwise AND. Otherwise, we need
4911 a right shift and an AND. */
4912
4913 if (bitpos < GET_MODE_BITSIZE (rmode))
4914 {
4915 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4916
4917 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4918 temp = gen_lowpart (rmode, temp);
4919 temp = expand_binop (rmode, and_optab, temp,
4920 immed_wide_int_const (mask, rmode),
4921 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4922 }
4923 else
4924 {
4925 /* Perform a logical right shift to place the signbit in the least
4926 significant bit, then truncate the result to the desired mode
4927 and mask just this bit. */
4928 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4929 temp = gen_lowpart (rmode, temp);
4930 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4931 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4932 }
4933
4934 return temp;
4935 }
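
/* Illustrative sketch (not part of the original code): for IEEE single
   precision, signbit_ro is 31, so with a 32-bit result mode the AND path
   applies and the expansion behaves like

     uint32_t bits;
     memcpy (&bits, &x, sizeof bits);
     return bits & 0x80000000u;

   which is nonzero iff the sign bit is set, as signbit requires; the
   shift path would compute (bits >> 31) & 1 instead.  */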
4936
4937 /* Expand fork or exec calls. TARGET is the desired target of the
4938 call. EXP is the call. FN is the
4939 identifier of the actual function. IGNORE is nonzero if the
4940 value is to be ignored. */
4941
4942 static rtx
4943 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4944 {
4945 tree id, decl;
4946 tree call;
4947
4948 /* If we are not profiling, just call the function. */
4949 if (!profile_arc_flag)
4950 return NULL_RTX;
4951
4952 /* Otherwise call the wrapper. This should be equivalent for the rest of
4953 compiler, so the code does not diverge, and the wrapper may run the
4954 code necessary for keeping the profiling sane. */
4955
4956 switch (DECL_FUNCTION_CODE (fn))
4957 {
4958 case BUILT_IN_FORK:
4959 id = get_identifier ("__gcov_fork");
4960 break;
4961
4962 case BUILT_IN_EXECL:
4963 id = get_identifier ("__gcov_execl");
4964 break;
4965
4966 case BUILT_IN_EXECV:
4967 id = get_identifier ("__gcov_execv");
4968 break;
4969
4970 case BUILT_IN_EXECLP:
4971 id = get_identifier ("__gcov_execlp");
4972 break;
4973
4974 case BUILT_IN_EXECLE:
4975 id = get_identifier ("__gcov_execle");
4976 break;
4977
4978 case BUILT_IN_EXECVP:
4979 id = get_identifier ("__gcov_execvp");
4980 break;
4981
4982 case BUILT_IN_EXECVE:
4983 id = get_identifier ("__gcov_execve");
4984 break;
4985
4986 default:
4987 gcc_unreachable ();
4988 }
4989
4990 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4991 FUNCTION_DECL, id, TREE_TYPE (fn));
4992 DECL_EXTERNAL (decl) = 1;
4993 TREE_PUBLIC (decl) = 1;
4994 DECL_ARTIFICIAL (decl) = 1;
4995 TREE_NOTHROW (decl) = 1;
4996 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4997 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4998 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4999 return expand_call (call, target, ignore);
5000 }
5001
5002
5003 \f
5004 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5005 the pointer in these functions is void*, the tree optimizers may remove
5006 casts. The mode computed in expand_builtin isn't reliable either, due
5007 to __sync_bool_compare_and_swap.
5008
5009 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5010 group of builtins. This gives us log2 of the mode size. */
5011
5012 static inline enum machine_mode
5013 get_builtin_sync_mode (int fcode_diff)
5014 {
5015 /* The size is not negotiable, so ask not to get BLKmode in return
5016 if the target indicates that a smaller size would be better. */
5017 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5018 }
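
/* For example (illustrative, not part of the original code): for
   __sync_fetch_and_add_4, FCODE_DIFF is BUILT_IN_SYNC_FETCH_AND_ADD_4
   minus BUILT_IN_SYNC_FETCH_AND_ADD_1, i.e. 2, so the size requested is
   BITS_PER_UNIT << 2 == 32 bits with the usual 8-bit unit: SImode on
   most targets.  */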
5019
5020 /* Expand the memory expression LOC and return the appropriate memory operand
5021 for the builtin_sync operations. */
5022
5023 static rtx
5024 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5025 {
5026 rtx addr, mem;
5027
5028 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5029 addr = convert_memory_address (Pmode, addr);
5030
5031 /* Note that we explicitly do not want any alias information for this
5032 memory, so that we kill all other live memories. Otherwise we don't
5033 satisfy the full barrier semantics of the intrinsic. */
5034 mem = validize_mem (gen_rtx_MEM (mode, addr));
5035
5036 /* The alignment needs to be at least as strict as that of the mode. */
5037 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5038 get_pointer_alignment (loc)));
5039 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5040 MEM_VOLATILE_P (mem) = 1;
5041
5042 return mem;
5043 }
5044
5045 /* Make sure an argument is in the right mode.
5046 EXP is the tree argument.
5047 MODE is the mode it should be in. */
5048
5049 static rtx
5050 expand_expr_force_mode (tree exp, enum machine_mode mode)
5051 {
5052 rtx val;
5053 enum machine_mode old_mode;
5054
5055 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5056 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5057 of CONST_INTs, where we know the old_mode only from the call argument. */
5058
5059 old_mode = GET_MODE (val);
5060 if (old_mode == VOIDmode)
5061 old_mode = TYPE_MODE (TREE_TYPE (exp));
5062 val = convert_modes (mode, old_mode, val, 1);
5063 return val;
5064 }
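
/* Example of the CONST_INT case this guards against (illustrative):
   the literal 1 in __sync_fetch_and_add (&x, 1) expands to a CONST_INT,
   which carries VOIDmode; OLD_MODE is then taken from the argument's
   type so the value can be correctly widened to MODE.  */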
5065
5066
5067 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5068 EXP is the CALL_EXPR. CODE is the rtx code
5069 that corresponds to the arithmetic or logical operation from the name;
5070 an exception here is that NOT actually means NAND. TARGET is an optional
5071    place for us to store the results; AFTER is true if this is the
5072    xxx_and_fetch form, i.e. the value after the operation is returned.  */
5073
5074 static rtx
5075 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5076 enum rtx_code code, bool after,
5077 rtx target)
5078 {
5079 rtx val, mem;
5080 location_t loc = EXPR_LOCATION (exp);
5081
5082 if (code == NOT && warn_sync_nand)
5083 {
5084 tree fndecl = get_callee_fndecl (exp);
5085 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5086
5087 static bool warned_f_a_n, warned_n_a_f;
5088
5089 switch (fcode)
5090 {
5091 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5092 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5093 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5094 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5095 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5096 if (warned_f_a_n)
5097 break;
5098
5099 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5100 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5101 warned_f_a_n = true;
5102 break;
5103
5104 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5105 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5106 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5107 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5108 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5109 if (warned_n_a_f)
5110 break;
5111
5112 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5113 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5114 warned_n_a_f = true;
5115 break;
5116
5117 default:
5118 gcc_unreachable ();
5119 }
5120 }
5121
5122 /* Expand the operands. */
5123 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5124 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5125
5126 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5127 after);
5128 }
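
/* Sketch of how the two intrinsic families reach this function
   (illustrative):

     int old_val = __sync_fetch_and_add (&counter, 5);   AFTER == false
     int new_val = __sync_add_and_fetch (&counter, 5);   AFTER == true

   Both arrive with CODE == PLUS and are expanded with seq-cst
   semantics; AFTER merely selects which value is produced.  */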
5129
5130 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5131 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5132 true if this is the boolean form. TARGET is a place for us to store the
5133 results; this is NOT optional if IS_BOOL is true. */
5134
5135 static rtx
5136 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5137 bool is_bool, rtx target)
5138 {
5139 rtx old_val, new_val, mem;
5140 rtx *pbool, *poval;
5141
5142 /* Expand the operands. */
5143 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5144 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5145 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5146
5147 pbool = poval = NULL;
5148 if (target != const0_rtx)
5149 {
5150 if (is_bool)
5151 pbool = &target;
5152 else
5153 poval = &target;
5154 }
5155 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5156 false, MEMMODEL_SEQ_CST,
5157 MEMMODEL_SEQ_CST))
5158 return NULL_RTX;
5159
5160 return target;
5161 }
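
/* Illustrative uses of the two forms handled here:

     bool ok = __sync_bool_compare_and_swap (&x, oldv, newv);
     long v  = __sync_val_compare_and_swap (&x, oldv, newv);

   The boolean form routes TARGET through PBOOL and the value form
   through POVAL; if the result is unused (TARGET == const0_rtx) both
   stay NULL and only the swap itself is emitted.  */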
5162
5163 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5164 general form is actually an atomic exchange, and some targets only
5165 support a reduced form with the second argument being a constant 1.
5166 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5167 the results. */
5168
5169 static rtx
5170 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5171 rtx target)
5172 {
5173 rtx val, mem;
5174
5175 /* Expand the operands. */
5176 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5177 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5178
5179 return expand_sync_lock_test_and_set (target, mem, val);
5180 }
5181
5182 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5183
5184 static void
5185 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5186 {
5187 rtx mem;
5188
5189 /* Expand the operands. */
5190 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5191
5192 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5193 }
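
/* Illustrative point: __sync_lock_release (&lock) is thus just a
   release-ordered store of zero, the counterpart of the acquire
   semantics of __sync_lock_test_and_set.  */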
5194
5195 /* Given an integer representing an ``enum memmodel'', verify its
5196 correctness and return the memory model enum. */
5197
5198 static enum memmodel
5199 get_memmodel (tree exp)
5200 {
5201 rtx op;
5202 unsigned HOST_WIDE_INT val;
5203
5204 /* If the parameter is not a constant, it's a run time value so we'll just
5205 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5206 if (TREE_CODE (exp) != INTEGER_CST)
5207 return MEMMODEL_SEQ_CST;
5208
5209 op = expand_normal (exp);
5210
5211 val = INTVAL (op);
5212 if (targetm.memmodel_check)
5213 val = targetm.memmodel_check (val);
5214 else if (val & ~MEMMODEL_MASK)
5215 {
5216 warning (OPT_Winvalid_memory_model,
5217 "Unknown architecture specifier in memory model to builtin.");
5218 return MEMMODEL_SEQ_CST;
5219 }
5220
5221 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5222 {
5223 warning (OPT_Winvalid_memory_model,
5224 "invalid memory model argument to builtin");
5225 return MEMMODEL_SEQ_CST;
5226 }
5227
5228 return (enum memmodel) val;
5229 }
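
/* Example (illustrative): in __atomic_load_n (&x, __ATOMIC_ACQUIRE)
   the model argument is the constant 2, which comes back as
   MEMMODEL_ACQUIRE; had the model been computed at run time, the
   conservative answer would be MEMMODEL_SEQ_CST as described above.  */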
5230
5231 /* Expand the __atomic_exchange intrinsic:
5232 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5233 EXP is the CALL_EXPR.
5234 TARGET is an optional place for us to store the results. */
5235
5236 static rtx
5237 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5238 {
5239 rtx val, mem;
5240 enum memmodel model;
5241
5242 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5243 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5244 {
5245 error ("invalid memory model for %<__atomic_exchange%>");
5246 return NULL_RTX;
5247 }
5248
5249 if (!flag_inline_atomics)
5250 return NULL_RTX;
5251
5252 /* Expand the operands. */
5253 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5254 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5255
5256 return expand_atomic_exchange (target, mem, val, model);
5257 }
5258
5259 /* Expand the __atomic_compare_exchange intrinsic:
5260 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5261 TYPE desired, BOOL weak,
5262 enum memmodel success,
5263 enum memmodel failure)
5264 EXP is the CALL_EXPR.
5265 TARGET is an optional place for us to store the results. */
5266
5267 static rtx
5268 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5269 rtx target)
5270 {
5271 rtx expect, desired, mem, oldval;
5272 rtx_code_label *label;
5273 enum memmodel success, failure;
5274 tree weak;
5275 bool is_weak;
5276
5277 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5278 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5279
5280 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5281 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5282 {
5283 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5284 return NULL_RTX;
5285 }
5286
5287 if (failure > success)
5288 {
5289 error ("failure memory model cannot be stronger than success "
5290 "memory model for %<__atomic_compare_exchange%>");
5291 return NULL_RTX;
5292 }
5293
5294 if (!flag_inline_atomics)
5295 return NULL_RTX;
5296
5297 /* Expand the operands. */
5298 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5299
5300 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5301 expect = convert_memory_address (Pmode, expect);
5302 expect = gen_rtx_MEM (mode, expect);
5303 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5304
5305 weak = CALL_EXPR_ARG (exp, 3);
5306 is_weak = false;
5307 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5308 is_weak = true;
5309
5310 if (target == const0_rtx)
5311 target = NULL;
5312
5313   /* Lest the rtl backend create a race condition with an improper store
5314 to memory, always create a new pseudo for OLDVAL. */
5315 oldval = NULL;
5316
5317 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5318 is_weak, success, failure))
5319 return NULL_RTX;
5320
5321 /* Conditionally store back to EXPECT, lest we create a race condition
5322 with an improper store to memory. */
5323 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5324 the normal case where EXPECT is totally private, i.e. a register. At
5325 which point the store can be unconditional. */
5326 label = gen_label_rtx ();
5327 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5328 emit_move_insn (expect, oldval);
5329 emit_label (label);
5330
5331 return target;
5332 }
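
/* A source-level sketch of why the write-back matters (illustrative):

     while (!__atomic_compare_exchange_n (&x, &expected, desired, 0,
                                          __ATOMIC_SEQ_CST,
                                          __ATOMIC_SEQ_CST))
       desired = f (expected);

   On failure the loop relies on EXPECT having been refreshed with the
   value observed in *MEM, which is exactly what the conditional store
   of OLDVAL above provides.  */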
5333
5334 /* Expand the __atomic_load intrinsic:
5335 TYPE __atomic_load (TYPE *object, enum memmodel)
5336 EXP is the CALL_EXPR.
5337 TARGET is an optional place for us to store the results. */
5338
5339 static rtx
5340 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5341 {
5342 rtx mem;
5343 enum memmodel model;
5344
5345 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5346 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5347 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5348 {
5349 error ("invalid memory model for %<__atomic_load%>");
5350 return NULL_RTX;
5351 }
5352
5353 if (!flag_inline_atomics)
5354 return NULL_RTX;
5355
5356 /* Expand the operand. */
5357 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5358
5359 return expand_atomic_load (target, mem, model);
5360 }
5361
5362
5363 /* Expand the __atomic_store intrinsic:
5364 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5365 EXP is the CALL_EXPR.
5366    The builtin returns void, so there is no TARGET.  */
5367
5368 static rtx
5369 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5370 {
5371 rtx mem, val;
5372 enum memmodel model;
5373
5374 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5375 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5376 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5377 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5378 {
5379 error ("invalid memory model for %<__atomic_store%>");
5380 return NULL_RTX;
5381 }
5382
5383 if (!flag_inline_atomics)
5384 return NULL_RTX;
5385
5386 /* Expand the operands. */
5387 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5388 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5389
5390 return expand_atomic_store (mem, val, model, false);
5391 }
5392
5393 /* Expand the __atomic_fetch_XXX intrinsic:
5394 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5395 EXP is the CALL_EXPR.
5396 TARGET is an optional place for us to store the results.
5397    CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5398 FETCH_AFTER is true if returning the result of the operation.
5399 FETCH_AFTER is false if returning the value before the operation.
5400 IGNORE is true if the result is not used.
5401 EXT_CALL is the correct builtin for an external call if this cannot be
5402 resolved to an instruction sequence. */
5403
5404 static rtx
5405 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5406 enum rtx_code code, bool fetch_after,
5407 bool ignore, enum built_in_function ext_call)
5408 {
5409 rtx val, mem, ret;
5410 enum memmodel model;
5411 tree fndecl;
5412 tree addr;
5413
5414 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5415
5416 /* Expand the operands. */
5417 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5418 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5419
5420 /* Only try generating instructions if inlining is turned on. */
5421 if (flag_inline_atomics)
5422 {
5423 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5424 if (ret)
5425 return ret;
5426 }
5427
5428   /* Return NULL_RTX if no substitute routine is needed for the library call.  */
5429 if (ext_call == BUILT_IN_NONE)
5430 return NULL_RTX;
5431
5432 /* Change the call to the specified function. */
5433 fndecl = get_callee_fndecl (exp);
5434 addr = CALL_EXPR_FN (exp);
5435 STRIP_NOPS (addr);
5436
5437 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5438 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5439
5440 /* Expand the call here so we can emit trailing code. */
5441 ret = expand_call (exp, target, ignore);
5442
5443   /* Restore the original function just in case it matters.  */
5444 TREE_OPERAND (addr, 0) = fndecl;
5445
5446 /* Then issue the arithmetic correction to return the right result. */
5447 if (!ignore)
5448 {
5449 if (code == NOT)
5450 {
5451 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5452 OPTAB_LIB_WIDEN);
5453 ret = expand_simple_unop (mode, NOT, ret, target, true);
5454 }
5455 else
5456 ret = expand_simple_binop (mode, code, ret, val, target, true,
5457 OPTAB_LIB_WIDEN);
5458 }
5459 return ret;
5460 }
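
/* Note on the correction above (illustrative): the external
   __atomic_fetch_OP routines return the pre-operation value, so when
   the OP_fetch flavor was requested we rebuild the post-operation
   result from RET and VAL; e.g. for the NAND builtins the returned
   value becomes ~(ret & val).  */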
5461
5462
5463 #ifndef HAVE_atomic_clear
5464 # define HAVE_atomic_clear 0
5465 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5466 #endif
5467
5468 /* Expand an atomic clear operation.
5469     void __atomic_clear (BOOL *obj, enum memmodel)
5470 EXP is the call expression. */
5471
5472 static rtx
5473 expand_builtin_atomic_clear (tree exp)
5474 {
5475 enum machine_mode mode;
5476 rtx mem, ret;
5477 enum memmodel model;
5478
5479 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5480 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5481 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5482
5483 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5484 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5485 {
5486 error ("invalid memory model for %<__atomic_store%>");
5487 return const0_rtx;
5488 }
5489
5490 if (HAVE_atomic_clear)
5491 {
5492 emit_insn (gen_atomic_clear (mem, model));
5493 return const0_rtx;
5494 }
5495
5496   /* Try issuing an __atomic_store pattern, allowing fallback to a
5497      __sync_lock_release pattern.  The only way this can
5498 fail is if the bool type is larger than a word size. Unlikely, but
5499 handle it anyway for completeness. Assume a single threaded model since
5500 there is no atomic support in this case, and no barriers are required. */
5501 ret = expand_atomic_store (mem, const0_rtx, model, true);
5502 if (!ret)
5503 emit_move_insn (mem, const0_rtx);
5504 return const0_rtx;
5505 }
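
/* Illustrative use: __atomic_clear (&flag, __ATOMIC_RELEASE) stores a
   zero with release semantics.  Acquire-flavored models are rejected
   above because a clear is a pure store and cannot carry acquire
   ordering.  */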
5506
5507 /* Expand an atomic test_and_set operation.
5508     bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5509 EXP is the call expression. */
5510
5511 static rtx
5512 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5513 {
5514 rtx mem;
5515 enum memmodel model;
5516 enum machine_mode mode;
5517
5518 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5519 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5520 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5521
5522 return expand_atomic_test_and_set (target, mem, model);
5523 }
5524
5525
5526 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5527 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5528
5529 static tree
5530 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5531 {
5532 int size;
5533 enum machine_mode mode;
5534 unsigned int mode_align, type_align;
5535
5536 if (TREE_CODE (arg0) != INTEGER_CST)
5537 return NULL_TREE;
5538
5539 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5540 mode = mode_for_size (size, MODE_INT, 0);
5541 mode_align = GET_MODE_ALIGNMENT (mode);
5542
5543 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5544 type_align = mode_align;
5545 else
5546 {
5547 tree ttype = TREE_TYPE (arg1);
5548
5549 /* This function is usually invoked and folded immediately by the front
5550 end before anything else has a chance to look at it. The pointer
5551 parameter at this point is usually cast to a void *, so check for that
5552 and look past the cast. */
5553 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5554 && VOID_TYPE_P (TREE_TYPE (ttype)))
5555 arg1 = TREE_OPERAND (arg1, 0);
5556
5557 ttype = TREE_TYPE (arg1);
5558 gcc_assert (POINTER_TYPE_P (ttype));
5559
5560 /* Get the underlying type of the object. */
5561 ttype = TREE_TYPE (ttype);
5562 type_align = TYPE_ALIGN (ttype);
5563 }
5564
5565   /* If the object has smaller alignment, the lock free routines cannot
5566 be used. */
5567 if (type_align < mode_align)
5568 return boolean_false_node;
5569
5570 /* Check if a compare_and_swap pattern exists for the mode which represents
5571 the required size. The pattern is not allowed to fail, so the existence
5572 of the pattern indicates support is present. */
5573 if (can_compare_and_swap_p (mode, true))
5574 return boolean_true_node;
5575 else
5576 return boolean_false_node;
5577 }
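
/* Worked example (illustrative; target-dependent): with a 32-bit
   compare-and-swap pattern available, __atomic_always_lock_free (4, 0)
   folds to true, while passing a pointer whose pointed-to type is only
   2-byte aligned folds to false, since TYPE_ALIGN is then smaller than
   GET_MODE_ALIGNMENT of the 32-bit mode.  */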
5578
5579 /* Return true if the parameters to call EXP represent an object which will
5580 always generate lock free instructions. The first argument represents the
5581 size of the object, and the second parameter is a pointer to the object
5582 itself. If NULL is passed for the object, then the result is based on
5583 typical alignment for an object of the specified size. Otherwise return
5584 false. */
5585
5586 static rtx
5587 expand_builtin_atomic_always_lock_free (tree exp)
5588 {
5589 tree size;
5590 tree arg0 = CALL_EXPR_ARG (exp, 0);
5591 tree arg1 = CALL_EXPR_ARG (exp, 1);
5592
5593 if (TREE_CODE (arg0) != INTEGER_CST)
5594 {
5595 error ("non-constant argument 1 to __atomic_always_lock_free");
5596 return const0_rtx;
5597 }
5598
5599 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5600 if (size == boolean_true_node)
5601 return const1_rtx;
5602 return const0_rtx;
5603 }
5604
5605 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5606    is lock free on this architecture.  */
5607
5608 static tree
5609 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5610 {
5611 if (!flag_inline_atomics)
5612 return NULL_TREE;
5613
5614   /* Generate a result only if the object is known to always be lock free.  */
5615 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5616 return boolean_true_node;
5617
5618 return NULL_TREE;
5619 }
5620
5621 /* Return const1_rtx if the parameters to call EXP represent an object which
5622    will always generate lock free instructions.  The first argument represents
5623    the size of the object, and the second parameter is a pointer to the object
5624    itself.  If NULL is passed for the object, then the result is based on
5625    typical alignment for an object of the specified size.  Otherwise return
5626    NULL_RTX.  */
5627
5628 static rtx
5629 expand_builtin_atomic_is_lock_free (tree exp)
5630 {
5631 tree size;
5632 tree arg0 = CALL_EXPR_ARG (exp, 0);
5633 tree arg1 = CALL_EXPR_ARG (exp, 1);
5634
5635 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5636 {
5637 error ("non-integer argument 1 to __atomic_is_lock_free");
5638 return NULL_RTX;
5639 }
5640
5641 if (!flag_inline_atomics)
5642 return NULL_RTX;
5643
5644 /* If the value is known at compile time, return the RTX for it. */
5645 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5646 if (size == boolean_true_node)
5647 return const1_rtx;
5648
5649 return NULL_RTX;
5650 }
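
/* Note (illustrative): a NULL_RTX result here does not mean "not lock
   free"; it makes the caller fall through to a real call to the
   __atomic_is_lock_free library routine (typically supplied by
   libatomic), which may also consider the run-time object address.  */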
5651
5652 /* Expand the __atomic_thread_fence intrinsic:
5653 void __atomic_thread_fence (enum memmodel)
5654 EXP is the CALL_EXPR. */
5655
5656 static void
5657 expand_builtin_atomic_thread_fence (tree exp)
5658 {
5659 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5660 expand_mem_thread_fence (model);
5661 }
5662
5663 /* Expand the __atomic_signal_fence intrinsic:
5664 void __atomic_signal_fence (enum memmodel)
5665 EXP is the CALL_EXPR. */
5666
5667 static void
5668 expand_builtin_atomic_signal_fence (tree exp)
5669 {
5670 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5671 expand_mem_signal_fence (model);
5672 }
5673
5674 /* Expand the __sync_synchronize intrinsic. */
5675
5676 static void
5677 expand_builtin_sync_synchronize (void)
5678 {
5679 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5680 }
5681
5682 static rtx
5683 expand_builtin_thread_pointer (tree exp, rtx target)
5684 {
5685 enum insn_code icode;
5686 if (!validate_arglist (exp, VOID_TYPE))
5687 return const0_rtx;
5688 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5689 if (icode != CODE_FOR_nothing)
5690 {
5691 struct expand_operand op;
5692       /* If the target is not suitable then create a new target.  */
5693 if (target == NULL_RTX
5694 || !REG_P (target)
5695 || GET_MODE (target) != Pmode)
5696 target = gen_reg_rtx (Pmode);
5697 create_output_operand (&op, target, Pmode);
5698 expand_insn (icode, 1, &op);
5699 return target;
5700 }
5701 error ("__builtin_thread_pointer is not supported on this target");
5702 return const0_rtx;
5703 }
5704
5705 static void
5706 expand_builtin_set_thread_pointer (tree exp)
5707 {
5708 enum insn_code icode;
5709 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5710 return;
5711 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5712 if (icode != CODE_FOR_nothing)
5713 {
5714 struct expand_operand op;
5715 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5716 Pmode, EXPAND_NORMAL);
5717 create_input_operand (&op, val, Pmode);
5718 expand_insn (icode, 1, &op);
5719 return;
5720 }
5721 error ("__builtin_set_thread_pointer is not supported on this target");
5722 }
5723
5724 \f
5725 /* Emit code to restore the stack pointer from the value saved in VAR.  */
5726
5727 static void
5728 expand_stack_restore (tree var)
5729 {
5730 rtx_insn *prev;
5731 rtx sa = expand_normal (var);
5732
5733 sa = convert_memory_address (Pmode, sa);
5734
5735 prev = get_last_insn ();
5736 emit_stack_restore (SAVE_BLOCK, sa);
5737 fixup_args_size_notes (prev, get_last_insn (), 0);
5738 }
5739
5740
5741 /* Emit code to save the current value of the stack pointer.  */
5742
5743 static rtx
5744 expand_stack_save (void)
5745 {
5746 rtx ret = NULL_RTX;
5747
5748 do_pending_stack_adjust ();
5749 emit_stack_save (SAVE_BLOCK, &ret);
5750 return ret;
5751 }
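
/* Sketch of the usual pairing (illustrative): the gimplifier brackets
   variable-length-array scopes roughly as

     p = __builtin_stack_save ();
     ... the VLA lives here ...
     __builtin_stack_restore (p);

   so the space is reclaimed when the scope is left.  */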
5752
5753 /* Expand an expression EXP that calls a built-in function,
5754 with result going to TARGET if that's convenient
5755 (and in mode MODE if that's convenient).
5756 SUBTARGET may be used as the target for computing one of EXP's operands.
5757 IGNORE is nonzero if the value is to be ignored. */
5758
5759 rtx
5760 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5761 int ignore)
5762 {
5763 tree fndecl = get_callee_fndecl (exp);
5764 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5765 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5766 int flags;
5767
5768 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5769 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5770
5771 /* When not optimizing, generate calls to library functions for a certain
5772 set of builtins. */
5773 if (!optimize
5774 && !called_as_built_in (fndecl)
5775 && fcode != BUILT_IN_FORK
5776 && fcode != BUILT_IN_EXECL
5777 && fcode != BUILT_IN_EXECV
5778 && fcode != BUILT_IN_EXECLP
5779 && fcode != BUILT_IN_EXECLE
5780 && fcode != BUILT_IN_EXECVP
5781 && fcode != BUILT_IN_EXECVE
5782 && fcode != BUILT_IN_ALLOCA
5783 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5784 && fcode != BUILT_IN_FREE)
5785 return expand_call (exp, target, ignore);
5786
5787 /* The built-in function expanders test for target == const0_rtx
5788 to determine whether the function's result will be ignored. */
5789 if (ignore)
5790 target = const0_rtx;
5791
5792 /* If the result of a pure or const built-in function is ignored, and
5793 none of its arguments are volatile, we can avoid expanding the
5794 built-in call and just evaluate the arguments for side-effects. */
5795 if (target == const0_rtx
5796 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5797 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5798 {
5799 bool volatilep = false;
5800 tree arg;
5801 call_expr_arg_iterator iter;
5802
5803 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5804 if (TREE_THIS_VOLATILE (arg))
5805 {
5806 volatilep = true;
5807 break;
5808 }
5809
5810 if (! volatilep)
5811 {
5812 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5813 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5814 return const0_rtx;
5815 }
5816 }
5817
5818 switch (fcode)
5819 {
5820 CASE_FLT_FN (BUILT_IN_FABS):
5821 case BUILT_IN_FABSD32:
5822 case BUILT_IN_FABSD64:
5823 case BUILT_IN_FABSD128:
5824 target = expand_builtin_fabs (exp, target, subtarget);
5825 if (target)
5826 return target;
5827 break;
5828
5829 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5830 target = expand_builtin_copysign (exp, target, subtarget);
5831 if (target)
5832 return target;
5833 break;
5834
5835 /* Just do a normal library call if we were unable to fold
5836 the values. */
5837 CASE_FLT_FN (BUILT_IN_CABS):
5838 break;
5839
5840 CASE_FLT_FN (BUILT_IN_EXP):
5841 CASE_FLT_FN (BUILT_IN_EXP10):
5842 CASE_FLT_FN (BUILT_IN_POW10):
5843 CASE_FLT_FN (BUILT_IN_EXP2):
5844 CASE_FLT_FN (BUILT_IN_EXPM1):
5845 CASE_FLT_FN (BUILT_IN_LOGB):
5846 CASE_FLT_FN (BUILT_IN_LOG):
5847 CASE_FLT_FN (BUILT_IN_LOG10):
5848 CASE_FLT_FN (BUILT_IN_LOG2):
5849 CASE_FLT_FN (BUILT_IN_LOG1P):
5850 CASE_FLT_FN (BUILT_IN_TAN):
5851 CASE_FLT_FN (BUILT_IN_ASIN):
5852 CASE_FLT_FN (BUILT_IN_ACOS):
5853 CASE_FLT_FN (BUILT_IN_ATAN):
5854 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5855 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5856 because of possible accuracy problems. */
5857 if (! flag_unsafe_math_optimizations)
5858 break;
5859 CASE_FLT_FN (BUILT_IN_SQRT):
5860 CASE_FLT_FN (BUILT_IN_FLOOR):
5861 CASE_FLT_FN (BUILT_IN_CEIL):
5862 CASE_FLT_FN (BUILT_IN_TRUNC):
5863 CASE_FLT_FN (BUILT_IN_ROUND):
5864 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5865 CASE_FLT_FN (BUILT_IN_RINT):
5866 target = expand_builtin_mathfn (exp, target, subtarget);
5867 if (target)
5868 return target;
5869 break;
5870
5871 CASE_FLT_FN (BUILT_IN_FMA):
5872 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5873 if (target)
5874 return target;
5875 break;
5876
5877 CASE_FLT_FN (BUILT_IN_ILOGB):
5878 if (! flag_unsafe_math_optimizations)
5879 break;
5880 CASE_FLT_FN (BUILT_IN_ISINF):
5881 CASE_FLT_FN (BUILT_IN_FINITE):
5882 case BUILT_IN_ISFINITE:
5883 case BUILT_IN_ISNORMAL:
5884 target = expand_builtin_interclass_mathfn (exp, target);
5885 if (target)
5886 return target;
5887 break;
5888
5889 CASE_FLT_FN (BUILT_IN_ICEIL):
5890 CASE_FLT_FN (BUILT_IN_LCEIL):
5891 CASE_FLT_FN (BUILT_IN_LLCEIL):
5892 CASE_FLT_FN (BUILT_IN_LFLOOR):
5893 CASE_FLT_FN (BUILT_IN_IFLOOR):
5894 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5895 target = expand_builtin_int_roundingfn (exp, target);
5896 if (target)
5897 return target;
5898 break;
5899
5900 CASE_FLT_FN (BUILT_IN_IRINT):
5901 CASE_FLT_FN (BUILT_IN_LRINT):
5902 CASE_FLT_FN (BUILT_IN_LLRINT):
5903 CASE_FLT_FN (BUILT_IN_IROUND):
5904 CASE_FLT_FN (BUILT_IN_LROUND):
5905 CASE_FLT_FN (BUILT_IN_LLROUND):
5906 target = expand_builtin_int_roundingfn_2 (exp, target);
5907 if (target)
5908 return target;
5909 break;
5910
5911 CASE_FLT_FN (BUILT_IN_POWI):
5912 target = expand_builtin_powi (exp, target);
5913 if (target)
5914 return target;
5915 break;
5916
5917 CASE_FLT_FN (BUILT_IN_ATAN2):
5918 CASE_FLT_FN (BUILT_IN_LDEXP):
5919 CASE_FLT_FN (BUILT_IN_SCALB):
5920 CASE_FLT_FN (BUILT_IN_SCALBN):
5921 CASE_FLT_FN (BUILT_IN_SCALBLN):
5922 if (! flag_unsafe_math_optimizations)
5923 break;
5924
5925 CASE_FLT_FN (BUILT_IN_FMOD):
5926 CASE_FLT_FN (BUILT_IN_REMAINDER):
5927 CASE_FLT_FN (BUILT_IN_DREM):
5928 CASE_FLT_FN (BUILT_IN_POW):
5929 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5930 if (target)
5931 return target;
5932 break;
5933
5934 CASE_FLT_FN (BUILT_IN_CEXPI):
5935 target = expand_builtin_cexpi (exp, target);
5936 gcc_assert (target);
5937 return target;
5938
5939 CASE_FLT_FN (BUILT_IN_SIN):
5940 CASE_FLT_FN (BUILT_IN_COS):
5941 if (! flag_unsafe_math_optimizations)
5942 break;
5943 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5944 if (target)
5945 return target;
5946 break;
5947
5948 CASE_FLT_FN (BUILT_IN_SINCOS):
5949 if (! flag_unsafe_math_optimizations)
5950 break;
5951 target = expand_builtin_sincos (exp);
5952 if (target)
5953 return target;
5954 break;
5955
5956 case BUILT_IN_APPLY_ARGS:
5957 return expand_builtin_apply_args ();
5958
5959 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5960 FUNCTION with a copy of the parameters described by
5961 ARGUMENTS, and ARGSIZE. It returns a block of memory
5962 allocated on the stack into which is stored all the registers
5963 that might possibly be used for returning the result of a
5964 function. ARGUMENTS is the value returned by
5965 __builtin_apply_args. ARGSIZE is the number of bytes of
5966 arguments that must be copied. ??? How should this value be
5967 computed? We'll also need a safe worst case value for varargs
5968 functions. */
5969 case BUILT_IN_APPLY:
5970 if (!validate_arglist (exp, POINTER_TYPE,
5971 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5972 && !validate_arglist (exp, REFERENCE_TYPE,
5973 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5974 return const0_rtx;
5975 else
5976 {
5977 rtx ops[3];
5978
5979 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5980 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5981 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5982
5983 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5984 }
5985
5986 /* __builtin_return (RESULT) causes the function to return the
5987 value described by RESULT. RESULT is address of the block of
5988 memory returned by __builtin_apply. */
5989 case BUILT_IN_RETURN:
5990 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5991 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5992 return const0_rtx;
5993
5994 case BUILT_IN_SAVEREGS:
5995 return expand_builtin_saveregs ();
5996
5997 case BUILT_IN_VA_ARG_PACK:
5998 /* All valid uses of __builtin_va_arg_pack () are removed during
5999 inlining. */
6000 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6001 return const0_rtx;
6002
6003 case BUILT_IN_VA_ARG_PACK_LEN:
6004 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6005 inlining. */
6006 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6007 return const0_rtx;
6008
6009 /* Return the address of the first anonymous stack arg. */
6010 case BUILT_IN_NEXT_ARG:
6011 if (fold_builtin_next_arg (exp, false))
6012 return const0_rtx;
6013 return expand_builtin_next_arg ();
6014
6015 case BUILT_IN_CLEAR_CACHE:
6016 target = expand_builtin___clear_cache (exp);
6017 if (target)
6018 return target;
6019 break;
6020
6021 case BUILT_IN_CLASSIFY_TYPE:
6022 return expand_builtin_classify_type (exp);
6023
6024 case BUILT_IN_CONSTANT_P:
6025 return const0_rtx;
6026
6027 case BUILT_IN_FRAME_ADDRESS:
6028 case BUILT_IN_RETURN_ADDRESS:
6029 return expand_builtin_frame_address (fndecl, exp);
6030
6031 /* Returns the address of the area where the structure is returned.
6032 0 otherwise. */
6033 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6034 if (call_expr_nargs (exp) != 0
6035 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6036 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6037 return const0_rtx;
6038 else
6039 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6040
6041 case BUILT_IN_ALLOCA:
6042 case BUILT_IN_ALLOCA_WITH_ALIGN:
6043 /* If the allocation stems from the declaration of a variable-sized
6044 object, it cannot accumulate. */
6045 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6046 if (target)
6047 return target;
6048 break;
6049
6050 case BUILT_IN_STACK_SAVE:
6051 return expand_stack_save ();
6052
6053 case BUILT_IN_STACK_RESTORE:
6054 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6055 return const0_rtx;
6056
6057 case BUILT_IN_BSWAP16:
6058 case BUILT_IN_BSWAP32:
6059 case BUILT_IN_BSWAP64:
6060 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6061 if (target)
6062 return target;
6063 break;
6064
6065 CASE_INT_FN (BUILT_IN_FFS):
6066 target = expand_builtin_unop (target_mode, exp, target,
6067 subtarget, ffs_optab);
6068 if (target)
6069 return target;
6070 break;
6071
6072 CASE_INT_FN (BUILT_IN_CLZ):
6073 target = expand_builtin_unop (target_mode, exp, target,
6074 subtarget, clz_optab);
6075 if (target)
6076 return target;
6077 break;
6078
6079 CASE_INT_FN (BUILT_IN_CTZ):
6080 target = expand_builtin_unop (target_mode, exp, target,
6081 subtarget, ctz_optab);
6082 if (target)
6083 return target;
6084 break;
6085
6086 CASE_INT_FN (BUILT_IN_CLRSB):
6087 target = expand_builtin_unop (target_mode, exp, target,
6088 subtarget, clrsb_optab);
6089 if (target)
6090 return target;
6091 break;
6092
6093 CASE_INT_FN (BUILT_IN_POPCOUNT):
6094 target = expand_builtin_unop (target_mode, exp, target,
6095 subtarget, popcount_optab);
6096 if (target)
6097 return target;
6098 break;
6099
6100 CASE_INT_FN (BUILT_IN_PARITY):
6101 target = expand_builtin_unop (target_mode, exp, target,
6102 subtarget, parity_optab);
6103 if (target)
6104 return target;
6105 break;
6106
6107 case BUILT_IN_STRLEN:
6108 target = expand_builtin_strlen (exp, target, target_mode);
6109 if (target)
6110 return target;
6111 break;
6112
6113 case BUILT_IN_STRCPY:
6114 target = expand_builtin_strcpy (exp, target);
6115 if (target)
6116 return target;
6117 break;
6118
6119 case BUILT_IN_STRNCPY:
6120 target = expand_builtin_strncpy (exp, target);
6121 if (target)
6122 return target;
6123 break;
6124
6125 case BUILT_IN_STPCPY:
6126 target = expand_builtin_stpcpy (exp, target, mode);
6127 if (target)
6128 return target;
6129 break;
6130
6131 case BUILT_IN_MEMCPY:
6132 target = expand_builtin_memcpy (exp, target);
6133 if (target)
6134 return target;
6135 break;
6136
6137 case BUILT_IN_MEMPCPY:
6138 target = expand_builtin_mempcpy (exp, target, mode);
6139 if (target)
6140 return target;
6141 break;
6142
6143 case BUILT_IN_MEMSET:
6144 target = expand_builtin_memset (exp, target, mode);
6145 if (target)
6146 return target;
6147 break;
6148
6149 case BUILT_IN_BZERO:
6150 target = expand_builtin_bzero (exp);
6151 if (target)
6152 return target;
6153 break;
6154
6155 case BUILT_IN_STRCMP:
6156 target = expand_builtin_strcmp (exp, target);
6157 if (target)
6158 return target;
6159 break;
6160
6161 case BUILT_IN_STRNCMP:
6162 target = expand_builtin_strncmp (exp, target, mode);
6163 if (target)
6164 return target;
6165 break;
6166
6167 case BUILT_IN_BCMP:
6168 case BUILT_IN_MEMCMP:
6169 target = expand_builtin_memcmp (exp, target, mode);
6170 if (target)
6171 return target;
6172 break;
6173
6174 case BUILT_IN_SETJMP:
6175 /* This should have been lowered to the builtins below. */
6176 gcc_unreachable ();
6177
6178 case BUILT_IN_SETJMP_SETUP:
6179 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6180 and the receiver label. */
6181 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6182 {
6183 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6184 VOIDmode, EXPAND_NORMAL);
6185 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6186 rtx label_r = label_rtx (label);
6187
6188 /* This is copied from the handling of non-local gotos. */
6189 expand_builtin_setjmp_setup (buf_addr, label_r);
6190 nonlocal_goto_handler_labels
6191 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6192 nonlocal_goto_handler_labels);
6193 /* ??? Do not let expand_label treat us as such since we would
6194 not want to be both on the list of non-local labels and on
6195 the list of forced labels. */
6196 FORCED_LABEL (label) = 0;
6197 return const0_rtx;
6198 }
6199 break;
6200
6201 case BUILT_IN_SETJMP_RECEIVER:
6202 /* __builtin_setjmp_receiver is passed the receiver label. */
6203 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6204 {
6205 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6206 rtx label_r = label_rtx (label);
6207
6208 expand_builtin_setjmp_receiver (label_r);
6209 return const0_rtx;
6210 }
6211 break;
6212
6213 /* __builtin_longjmp is passed a pointer to an array of five words.
6214 It's similar to the C library longjmp function but works with
6215 __builtin_setjmp above. */
6216 case BUILT_IN_LONGJMP:
6217 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6218 {
6219 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6220 VOIDmode, EXPAND_NORMAL);
6221 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6222
6223 if (value != const1_rtx)
6224 {
6225 error ("%<__builtin_longjmp%> second argument must be 1");
6226 return const0_rtx;
6227 }
6228
6229 expand_builtin_longjmp (buf_addr, value);
6230 return const0_rtx;
6231 }
6232 break;
6233
6234 case BUILT_IN_NONLOCAL_GOTO:
6235 target = expand_builtin_nonlocal_goto (exp);
6236 if (target)
6237 return target;
6238 break;
6239
6240 /* This updates the setjmp buffer that is its argument with the value
6241 of the current stack pointer. */
6242 case BUILT_IN_UPDATE_SETJMP_BUF:
6243 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6244 {
6245 rtx buf_addr
6246 = expand_normal (CALL_EXPR_ARG (exp, 0));
6247
6248 expand_builtin_update_setjmp_buf (buf_addr);
6249 return const0_rtx;
6250 }
6251 break;
6252
6253 case BUILT_IN_TRAP:
6254 expand_builtin_trap ();
6255 return const0_rtx;
6256
6257 case BUILT_IN_UNREACHABLE:
6258 expand_builtin_unreachable ();
6259 return const0_rtx;
6260
6261 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6262 case BUILT_IN_SIGNBITD32:
6263 case BUILT_IN_SIGNBITD64:
6264 case BUILT_IN_SIGNBITD128:
6265 target = expand_builtin_signbit (exp, target);
6266 if (target)
6267 return target;
6268 break;
6269
6270 /* Various hooks for the DWARF 2 __throw routine. */
6271 case BUILT_IN_UNWIND_INIT:
6272 expand_builtin_unwind_init ();
6273 return const0_rtx;
6274 case BUILT_IN_DWARF_CFA:
6275 return virtual_cfa_rtx;
6276 #ifdef DWARF2_UNWIND_INFO
6277 case BUILT_IN_DWARF_SP_COLUMN:
6278 return expand_builtin_dwarf_sp_column ();
6279 case BUILT_IN_INIT_DWARF_REG_SIZES:
6280 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6281 return const0_rtx;
6282 #endif
6283 case BUILT_IN_FROB_RETURN_ADDR:
6284 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6285 case BUILT_IN_EXTRACT_RETURN_ADDR:
6286 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6287 case BUILT_IN_EH_RETURN:
6288 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6289 CALL_EXPR_ARG (exp, 1));
6290 return const0_rtx;
6291 #ifdef EH_RETURN_DATA_REGNO
6292 case BUILT_IN_EH_RETURN_DATA_REGNO:
6293 return expand_builtin_eh_return_data_regno (exp);
6294 #endif
6295 case BUILT_IN_EXTEND_POINTER:
6296 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6297 case BUILT_IN_EH_POINTER:
6298 return expand_builtin_eh_pointer (exp);
6299 case BUILT_IN_EH_FILTER:
6300 return expand_builtin_eh_filter (exp);
6301 case BUILT_IN_EH_COPY_VALUES:
6302 return expand_builtin_eh_copy_values (exp);
6303
6304 case BUILT_IN_VA_START:
6305 return expand_builtin_va_start (exp);
6306 case BUILT_IN_VA_END:
6307 return expand_builtin_va_end (exp);
6308 case BUILT_IN_VA_COPY:
6309 return expand_builtin_va_copy (exp);
6310 case BUILT_IN_EXPECT:
6311 return expand_builtin_expect (exp, target);
6312 case BUILT_IN_ASSUME_ALIGNED:
6313 return expand_builtin_assume_aligned (exp, target);
6314 case BUILT_IN_PREFETCH:
6315 expand_builtin_prefetch (exp);
6316 return const0_rtx;
6317
6318 case BUILT_IN_INIT_TRAMPOLINE:
6319 return expand_builtin_init_trampoline (exp, true);
6320 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6321 return expand_builtin_init_trampoline (exp, false);
6322 case BUILT_IN_ADJUST_TRAMPOLINE:
6323 return expand_builtin_adjust_trampoline (exp);
6324
6325 case BUILT_IN_FORK:
6326 case BUILT_IN_EXECL:
6327 case BUILT_IN_EXECV:
6328 case BUILT_IN_EXECLP:
6329 case BUILT_IN_EXECLE:
6330 case BUILT_IN_EXECVP:
6331 case BUILT_IN_EXECVE:
6332 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6333 if (target)
6334 return target;
6335 break;
6336
6337 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6338 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6339 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6340 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6341 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6342 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6343 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6344 if (target)
6345 return target;
6346 break;
6347
6348 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6349 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6350 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6351 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6352 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6353 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6354 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6355 if (target)
6356 return target;
6357 break;
6358
6359 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6360 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6361 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6362 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6363 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6364 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6365 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6366 if (target)
6367 return target;
6368 break;
6369
6370 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6371 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6372 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6373 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6374 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6375 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6376 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6377 if (target)
6378 return target;
6379 break;
6380
6381 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6382 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6383 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6384 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6385 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6386 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6387 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6388 if (target)
6389 return target;
6390 break;
6391
6392 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6393 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6394 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6395 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6396 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6397 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6398 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6399 if (target)
6400 return target;
6401 break;
6402
6403 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6404 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6405 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6406 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6407 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6408 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6409 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6410 if (target)
6411 return target;
6412 break;
6413
6414 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6415 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6416 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6417 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6418 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6419 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6420 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6421 if (target)
6422 return target;
6423 break;
6424
6425 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6426 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6427 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6428 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6429 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6430 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6431 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6432 if (target)
6433 return target;
6434 break;
6435
6436 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6437 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6438 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6439 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6440 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6441 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6442 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6443 if (target)
6444 return target;
6445 break;
6446
6447 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6448 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6449 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6450 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6451 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6452 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6453 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6454 if (target)
6455 return target;
6456 break;
6457
6458 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6459 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6460 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6461 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6462 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6463 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6464 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6465 if (target)
6466 return target;
6467 break;
6468
6469 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6470 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6471 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6472 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6473 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6474 if (mode == VOIDmode)
6475 mode = TYPE_MODE (boolean_type_node);
6476 if (!target || !register_operand (target, mode))
6477 target = gen_reg_rtx (mode);
6478
6479 mode = get_builtin_sync_mode
6480 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6481 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6482 if (target)
6483 return target;
6484 break;
6485
6486 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6487 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6488 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6489 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6490 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6491 mode = get_builtin_sync_mode
6492 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6493 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6494 if (target)
6495 return target;
6496 break;
6497
6498 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6499 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6500 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6501 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6502 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6503 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6504 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6505 if (target)
6506 return target;
6507 break;
6508
6509 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6510 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6511 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6512 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6513 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6514 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6515 expand_builtin_sync_lock_release (mode, exp);
6516 return const0_rtx;
6517
6518 case BUILT_IN_SYNC_SYNCHRONIZE:
6519 expand_builtin_sync_synchronize ();
6520 return const0_rtx;
6521
6522 case BUILT_IN_ATOMIC_EXCHANGE_1:
6523 case BUILT_IN_ATOMIC_EXCHANGE_2:
6524 case BUILT_IN_ATOMIC_EXCHANGE_4:
6525 case BUILT_IN_ATOMIC_EXCHANGE_8:
6526 case BUILT_IN_ATOMIC_EXCHANGE_16:
6527 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6528 target = expand_builtin_atomic_exchange (mode, exp, target);
6529 if (target)
6530 return target;
6531 break;
6532
6533 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6534 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6535 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6536 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6537 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6538 {
6539 unsigned int nargs, z;
6540 vec<tree, va_gc> *vec;
6541
6542 mode =
6543 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6544 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6545 if (target)
6546 return target;
6547
6548 /* If this is turned into an external library call, the weak parameter
6549 must be dropped to match the expected parameter list. */
6550 nargs = call_expr_nargs (exp);
6551 vec_alloc (vec, nargs - 1);
6552 for (z = 0; z < 3; z++)
6553 vec->quick_push (CALL_EXPR_ARG (exp, z));
6554 /* Skip the boolean weak parameter. */
6555 for (z = 4; z < 6; z++)
6556 vec->quick_push (CALL_EXPR_ARG (exp, z));
6557 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6558 break;
6559 }
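
/* Illustrative effect of the rewrite above: a six-argument call
   __atomic_compare_exchange_4 (p, e, d, weak, smodel, fmodel) becomes
   the five-argument __atomic_compare_exchange_4 (p, e, d, smodel,
   fmodel), matching the external library prototype, which takes no
   weak flag.  */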
6560
6561 case BUILT_IN_ATOMIC_LOAD_1:
6562 case BUILT_IN_ATOMIC_LOAD_2:
6563 case BUILT_IN_ATOMIC_LOAD_4:
6564 case BUILT_IN_ATOMIC_LOAD_8:
6565 case BUILT_IN_ATOMIC_LOAD_16:
6566 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6567 target = expand_builtin_atomic_load (mode, exp, target);
6568 if (target)
6569 return target;
6570 break;
6571
6572 case BUILT_IN_ATOMIC_STORE_1:
6573 case BUILT_IN_ATOMIC_STORE_2:
6574 case BUILT_IN_ATOMIC_STORE_4:
6575 case BUILT_IN_ATOMIC_STORE_8:
6576 case BUILT_IN_ATOMIC_STORE_16:
6577 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6578 target = expand_builtin_atomic_store (mode, exp);
6579 if (target)
6580 return const0_rtx;
6581 break;
6582
6583 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6584 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6585 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6586 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6587 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6588 {
6589 enum built_in_function lib;
6590 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6591 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6592 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6593 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6594 ignore, lib);
6595 if (target)
6596 return target;
6597 break;
6598 }
6599 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6600 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6601 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6602 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6603 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6604 {
6605 enum built_in_function lib;
6606 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6607 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6608 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6609 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6610 ignore, lib);
6611 if (target)
6612 return target;
6613 break;
6614 }
6615 case BUILT_IN_ATOMIC_AND_FETCH_1:
6616 case BUILT_IN_ATOMIC_AND_FETCH_2:
6617 case BUILT_IN_ATOMIC_AND_FETCH_4:
6618 case BUILT_IN_ATOMIC_AND_FETCH_8:
6619 case BUILT_IN_ATOMIC_AND_FETCH_16:
6620 {
6621 enum built_in_function lib;
6622 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6623 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6624 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6625 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6626 ignore, lib);
6627 if (target)
6628 return target;
6629 break;
6630 }
6631 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6632 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6633 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6634 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6635 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6636 {
6637 enum built_in_function lib;
6638 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6639 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6640 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6641 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6642 ignore, lib);
6643 if (target)
6644 return target;
6645 break;
6646 }
6647 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6648 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6649 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6650 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6651 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6652 {
6653 enum built_in_function lib;
6654 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6655 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6656 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6657 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6658 ignore, lib);
6659 if (target)
6660 return target;
6661 break;
6662 }
6663 case BUILT_IN_ATOMIC_OR_FETCH_1:
6664 case BUILT_IN_ATOMIC_OR_FETCH_2:
6665 case BUILT_IN_ATOMIC_OR_FETCH_4:
6666 case BUILT_IN_ATOMIC_OR_FETCH_8:
6667 case BUILT_IN_ATOMIC_OR_FETCH_16:
6668 {
6669 enum built_in_function lib;
6670 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6671 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6672 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6673 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6674 ignore, lib);
6675 if (target)
6676 return target;
6677 break;
6678 }
6679 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6680 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6681 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6682 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6683 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6684 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6685 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6686 ignore, BUILT_IN_NONE);
6687 if (target)
6688 return target;
6689 break;
6690
6691 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6692 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6693 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6694 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6695 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6696 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6697 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6698 ignore, BUILT_IN_NONE);
6699 if (target)
6700 return target;
6701 break;
6702
6703 case BUILT_IN_ATOMIC_FETCH_AND_1:
6704 case BUILT_IN_ATOMIC_FETCH_AND_2:
6705 case BUILT_IN_ATOMIC_FETCH_AND_4:
6706 case BUILT_IN_ATOMIC_FETCH_AND_8:
6707 case BUILT_IN_ATOMIC_FETCH_AND_16:
6708 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6709 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6710 ignore, BUILT_IN_NONE);
6711 if (target)
6712 return target;
6713 break;
6714
6715 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6716 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6717 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6718 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6719 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6720 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6721 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6722 ignore, BUILT_IN_NONE);
6723 if (target)
6724 return target;
6725 break;
6726
6727 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6728 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6729 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6730 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6731 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6732 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6733 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6734 ignore, BUILT_IN_NONE);
6735 if (target)
6736 return target;
6737 break;
6738
6739 case BUILT_IN_ATOMIC_FETCH_OR_1:
6740 case BUILT_IN_ATOMIC_FETCH_OR_2:
6741 case BUILT_IN_ATOMIC_FETCH_OR_4:
6742 case BUILT_IN_ATOMIC_FETCH_OR_8:
6743 case BUILT_IN_ATOMIC_FETCH_OR_16:
6744 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6745 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6746 ignore, BUILT_IN_NONE);
6747 if (target)
6748 return target;
6749 break;
6750
6751 case BUILT_IN_ATOMIC_TEST_AND_SET:
6752 return expand_builtin_atomic_test_and_set (exp, target);
6753
6754 case BUILT_IN_ATOMIC_CLEAR:
6755 return expand_builtin_atomic_clear (exp);
6756
6757 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6758 return expand_builtin_atomic_always_lock_free (exp);
6759
6760 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6761 target = expand_builtin_atomic_is_lock_free (exp);
6762 if (target)
6763 return target;
6764 break;
6765
6766 case BUILT_IN_ATOMIC_THREAD_FENCE:
6767 expand_builtin_atomic_thread_fence (exp);
6768 return const0_rtx;
6769
6770 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6771 expand_builtin_atomic_signal_fence (exp);
6772 return const0_rtx;
6773
6774 case BUILT_IN_OBJECT_SIZE:
6775 return expand_builtin_object_size (exp);
6776
6777 case BUILT_IN_MEMCPY_CHK:
6778 case BUILT_IN_MEMPCPY_CHK:
6779 case BUILT_IN_MEMMOVE_CHK:
6780 case BUILT_IN_MEMSET_CHK:
6781 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6782 if (target)
6783 return target;
6784 break;
6785
6786 case BUILT_IN_STRCPY_CHK:
6787 case BUILT_IN_STPCPY_CHK:
6788 case BUILT_IN_STRNCPY_CHK:
6789 case BUILT_IN_STPNCPY_CHK:
6790 case BUILT_IN_STRCAT_CHK:
6791 case BUILT_IN_STRNCAT_CHK:
6792 case BUILT_IN_SNPRINTF_CHK:
6793 case BUILT_IN_VSNPRINTF_CHK:
6794 maybe_emit_chk_warning (exp, fcode);
6795 break;
6796
6797 case BUILT_IN_SPRINTF_CHK:
6798 case BUILT_IN_VSPRINTF_CHK:
6799 maybe_emit_sprintf_chk_warning (exp, fcode);
6800 break;
6801
6802 case BUILT_IN_FREE:
6803 if (warn_free_nonheap_object)
6804 maybe_emit_free_warning (exp);
6805 break;
6806
6807 case BUILT_IN_THREAD_POINTER:
6808 return expand_builtin_thread_pointer (exp, target);
6809
6810 case BUILT_IN_SET_THREAD_POINTER:
6811 expand_builtin_set_thread_pointer (exp);
6812 return const0_rtx;
6813
6814 case BUILT_IN_CILK_DETACH:
6815 expand_builtin_cilk_detach (exp);
6816 return const0_rtx;
6817
6818 case BUILT_IN_CILK_POP_FRAME:
6819 expand_builtin_cilk_pop_frame (exp);
6820 return const0_rtx;
6821
6822 default: /* Just do a library call if the builtin is unknown. */
6823 break;
6824 }
6825
6826 /* The switch statement above can drop through to cause the function
6827 to be called normally. */
6828 return expand_call (exp, target, ignore);
6829 }
6830
6831 /* Determine whether a tree node represents a call to a built-in
6832 function. If the tree T is a call to a built-in function with
6833 the right number of arguments of the appropriate types, return
6834 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6835 Otherwise the return value is END_BUILTINS. */
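/* An illustrative sketch (not from the original sources): for a call
   tree representing pow (x, y) with both arguments of type double,
   this returns BUILT_IN_POW; if an argument's type class does not
   match the builtin's prototype (say, a pointer where a float is
   expected), END_BUILTINS is returned instead. */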
6836
6837 enum built_in_function
6838 builtin_mathfn_code (const_tree t)
6839 {
6840 const_tree fndecl, arg, parmlist;
6841 const_tree argtype, parmtype;
6842 const_call_expr_arg_iterator iter;
6843
6844 if (TREE_CODE (t) != CALL_EXPR
6845 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6846 return END_BUILTINS;
6847
6848 fndecl = get_callee_fndecl (t);
6849 if (fndecl == NULL_TREE
6850 || TREE_CODE (fndecl) != FUNCTION_DECL
6851 || ! DECL_BUILT_IN (fndecl)
6852 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6853 return END_BUILTINS;
6854
6855 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6856 init_const_call_expr_arg_iterator (t, &iter);
6857 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6858 {
6859 /* If a function doesn't take a variable number of arguments,
6860 the last element in the list will have type `void'. */
6861 parmtype = TREE_VALUE (parmlist);
6862 if (VOID_TYPE_P (parmtype))
6863 {
6864 if (more_const_call_expr_args_p (&iter))
6865 return END_BUILTINS;
6866 return DECL_FUNCTION_CODE (fndecl);
6867 }
6868
6869 if (! more_const_call_expr_args_p (&iter))
6870 return END_BUILTINS;
6871
6872 arg = next_const_call_expr_arg (&iter);
6873 argtype = TREE_TYPE (arg);
6874
6875 if (SCALAR_FLOAT_TYPE_P (parmtype))
6876 {
6877 if (! SCALAR_FLOAT_TYPE_P (argtype))
6878 return END_BUILTINS;
6879 }
6880 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6881 {
6882 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6883 return END_BUILTINS;
6884 }
6885 else if (POINTER_TYPE_P (parmtype))
6886 {
6887 if (! POINTER_TYPE_P (argtype))
6888 return END_BUILTINS;
6889 }
6890 else if (INTEGRAL_TYPE_P (parmtype))
6891 {
6892 if (! INTEGRAL_TYPE_P (argtype))
6893 return END_BUILTINS;
6894 }
6895 else
6896 return END_BUILTINS;
6897 }
6898
6899 /* Variable-length argument list. */
6900 return DECL_FUNCTION_CODE (fndecl);
6901 }
6902
6903 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6904 evaluate to a constant. */
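/* A sketch of the cases handled below: __builtin_constant_p (42) and
   __builtin_constant_p ("abc") fold to 1 here, while an argument with
   side effects, or a non-literal pointer or aggregate argument, folds
   to 0 once no further optimization can change the answer. */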
6905
6906 static tree
6907 fold_builtin_constant_p (tree arg)
6908 {
6909 /* We return 1 for a numeric type that's known to be a constant
6910 value at compile-time or for an aggregate type that's a
6911 literal constant. */
6912 STRIP_NOPS (arg);
6913
6914 /* If we know this is a constant, return the constant one. */
6915 if (CONSTANT_CLASS_P (arg)
6916 || (TREE_CODE (arg) == CONSTRUCTOR
6917 && TREE_CONSTANT (arg)))
6918 return integer_one_node;
6919 if (TREE_CODE (arg) == ADDR_EXPR)
6920 {
6921 tree op = TREE_OPERAND (arg, 0);
6922 if (TREE_CODE (op) == STRING_CST
6923 || (TREE_CODE (op) == ARRAY_REF
6924 && integer_zerop (TREE_OPERAND (op, 1))
6925 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6926 return integer_one_node;
6927 }
6928
6929 /* If this expression has side effects, show we don't know it to be a
6930 constant. Likewise if it's a pointer or aggregate type since in
6931 those cases we only want literals, since those are only optimized
6932 when generating RTL, not later.
6933 And finally, if we are compiling an initializer, not code, we
6934 need to return a definite result now; there's not going to be any
6935 more optimization done. */
6936 if (TREE_SIDE_EFFECTS (arg)
6937 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6938 || POINTER_TYPE_P (TREE_TYPE (arg))
6939 || cfun == 0
6940 || folding_initializer
6941 || force_folding_builtin_constant_p)
6942 return integer_zero_node;
6943
6944 return NULL_TREE;
6945 }
6946
6947 /* Create a call to builtin_expect with PRED and EXPECTED (and, when
6948 non-NULL, PREDICTOR) as its arguments and return it as a truthvalue. */
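/* Schematically, the tree built here is

     __builtin_expect ((long) PRED, (long) EXPECTED [, PREDICTOR]) != 0

   where the casts stand for conversions to the builtin's parameter
   types, so the result is directly usable as a condition. */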
6949
6950 static tree
6951 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
6952 tree predictor)
6953 {
6954 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6955
6956 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6957 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6958 ret_type = TREE_TYPE (TREE_TYPE (fn));
6959 pred_type = TREE_VALUE (arg_types);
6960 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6961
6962 pred = fold_convert_loc (loc, pred_type, pred);
6963 expected = fold_convert_loc (loc, expected_type, expected);
6964 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
6965 predictor);
6966
6967 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6968 build_int_cst (ret_type, 0));
6969 }
6970
6971 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2 (an
6972 optional predictor). Return NULL_TREE if no simplification is possible. */
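/* A sketch of the distribution performed below:

     __builtin_expect (a && b, 1)

   is rewritten as

     __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so the hint reaches both short-circuited conditions. */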
6973
6974 tree
6975 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
6976 {
6977 tree inner, fndecl, inner_arg0;
6978 enum tree_code code;
6979
6980 /* Distribute the expected value over short-circuiting operators.
6981 See through the cast from truthvalue_type_node to long. */
6982 inner_arg0 = arg0;
6983 while (TREE_CODE (inner_arg0) == NOP_EXPR
6984 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6985 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6986 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6987
6988 /* If this is a builtin_expect within a builtin_expect keep the
6989 inner one. See through a comparison against a constant. It
6990 might have been added to create a truthvalue. */
6991 inner = inner_arg0;
6992
6993 if (COMPARISON_CLASS_P (inner)
6994 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6995 inner = TREE_OPERAND (inner, 0);
6996
6997 if (TREE_CODE (inner) == CALL_EXPR
6998 && (fndecl = get_callee_fndecl (inner))
6999 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7000 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7001 return arg0;
7002
7003 inner = inner_arg0;
7004 code = TREE_CODE (inner);
7005 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7006 {
7007 tree op0 = TREE_OPERAND (inner, 0);
7008 tree op1 = TREE_OPERAND (inner, 1);
7009
7010 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7011 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7012 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7013
7014 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7015 }
7016
7017 /* If the argument isn't invariant then there's nothing else we can do. */
7018 if (!TREE_CONSTANT (inner_arg0))
7019 return NULL_TREE;
7020
7021 /* If we expect that a comparison against the argument will fold to
7022 a constant return the constant. In practice, this means a true
7023 constant or the address of a non-weak symbol. */
7024 inner = inner_arg0;
7025 STRIP_NOPS (inner);
7026 if (TREE_CODE (inner) == ADDR_EXPR)
7027 {
7028 do
7029 {
7030 inner = TREE_OPERAND (inner, 0);
7031 }
7032 while (TREE_CODE (inner) == COMPONENT_REF
7033 || TREE_CODE (inner) == ARRAY_REF);
7034 if ((TREE_CODE (inner) == VAR_DECL
7035 || TREE_CODE (inner) == FUNCTION_DECL)
7036 && DECL_WEAK (inner))
7037 return NULL_TREE;
7038 }
7039
7040 /* Otherwise, ARG0 already has the proper type for the return value. */
7041 return arg0;
7042 }
7043
7044 /* Fold a call to __builtin_classify_type with argument ARG. */
7045
7046 static tree
7047 fold_builtin_classify_type (tree arg)
7048 {
7049 if (arg == 0)
7050 return build_int_cst (integer_type_node, no_type_class);
7051
7052 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7053 }
7054
7055 /* Fold a call to __builtin_strlen with argument ARG. */
7056
7057 static tree
7058 fold_builtin_strlen (location_t loc, tree type, tree arg)
7059 {
7060 if (!validate_arg (arg, POINTER_TYPE))
7061 return NULL_TREE;
7062 else
7063 {
7064 tree len = c_strlen (arg, 0);
7065
7066 if (len)
7067 return fold_convert_loc (loc, type, len);
7068
7069 return NULL_TREE;
7070 }
7071 }
7072
7073 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7074
7075 static tree
7076 fold_builtin_inf (location_t loc, tree type, int warn)
7077 {
7078 REAL_VALUE_TYPE real;
7079
7080 /* __builtin_inff is intended to be usable to define INFINITY on all
7081 targets. If an infinity is not available, INFINITY expands "to a
7082 positive constant of type float that overflows at translation
7083 time", footnote "In this case, using INFINITY will violate the
7084 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7085 Thus we pedwarn to ensure this constraint violation is
7086 diagnosed. */
7087 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7088 pedwarn (loc, 0, "target format does not support infinity");
7089
7090 real_inf (&real);
7091 return build_real (type, real);
7092 }
7093
7094 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7095
7096 static tree
7097 fold_builtin_nan (tree arg, tree type, int quiet)
7098 {
7099 REAL_VALUE_TYPE real;
7100 const char *str;
7101
7102 if (!validate_arg (arg, POINTER_TYPE))
7103 return NULL_TREE;
7104 str = c_getstr (arg);
7105 if (!str)
7106 return NULL_TREE;
7107
7108 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7109 return NULL_TREE;
7110
7111 return build_real (type, real);
7112 }
7113
7114 /* Return true if the floating point expression T has an integer value.
7115 We also allow +Inf, -Inf and NaN to be considered integer values. */
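/* For example, (double) i for integral i is integer valued, as is
   floor (x) or fmin (floor (x), trunc (y)); x + 0.5 is not known to
   be. */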
7116
7117 static bool
7118 integer_valued_real_p (tree t)
7119 {
7120 switch (TREE_CODE (t))
7121 {
7122 case FLOAT_EXPR:
7123 return true;
7124
7125 case ABS_EXPR:
7126 case SAVE_EXPR:
7127 return integer_valued_real_p (TREE_OPERAND (t, 0));
7128
7129 case COMPOUND_EXPR:
7130 case MODIFY_EXPR:
7131 case BIND_EXPR:
7132 return integer_valued_real_p (TREE_OPERAND (t, 1));
7133
7134 case PLUS_EXPR:
7135 case MINUS_EXPR:
7136 case MULT_EXPR:
7137 case MIN_EXPR:
7138 case MAX_EXPR:
7139 return integer_valued_real_p (TREE_OPERAND (t, 0))
7140 && integer_valued_real_p (TREE_OPERAND (t, 1));
7141
7142 case COND_EXPR:
7143 return integer_valued_real_p (TREE_OPERAND (t, 1))
7144 && integer_valued_real_p (TREE_OPERAND (t, 2));
7145
7146 case REAL_CST:
7147 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7148
7149 case NOP_EXPR:
7150 {
7151 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7152 if (TREE_CODE (type) == INTEGER_TYPE)
7153 return true;
7154 if (TREE_CODE (type) == REAL_TYPE)
7155 return integer_valued_real_p (TREE_OPERAND (t, 0));
7156 break;
7157 }
7158
7159 case CALL_EXPR:
7160 switch (builtin_mathfn_code (t))
7161 {
7162 CASE_FLT_FN (BUILT_IN_CEIL):
7163 CASE_FLT_FN (BUILT_IN_FLOOR):
7164 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7165 CASE_FLT_FN (BUILT_IN_RINT):
7166 CASE_FLT_FN (BUILT_IN_ROUND):
7167 CASE_FLT_FN (BUILT_IN_TRUNC):
7168 return true;
7169
7170 CASE_FLT_FN (BUILT_IN_FMIN):
7171 CASE_FLT_FN (BUILT_IN_FMAX):
7172 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7173 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7174
7175 default:
7176 break;
7177 }
7178 break;
7179
7180 default:
7181 break;
7182 }
7183 return false;
7184 }
7185
7186 /* FNDECL is assumed to be a builtin where truncation can be propagated
7187 across (for instance floor((double)f) == (double)floorf (f)).
7188 Do the transformation for a call with argument ARG. */
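/* A sketch of the two rules applied below: floor (floor (x))
   simplifies to floor (x) since the rounding functions are
   idempotent, and when optimizing, floor ((double) f) for float F
   narrows to (double) floorf (f) if floorf is available. */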
7189
7190 static tree
7191 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7192 {
7193 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7194
7195 if (!validate_arg (arg, REAL_TYPE))
7196 return NULL_TREE;
7197
7198 /* Integer rounding functions are idempotent. */
7199 if (fcode == builtin_mathfn_code (arg))
7200 return arg;
7201
7202 /* If argument is already integer valued, and we don't need to worry
7203 about setting errno, there's no need to perform rounding. */
7204 if (! flag_errno_math && integer_valued_real_p (arg))
7205 return arg;
7206
7207 if (optimize)
7208 {
7209 tree arg0 = strip_float_extensions (arg);
7210 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7211 tree newtype = TREE_TYPE (arg0);
7212 tree decl;
7213
7214 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7215 && (decl = mathfn_built_in (newtype, fcode)))
7216 return fold_convert_loc (loc, ftype,
7217 build_call_expr_loc (loc, decl, 1,
7218 fold_convert_loc (loc,
7219 newtype,
7220 arg0)));
7221 }
7222 return NULL_TREE;
7223 }
7224
7225 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7226 the argument, for instance lround((double)f) -> lroundf (f).
7227 Do the transformation for a call with argument ARG. */
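/* Sketch: when optimizing, lround ((double) f) for float F narrows to
   lroundf (f); where int and long, or long long and long, share the
   same precision, iround (x) and llround (x) are canonicalized to
   lround (x). */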
7228
7229 static tree
7230 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7231 {
7232 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7233
7234 if (!validate_arg (arg, REAL_TYPE))
7235 return NULL_TREE;
7236
7237 /* If argument is already integer valued, and we don't need to worry
7238 about setting errno, there's no need to perform rounding. */
7239 if (! flag_errno_math && integer_valued_real_p (arg))
7240 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7241 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7242
7243 if (optimize)
7244 {
7245 tree ftype = TREE_TYPE (arg);
7246 tree arg0 = strip_float_extensions (arg);
7247 tree newtype = TREE_TYPE (arg0);
7248 tree decl;
7249
7250 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7251 && (decl = mathfn_built_in (newtype, fcode)))
7252 return build_call_expr_loc (loc, decl, 1,
7253 fold_convert_loc (loc, newtype, arg0));
7254 }
7255
7256 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7257 sizeof (int) == sizeof (long). */
7258 if (TYPE_PRECISION (integer_type_node)
7259 == TYPE_PRECISION (long_integer_type_node))
7260 {
7261 tree newfn = NULL_TREE;
7262 switch (fcode)
7263 {
7264 CASE_FLT_FN (BUILT_IN_ICEIL):
7265 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7266 break;
7267
7268 CASE_FLT_FN (BUILT_IN_IFLOOR):
7269 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7270 break;
7271
7272 CASE_FLT_FN (BUILT_IN_IROUND):
7273 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7274 break;
7275
7276 CASE_FLT_FN (BUILT_IN_IRINT):
7277 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7278 break;
7279
7280 default:
7281 break;
7282 }
7283
7284 if (newfn)
7285 {
7286 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7287 return fold_convert_loc (loc,
7288 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7289 }
7290 }
7291
7292 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7293 sizeof (long long) == sizeof (long). */
7294 if (TYPE_PRECISION (long_long_integer_type_node)
7295 == TYPE_PRECISION (long_integer_type_node))
7296 {
7297 tree newfn = NULL_TREE;
7298 switch (fcode)
7299 {
7300 CASE_FLT_FN (BUILT_IN_LLCEIL):
7301 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7302 break;
7303
7304 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7305 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7306 break;
7307
7308 CASE_FLT_FN (BUILT_IN_LLROUND):
7309 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7310 break;
7311
7312 CASE_FLT_FN (BUILT_IN_LLRINT):
7313 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7314 break;
7315
7316 default:
7317 break;
7318 }
7319
7320 if (newfn)
7321 {
7322 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7323 return fold_convert_loc (loc,
7324 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7325 }
7326 }
7327
7328 return NULL_TREE;
7329 }
7330
7331 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7332 return type. Return NULL_TREE if no simplification can be made. */
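/* A sketch of the unsafe-math expansion below: when optimizing for
   speed, cabs (z) becomes sqrt (r*r + i*i), with R and I the saved
   real and imaginary parts of Z. */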
7333
7334 static tree
7335 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7336 {
7337 tree res;
7338
7339 if (!validate_arg (arg, COMPLEX_TYPE)
7340 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7341 return NULL_TREE;
7342
7343 /* Calculate the result when the argument is a constant. */
7344 if (TREE_CODE (arg) == COMPLEX_CST
7345 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7346 type, mpfr_hypot)))
7347 return res;
7348
7349 if (TREE_CODE (arg) == COMPLEX_EXPR)
7350 {
7351 tree real = TREE_OPERAND (arg, 0);
7352 tree imag = TREE_OPERAND (arg, 1);
7353
7354 /* If either part is zero, cabs is fabs of the other. */
7355 if (real_zerop (real))
7356 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7357 if (real_zerop (imag))
7358 return fold_build1_loc (loc, ABS_EXPR, type, real);
7359
7360 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7361 if (flag_unsafe_math_optimizations
7362 && operand_equal_p (real, imag, OEP_PURE_SAME))
7363 {
7364 const REAL_VALUE_TYPE sqrt2_trunc
7365 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7366 STRIP_NOPS (real);
7367 return fold_build2_loc (loc, MULT_EXPR, type,
7368 fold_build1_loc (loc, ABS_EXPR, type, real),
7369 build_real (type, sqrt2_trunc));
7370 }
7371 }
7372
7373 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7374 if (TREE_CODE (arg) == NEGATE_EXPR
7375 || TREE_CODE (arg) == CONJ_EXPR)
7376 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7377
7378 /* Don't do this when optimizing for size. */
7379 if (flag_unsafe_math_optimizations
7380 && optimize && optimize_function_for_speed_p (cfun))
7381 {
7382 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7383
7384 if (sqrtfn != NULL_TREE)
7385 {
7386 tree rpart, ipart, result;
7387
7388 arg = builtin_save_expr (arg);
7389
7390 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7391 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7392
7393 rpart = builtin_save_expr (rpart);
7394 ipart = builtin_save_expr (ipart);
7395
7396 result = fold_build2_loc (loc, PLUS_EXPR, type,
7397 fold_build2_loc (loc, MULT_EXPR, type,
7398 rpart, rpart),
7399 fold_build2_loc (loc, MULT_EXPR, type,
7400 ipart, ipart));
7401
7402 return build_call_expr_loc (loc, sqrtfn, 1, result);
7403 }
7404 }
7405
7406 return NULL_TREE;
7407 }
7408
7409 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7410 complex tree type of the result. If NEG is true, the imaginary
7411 zero is negative. */
7412
7413 static tree
7414 build_complex_cproj (tree type, bool neg)
7415 {
7416 REAL_VALUE_TYPE rinf, rzero = dconst0;
7417
7418 real_inf (&rinf);
7419 rzero.sign = neg;
7420 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7421 build_real (TREE_TYPE (type), rzero));
7422 }
7423
7424 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7425 return type. Return NULL_TREE if no simplification can be made. */
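/* As a reminder of the C99 semantics implemented here: cproj (z) is
   Z itself unless Z has an infinite part, in which case the result is
   INFINITY + I * copysign (0.0, cimag (z)). For instance, with known
   parts, cproj (x + INFINITY * I) folds to that projected value
   below. */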
7426
7427 static tree
7428 fold_builtin_cproj (location_t loc, tree arg, tree type)
7429 {
7430 if (!validate_arg (arg, COMPLEX_TYPE)
7431 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7432 return NULL_TREE;
7433
7434 /* If there are no infinities, return arg. */
7435 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7436 return non_lvalue_loc (loc, arg);
7437
7438 /* Calculate the result when the argument is a constant. */
7439 if (TREE_CODE (arg) == COMPLEX_CST)
7440 {
7441 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7442 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7443
7444 if (real_isinf (real) || real_isinf (imag))
7445 return build_complex_cproj (type, imag->sign);
7446 else
7447 return arg;
7448 }
7449 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7450 {
7451 tree real = TREE_OPERAND (arg, 0);
7452 tree imag = TREE_OPERAND (arg, 1);
7453
7454 STRIP_NOPS (real);
7455 STRIP_NOPS (imag);
7456
7457 /* If the real part is inf and the imag part is known to be
7458 nonnegative, return (inf + 0i). Remember side-effects are
7459 possible in the imag part. */
7460 if (TREE_CODE (real) == REAL_CST
7461 && real_isinf (TREE_REAL_CST_PTR (real))
7462 && tree_expr_nonnegative_p (imag))
7463 return omit_one_operand_loc (loc, type,
7464 build_complex_cproj (type, false),
7465 arg);
7466
7467 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7468 Remember side-effects are possible in the real part. */
7469 if (TREE_CODE (imag) == REAL_CST
7470 && real_isinf (TREE_REAL_CST_PTR (imag)))
7471 return
7472 omit_one_operand_loc (loc, type,
7473 build_complex_cproj (type, TREE_REAL_CST_PTR
7474 (imag)->sign), arg);
7475 }
7476
7477 return NULL_TREE;
7478 }
7479
7480 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7481 Return NULL_TREE if no simplification can be made. */
7482
7483 static tree
7484 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7485 {
7487 enum built_in_function fcode;
7488 tree res;
7489
7490 if (!validate_arg (arg, REAL_TYPE))
7491 return NULL_TREE;
7492
7493 /* Calculate the result when the argument is a constant. */
7494 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7495 return res;
7496
7497 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7498 fcode = builtin_mathfn_code (arg);
7499 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7500 {
7501 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7502 arg = fold_build2_loc (loc, MULT_EXPR, type,
7503 CALL_EXPR_ARG (arg, 0),
7504 build_real (type, dconsthalf));
7505 return build_call_expr_loc (loc, expfn, 1, arg);
7506 }
7507
7508 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7509 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7510 {
7511 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7512
7513 if (powfn)
7514 {
7515 tree arg0 = CALL_EXPR_ARG (arg, 0);
7516 tree tree_root;
7517 /* The inner root was either sqrt or cbrt. */
7518 /* This was a conditional expression but it triggered a bug
7519 in Sun C 5.5. */
7520 REAL_VALUE_TYPE dconstroot;
7521 if (BUILTIN_SQRT_P (fcode))
7522 dconstroot = dconsthalf;
7523 else
7524 dconstroot = dconst_third ();
7525
7526 /* Adjust for the outer root. */
7527 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7528 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7529 tree_root = build_real (type, dconstroot);
7530 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7531 }
7532 }
7533
7534 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7535 if (flag_unsafe_math_optimizations
7536 && (fcode == BUILT_IN_POW
7537 || fcode == BUILT_IN_POWF
7538 || fcode == BUILT_IN_POWL))
7539 {
7540 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7541 tree arg0 = CALL_EXPR_ARG (arg, 0);
7542 tree arg1 = CALL_EXPR_ARG (arg, 1);
7543 tree narg1;
7544 if (!tree_expr_nonnegative_p (arg0))
7545 arg0 = build1 (ABS_EXPR, type, arg0);
7546 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7547 build_real (type, dconsthalf));
7548 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7549 }
7550
7551 return NULL_TREE;
7552 }
7553
7554 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7555 Return NULL_TREE if no simplification can be made. */
7556
7557 static tree
7558 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7559 {
7560 const enum built_in_function fcode = builtin_mathfn_code (arg);
7561 tree res;
7562
7563 if (!validate_arg (arg, REAL_TYPE))
7564 return NULL_TREE;
7565
7566 /* Calculate the result when the argument is a constant. */
7567 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7568 return res;
7569
7570 if (flag_unsafe_math_optimizations)
7571 {
7572 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7573 if (BUILTIN_EXPONENT_P (fcode))
7574 {
7575 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7576 const REAL_VALUE_TYPE third_trunc =
7577 real_value_truncate (TYPE_MODE (type), dconst_third ());
7578 arg = fold_build2_loc (loc, MULT_EXPR, type,
7579 CALL_EXPR_ARG (arg, 0),
7580 build_real (type, third_trunc));
7581 return build_call_expr_loc (loc, expfn, 1, arg);
7582 }
7583
7584 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7585 if (BUILTIN_SQRT_P (fcode))
7586 {
7587 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7588
7589 if (powfn)
7590 {
7591 tree arg0 = CALL_EXPR_ARG (arg, 0);
7592 tree tree_root;
7593 REAL_VALUE_TYPE dconstroot = dconst_third ();
7594
7595 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7596 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7597 tree_root = build_real (type, dconstroot);
7598 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7599 }
7600 }
7601
7602 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7603 if (BUILTIN_CBRT_P (fcode))
7604 {
7605 tree arg0 = CALL_EXPR_ARG (arg, 0);
7606 if (tree_expr_nonnegative_p (arg0))
7607 {
7608 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7609
7610 if (powfn)
7611 {
7612 tree tree_root;
7613 REAL_VALUE_TYPE dconstroot;
7614
7615 real_arithmetic (&dconstroot, MULT_EXPR,
7616 dconst_third_ptr (), dconst_third_ptr ());
7617 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7618 tree_root = build_real (type, dconstroot);
7619 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7620 }
7621 }
7622 }
7623
7624 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7625 if (fcode == BUILT_IN_POW
7626 || fcode == BUILT_IN_POWF
7627 || fcode == BUILT_IN_POWL)
7628 {
7629 tree arg00 = CALL_EXPR_ARG (arg, 0);
7630 tree arg01 = CALL_EXPR_ARG (arg, 1);
7631 if (tree_expr_nonnegative_p (arg00))
7632 {
7633 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7634 const REAL_VALUE_TYPE dconstroot
7635 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7636 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7637 build_real (type, dconstroot));
7638 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7639 }
7640 }
7641 }
7642 return NULL_TREE;
7643 }
7644
7645 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7646 TYPE is the type of the return value. Return NULL_TREE if no
7647 simplification can be made. */
7648
7649 static tree
7650 fold_builtin_cos (location_t loc,
7651 tree arg, tree type, tree fndecl)
7652 {
7653 tree res, narg;
7654
7655 if (!validate_arg (arg, REAL_TYPE))
7656 return NULL_TREE;
7657
7658 /* Calculate the result when the argument is a constant. */
7659 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7660 return res;
7661
7662 /* Optimize cos(-x) into cos (x). */
7663 if ((narg = fold_strip_sign_ops (arg)))
7664 return build_call_expr_loc (loc, fndecl, 1, narg);
7665
7666 return NULL_TREE;
7667 }
7668
7669 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7670 Return NULL_TREE if no simplification can be made. */
7671
7672 static tree
7673 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7674 {
7675 if (validate_arg (arg, REAL_TYPE))
7676 {
7677 tree res, narg;
7678
7679 /* Calculate the result when the argument is a constant. */
7680 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7681 return res;
7682
7683 /* Optimize cosh(-x) into cosh (x). */
7684 if ((narg = fold_strip_sign_ops (arg)))
7685 return build_call_expr_loc (loc, fndecl, 1, narg);
7686 }
7687
7688 return NULL_TREE;
7689 }
7690
7691 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7692 argument ARG. TYPE is the type of the return value. Return
7693 NULL_TREE if no simplification can be made. */
7694
7695 static tree
7696 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7697 bool hyper)
7698 {
7699 if (validate_arg (arg, COMPLEX_TYPE)
7700 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7701 {
7702 tree tmp;
7703
7704 /* Calculate the result when the argument is a constant. */
7705 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7706 return tmp;
7707
7708 /* Optimize fn(-x) into fn(x). */
7709 if ((tmp = fold_strip_sign_ops (arg)))
7710 return build_call_expr_loc (loc, fndecl, 1, tmp);
7711 }
7712
7713 return NULL_TREE;
7714 }
7715
7716 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7717 Return NULL_TREE if no simplification can be made. */
7718
7719 static tree
7720 fold_builtin_tan (tree arg, tree type)
7721 {
7722 enum built_in_function fcode;
7723 tree res;
7724
7725 if (!validate_arg (arg, REAL_TYPE))
7726 return NULL_TREE;
7727
7728 /* Calculate the result when the argument is a constant. */
7729 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7730 return res;
7731
7732 /* Optimize tan(atan(x)) = x. */
7733 fcode = builtin_mathfn_code (arg);
7734 if (flag_unsafe_math_optimizations
7735 && (fcode == BUILT_IN_ATAN
7736 || fcode == BUILT_IN_ATANF
7737 || fcode == BUILT_IN_ATANL))
7738 return CALL_EXPR_ARG (arg, 0);
7739
7740 return NULL_TREE;
7741 }
7742
7743 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7744 NULL_TREE if no simplification can be made. */
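/* A sketch of the canonicalization below, assuming the C library
   provides the C99 complex functions: sincos (x, &s, &c) becomes,
   schematically,

     t = cexpi (x);  s = __imag__ t;  c = __real__ t;

   since cexpi (x) computes cos (x) + I * sin (x). */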
7745
7746 static tree
7747 fold_builtin_sincos (location_t loc,
7748 tree arg0, tree arg1, tree arg2)
7749 {
7750 tree type;
7751 tree res, fn, call;
7752
7753 if (!validate_arg (arg0, REAL_TYPE)
7754 || !validate_arg (arg1, POINTER_TYPE)
7755 || !validate_arg (arg2, POINTER_TYPE))
7756 return NULL_TREE;
7757
7758 type = TREE_TYPE (arg0);
7759
7760 /* Calculate the result when the argument is a constant. */
7761 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7762 return res;
7763
7764 /* Canonicalize sincos to cexpi. */
7765 if (!targetm.libc_has_function (function_c99_math_complex))
7766 return NULL_TREE;
7767 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7768 if (!fn)
7769 return NULL_TREE;
7770
7771 call = build_call_expr_loc (loc, fn, 1, arg0);
7772 call = builtin_save_expr (call);
7773
7774 return build2 (COMPOUND_EXPR, void_type_node,
7775 build2 (MODIFY_EXPR, void_type_node,
7776 build_fold_indirect_ref_loc (loc, arg1),
7777 build1 (IMAGPART_EXPR, type, call)),
7778 build2 (MODIFY_EXPR, void_type_node,
7779 build_fold_indirect_ref_loc (loc, arg2),
7780 build1 (REALPART_EXPR, type, call)));
7781 }
7782
7783 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7784 NULL_TREE if no simplification can be made. */
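/* Sketches of the folds below: if the real part of the argument is
   known to be zero, cexp (0 + I*y) becomes cexpi (y); under
   -funsafe-math-optimizations, cexp (x + I*y) is decomposed into the
   COMPLEX_EXPR

     exp (x) * __real__ cexpi (y) + I * (exp (x) * __imag__ cexpi (y))

   so only the real exp and cexpi calls remain. */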
7785
7786 static tree
7787 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7788 {
7789 tree rtype;
7790 tree realp, imagp, ifn;
7791 tree res;
7792
7793 if (!validate_arg (arg0, COMPLEX_TYPE)
7794 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7795 return NULL_TREE;
7796
7797 /* Calculate the result when the argument is a constant. */
7798 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7799 return res;
7800
7801 rtype = TREE_TYPE (TREE_TYPE (arg0));
7802
7803 /* If we can figure out the real part of arg0 and it is constant zero,
7804 fold to cexpi. */
7805 if (!targetm.libc_has_function (function_c99_math_complex))
7806 return NULL_TREE;
7807 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7808 if (!ifn)
7809 return NULL_TREE;
7810
7811 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7812 && real_zerop (realp))
7813 {
7814 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7815 return build_call_expr_loc (loc, ifn, 1, narg);
7816 }
7817
7818 /* If we can easily decompose the real and imaginary parts, split cexp
7819 into exp (r) * cexpi (i). */
7820 if (flag_unsafe_math_optimizations
7821 && realp)
7822 {
7823 tree rfn, rcall, icall;
7824
7825 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7826 if (!rfn)
7827 return NULL_TREE;
7828
7829 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7830 if (!imagp)
7831 return NULL_TREE;
7832
7833 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7834 icall = builtin_save_expr (icall);
7835 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7836 rcall = builtin_save_expr (rcall);
7837 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7838 fold_build2_loc (loc, MULT_EXPR, rtype,
7839 rcall,
7840 fold_build1_loc (loc, REALPART_EXPR,
7841 rtype, icall)),
7842 fold_build2_loc (loc, MULT_EXPR, rtype,
7843 rcall,
7844 fold_build1_loc (loc, IMAGPART_EXPR,
7845 rtype, icall)));
7846 }
7847
7848 return NULL_TREE;
7849 }
7850
7851 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7852 Return NULL_TREE if no simplification can be made. */
7853
7854 static tree
7855 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7856 {
7857 if (!validate_arg (arg, REAL_TYPE))
7858 return NULL_TREE;
7859
7860 /* Optimize trunc of constant value. */
7861 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7862 {
7863 REAL_VALUE_TYPE r, x;
7864 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7865
7866 x = TREE_REAL_CST (arg);
7867 real_trunc (&r, TYPE_MODE (type), &x);
7868 return build_real (type, r);
7869 }
7870
7871 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7872 }
7873
7874 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7875 Return NULL_TREE if no simplification can be made. */
7876
7877 static tree
7878 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7879 {
7880 if (!validate_arg (arg, REAL_TYPE))
7881 return NULL_TREE;
7882
7883 /* Optimize floor of constant value. */
7884 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7885 {
7886 REAL_VALUE_TYPE x;
7887
7888 x = TREE_REAL_CST (arg);
7889 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7890 {
7891 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7892 REAL_VALUE_TYPE r;
7893
7894 real_floor (&r, TYPE_MODE (type), &x);
7895 return build_real (type, r);
7896 }
7897 }
7898
7899 /* Fold floor (x) where x is nonnegative to trunc (x). */
7900 if (tree_expr_nonnegative_p (arg))
7901 {
7902 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7903 if (truncfn)
7904 return build_call_expr_loc (loc, truncfn, 1, arg);
7905 }
7906
7907 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7908 }
7909
7910 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7911 Return NULL_TREE if no simplification can be made. */
7912
7913 static tree
7914 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7915 {
7916 if (!validate_arg (arg, REAL_TYPE))
7917 return NULL_TREE;
7918
7919 /* Optimize ceil of constant value. */
7920 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7921 {
7922 REAL_VALUE_TYPE x;
7923
7924 x = TREE_REAL_CST (arg);
7925 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7926 {
7927 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7928 REAL_VALUE_TYPE r;
7929
7930 real_ceil (&r, TYPE_MODE (type), &x);
7931 return build_real (type, r);
7932 }
7933 }
7934
7935 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7936 }
7937
7938 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7939 Return NULL_TREE if no simplification can be made. */
7940
7941 static tree
7942 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7943 {
7944 if (!validate_arg (arg, REAL_TYPE))
7945 return NULL_TREE;
7946
7947 /* Optimize round of constant value. */
7948 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7949 {
7950 REAL_VALUE_TYPE x;
7951
7952 x = TREE_REAL_CST (arg);
7953 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7954 {
7955 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7956 REAL_VALUE_TYPE r;
7957
7958 real_round (&r, TYPE_MODE (type), &x);
7959 return build_real (type, r);
7960 }
7961 }
7962
7963 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7964 }
7965
7966 /* Fold function call to builtin lround, lroundf or lroundl (or the
7967 corresponding int and long long versions) and other rounding functions. ARG
7968 is the argument to the call. Return NULL_TREE if no simplification
7969 can be made. */
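/* For example, lround (2.5) folds to 3 at compile time, and for
   nonnegative X, lfloor (X) becomes a plain FIX_TRUNC_EXPR
   conversion, i.e. (long int) X. */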
7970
7971 static tree
7972 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7973 {
7974 if (!validate_arg (arg, REAL_TYPE))
7975 return NULL_TREE;
7976
7977 /* Optimize lround of constant value. */
7978 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7979 {
7980 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7981
7982 if (real_isfinite (&x))
7983 {
7984 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7985 tree ftype = TREE_TYPE (arg);
7986 REAL_VALUE_TYPE r;
7987 bool fail = false;
7988
7989 switch (DECL_FUNCTION_CODE (fndecl))
7990 {
7991 CASE_FLT_FN (BUILT_IN_IFLOOR):
7992 CASE_FLT_FN (BUILT_IN_LFLOOR):
7993 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7994 real_floor (&r, TYPE_MODE (ftype), &x);
7995 break;
7996
7997 CASE_FLT_FN (BUILT_IN_ICEIL):
7998 CASE_FLT_FN (BUILT_IN_LCEIL):
7999 CASE_FLT_FN (BUILT_IN_LLCEIL):
8000 real_ceil (&r, TYPE_MODE (ftype), &x);
8001 break;
8002
8003 CASE_FLT_FN (BUILT_IN_IROUND):
8004 CASE_FLT_FN (BUILT_IN_LROUND):
8005 CASE_FLT_FN (BUILT_IN_LLROUND):
8006 real_round (&r, TYPE_MODE (ftype), &x);
8007 break;
8008
8009 default:
8010 gcc_unreachable ();
8011 }
8012
8013 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8014 if (!fail)
8015 return wide_int_to_tree (itype, val);
8016 }
8017 }
8018
8019 switch (DECL_FUNCTION_CODE (fndecl))
8020 {
8021 CASE_FLT_FN (BUILT_IN_LFLOOR):
8022 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8023 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8024 if (tree_expr_nonnegative_p (arg))
8025 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8026 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8027 break;
8028 default:;
8029 }
8030
8031 return fold_fixed_mathfn (loc, fndecl, arg);
8032 }
8033
8034 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8035 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8036 the argument to the call. Return NULL_TREE if no simplification can
8037 be made. */
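/* Constant-folding examples (assuming a 32-bit int for the clz case):
   __builtin_popcount (0xf0) folds to 4, __builtin_ffs (8) folds to 4,
   and __builtin_clz (1) folds to 31. */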
8038
8039 static tree
8040 fold_builtin_bitop (tree fndecl, tree arg)
8041 {
8042 if (!validate_arg (arg, INTEGER_TYPE))
8043 return NULL_TREE;
8044
8045 /* Optimize for constant argument. */
8046 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8047 {
8048 tree type = TREE_TYPE (arg);
8049 int result;
8050
8051 switch (DECL_FUNCTION_CODE (fndecl))
8052 {
8053 CASE_INT_FN (BUILT_IN_FFS):
8054 result = wi::ffs (arg);
8055 break;
8056
8057 CASE_INT_FN (BUILT_IN_CLZ):
8058 if (wi::ne_p (arg, 0))
8059 result = wi::clz (arg);
8060 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8061 result = TYPE_PRECISION (type);
8062 break;
8063
8064 CASE_INT_FN (BUILT_IN_CTZ):
8065 if (wi::ne_p (arg, 0))
8066 result = wi::ctz (arg);
8067 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8068 result = TYPE_PRECISION (type);
8069 break;
8070
8071 CASE_INT_FN (BUILT_IN_CLRSB):
8072 result = wi::clrsb (arg);
8073 break;
8074
8075 CASE_INT_FN (BUILT_IN_POPCOUNT):
8076 result = wi::popcount (arg);
8077 break;
8078
8079 CASE_INT_FN (BUILT_IN_PARITY):
8080 result = wi::parity (arg);
8081 break;
8082
8083 default:
8084 gcc_unreachable ();
8085 }
8086
8087 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8088 }
8089
8090 return NULL_TREE;
8091 }
8092
8093 /* Fold a function call to __builtin_bswap16, __builtin_bswap32 or
8094 __builtin_bswap64. Return NULL_TREE if no simplification can be made. */
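/* For example, __builtin_bswap32 (0x12345678) folds to 0x78563412
   and __builtin_bswap16 (0x1234) folds to 0x3412. */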
8095 static tree
8096 fold_builtin_bswap (tree fndecl, tree arg)
8097 {
8098 if (! validate_arg (arg, INTEGER_TYPE))
8099 return NULL_TREE;
8100
8101 /* Optimize constant value. */
8102 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8103 {
8104 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8105
8106 switch (DECL_FUNCTION_CODE (fndecl))
8107 {
8108 case BUILT_IN_BSWAP16:
8109 case BUILT_IN_BSWAP32:
8110 case BUILT_IN_BSWAP64:
8111 {
8112 signop sgn = TYPE_SIGN (type);
8113 tree result =
8114 wide_int_to_tree (type,
8115 wide_int::from (arg, TYPE_PRECISION (type),
8116 sgn).bswap ());
8117 return result;
8118 }
8119 default:
8120 gcc_unreachable ();
8121 }
8122 }
8123
8124 return NULL_TREE;
8125 }
8126
8127 /* A subroutine of fold_builtin to fold the various logarithmic
8128 functions. Return NULL_TREE if no simplification can be made.
8129 FUNC is the corresponding MPFR logarithm function. */
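/* Sketches of the rules below: a constant argument folds via MPFR,
   e.g. log10 (100.0) becomes 2.0; under -funsafe-math-optimizations,
   log (exp (x)) simplifies to x and log (pow (x, y)) to
   y * log (x). */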
8130
8131 static tree
8132 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8133 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8134 {
8135 if (validate_arg (arg, REAL_TYPE))
8136 {
8137 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8138 tree res;
8139 const enum built_in_function fcode = builtin_mathfn_code (arg);
8140
8141 /* Calculate the result when the argument is a constant. */
8142 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8143 return res;
8144
8145 /* Special case, optimize logN(expN(x)) = x. */
8146 if (flag_unsafe_math_optimizations
8147 && ((func == mpfr_log
8148 && (fcode == BUILT_IN_EXP
8149 || fcode == BUILT_IN_EXPF
8150 || fcode == BUILT_IN_EXPL))
8151 || (func == mpfr_log2
8152 && (fcode == BUILT_IN_EXP2
8153 || fcode == BUILT_IN_EXP2F
8154 || fcode == BUILT_IN_EXP2L))
8155 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8156 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8157
8158 /* Optimize logN(func()) for various exponential functions. We
8159 want to determine the value "x" and the power "exponent" in
8160 order to transform logN(x**exponent) into exponent*logN(x). */
8161 if (flag_unsafe_math_optimizations)
8162 {
8163 tree exponent = 0, x = 0;
8164
8165 switch (fcode)
8166 {
8167 CASE_FLT_FN (BUILT_IN_EXP):
8168 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8169 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8170 dconst_e ()));
8171 exponent = CALL_EXPR_ARG (arg, 0);
8172 break;
8173 CASE_FLT_FN (BUILT_IN_EXP2):
8174 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8175 x = build_real (type, dconst2);
8176 exponent = CALL_EXPR_ARG (arg, 0);
8177 break;
8178 CASE_FLT_FN (BUILT_IN_EXP10):
8179 CASE_FLT_FN (BUILT_IN_POW10):
8180 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8181 {
8182 REAL_VALUE_TYPE dconst10;
8183 real_from_integer (&dconst10, VOIDmode, 10, SIGNED);
8184 x = build_real (type, dconst10);
8185 }
8186 exponent = CALL_EXPR_ARG (arg, 0);
8187 break;
8188 CASE_FLT_FN (BUILT_IN_SQRT):
8189 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8190 x = CALL_EXPR_ARG (arg, 0);
8191 exponent = build_real (type, dconsthalf);
8192 break;
8193 CASE_FLT_FN (BUILT_IN_CBRT):
8194 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8195 x = CALL_EXPR_ARG (arg, 0);
8196 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8197 dconst_third ()));
8198 break;
8199 CASE_FLT_FN (BUILT_IN_POW):
8200 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8201 x = CALL_EXPR_ARG (arg, 0);
8202 exponent = CALL_EXPR_ARG (arg, 1);
8203 break;
8204 default:
8205 break;
8206 }
8207
8208 /* Now perform the optimization. */
8209 if (x && exponent)
8210 {
8211 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8212 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8213 }
8214 }
8215 }
8216
8217 return NULL_TREE;
8218 }
8219
8220 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8221 NULL_TREE if no simplification can be made. */
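/* For example, hypot (x, 0.0) folds to fabs (x) below, and under
   -funsafe-math-optimizations hypot (x, x) becomes
   fabs (x) * sqrt (2). */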
8222
8223 static tree
8224 fold_builtin_hypot (location_t loc, tree fndecl,
8225 tree arg0, tree arg1, tree type)
8226 {
8227 tree res, narg0, narg1;
8228
8229 if (!validate_arg (arg0, REAL_TYPE)
8230 || !validate_arg (arg1, REAL_TYPE))
8231 return NULL_TREE;
8232
8233 /* Calculate the result when the argument is a constant. */
8234 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8235 return res;
8236
8237 /* If either argument to hypot has a negate or abs, strip that off.
8238 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8239 narg0 = fold_strip_sign_ops (arg0);
8240 narg1 = fold_strip_sign_ops (arg1);
8241 if (narg0 || narg1)
8242 {
8243 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8244 narg1 ? narg1 : arg1);
8245 }
8246
8247 /* If either argument is zero, hypot is fabs of the other. */
8248 if (real_zerop (arg0))
8249 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8250 else if (real_zerop (arg1))
8251 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8252
8253 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8254 if (flag_unsafe_math_optimizations
8255 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8256 {
8257 const REAL_VALUE_TYPE sqrt2_trunc
8258 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8259 return fold_build2_loc (loc, MULT_EXPR, type,
8260 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8261 build_real (type, sqrt2_trunc));
8262 }
8263
8264 return NULL_TREE;
8265 }
8266
8267
8268 /* Fold a builtin function call to pow, powf, or powl. Return
8269 NULL_TREE if no simplification can be made. */
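/* Examples of the folds below: pow (x, 1.0) simplifies to x,
   pow (x, -1.0) to 1.0 / x, and pow (2.0, 10.0) folds to 1024.0 at
   compile time; under -funsafe-math-optimizations, pow (x, 0.5)
   becomes sqrt (x). */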
8270 static tree
8271 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8272 {
8273 tree res;
8274
8275 if (!validate_arg (arg0, REAL_TYPE)
8276 || !validate_arg (arg1, REAL_TYPE))
8277 return NULL_TREE;
8278
8279 /* Calculate the result when the argument is a constant. */
8280 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8281 return res;
8282
8283 /* Optimize pow(1.0,y) = 1.0. */
8284 if (real_onep (arg0))
8285 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8286
8287 if (TREE_CODE (arg1) == REAL_CST
8288 && !TREE_OVERFLOW (arg1))
8289 {
8290 REAL_VALUE_TYPE cint;
8291 REAL_VALUE_TYPE c;
8292 HOST_WIDE_INT n;
8293
8294 c = TREE_REAL_CST (arg1);
8295
8296 /* Optimize pow(x,0.0) = 1.0. */
8297 if (REAL_VALUES_EQUAL (c, dconst0))
8298 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8299 arg0);
8300
8301 /* Optimize pow(x,1.0) = x. */
8302 if (REAL_VALUES_EQUAL (c, dconst1))
8303 return arg0;
8304
8305 /* Optimize pow(x,-1.0) = 1.0/x. */
8306 if (REAL_VALUES_EQUAL (c, dconstm1))
8307 return fold_build2_loc (loc, RDIV_EXPR, type,
8308 build_real (type, dconst1), arg0);
8309
8310 /* Optimize pow(x,0.5) = sqrt(x). */
8311 if (flag_unsafe_math_optimizations
8312 && REAL_VALUES_EQUAL (c, dconsthalf))
8313 {
8314 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8315
8316 if (sqrtfn != NULL_TREE)
8317 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8318 }
8319
8320 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8321 if (flag_unsafe_math_optimizations)
8322 {
8323 const REAL_VALUE_TYPE dconstroot
8324 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8325
8326 if (REAL_VALUES_EQUAL (c, dconstroot))
8327 {
8328 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8329 if (cbrtfn != NULL_TREE)
8330 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8331 }
8332 }
8333
8334 /* Check for an integer exponent. */
8335 n = real_to_integer (&c);
8336 real_from_integer (&cint, VOIDmode, n, SIGNED);
8337 if (real_identical (&c, &cint))
8338 {
8339 /* Attempt to evaluate pow at compile-time, unless this should
8340 raise an exception. */
8341 if (TREE_CODE (arg0) == REAL_CST
8342 && !TREE_OVERFLOW (arg0)
8343 && (n > 0
8344 || (!flag_trapping_math && !flag_errno_math)
8345 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8346 {
8347 REAL_VALUE_TYPE x;
8348 bool inexact;
8349
8350 x = TREE_REAL_CST (arg0);
8351 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8352 if (flag_unsafe_math_optimizations || !inexact)
8353 return build_real (type, x);
8354 }
8355
8356 /* Strip sign ops from even integer powers. */
8357 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8358 {
8359 tree narg0 = fold_strip_sign_ops (arg0);
8360 if (narg0)
8361 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8362 }
8363 }
8364 }
8365
8366 if (flag_unsafe_math_optimizations)
8367 {
8368 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8369
8370 /* Optimize pow(expN(x),y) = expN(x*y). */
8371 if (BUILTIN_EXPONENT_P (fcode))
8372 {
8373 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8374 tree arg = CALL_EXPR_ARG (arg0, 0);
8375 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8376 return build_call_expr_loc (loc, expfn, 1, arg);
8377 }
8378
8379 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8380 if (BUILTIN_SQRT_P (fcode))
8381 {
8382 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8383 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8384 build_real (type, dconsthalf));
8385 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8386 }
8387
8388 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8389 if (BUILTIN_CBRT_P (fcode))
8390 {
8391 tree arg = CALL_EXPR_ARG (arg0, 0);
8392 if (tree_expr_nonnegative_p (arg))
8393 {
8394 const REAL_VALUE_TYPE dconstroot
8395 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8396 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8397 build_real (type, dconstroot));
8398 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8399 }
8400 }
8401
8402 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8403 if (fcode == BUILT_IN_POW
8404 || fcode == BUILT_IN_POWF
8405 || fcode == BUILT_IN_POWL)
8406 {
8407 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8408 if (tree_expr_nonnegative_p (arg00))
8409 {
8410 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8411 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8412 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8413 }
8414 }
8415 }
8416
8417 return NULL_TREE;
8418 }
8419
8420 /* Fold a builtin function call to powi, powif, or powil with arguments ARG0
8421 and ARG1. Return NULL_TREE if no simplification can be made. */
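/* For instance, powi (x, 1) simplifies to x, powi (x, -1) to 1.0 / x,
   and powi (2.0, 10) folds to 1024.0 at compile time. */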
8422 static tree
8423 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8424 tree arg0, tree arg1, tree type)
8425 {
8426 if (!validate_arg (arg0, REAL_TYPE)
8427 || !validate_arg (arg1, INTEGER_TYPE))
8428 return NULL_TREE;
8429
8430 /* Optimize powi(1.0,y) = 1.0. */
8431 if (real_onep (arg0))
8432 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8433
8434 if (tree_fits_shwi_p (arg1))
8435 {
8436 HOST_WIDE_INT c = tree_to_shwi (arg1);
8437
8438 /* Evaluate powi at compile-time. */
8439 if (TREE_CODE (arg0) == REAL_CST
8440 && !TREE_OVERFLOW (arg0))
8441 {
8442 REAL_VALUE_TYPE x;
8443 x = TREE_REAL_CST (arg0);
8444 real_powi (&x, TYPE_MODE (type), &x, c);
8445 return build_real (type, x);
8446 }
8447
8448 /* Optimize powi(x,0) = 1.0. */
8449 if (c == 0)
8450 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8451 arg0);
8452
8453 /* Optimize powi(x,1) = x. */
8454 if (c == 1)
8455 return arg0;
8456
8457 /* Optimize powi(x,-1) = 1.0/x. */
8458 if (c == -1)
8459 return fold_build2_loc (loc, RDIV_EXPR, type,
8460 build_real (type, dconst1), arg0);
8461 }
8462
8463 return NULL_TREE;
8464 }
8465
8466 /* A subroutine of fold_builtin to fold the various exponent
8467 functions. Return NULL_TREE if no simplification can be made.
8468 FUNC is the corresponding MPFR exponent function. */
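/* E.g. exp (1.0) can fold to a real constant at compile time via MPFR
   (unless -frounding-math forbids folding an inexact result), and
   exp (log (x)) simplifies to x under -funsafe-math-optimizations. */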
8469
8470 static tree
8471 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8472 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8473 {
8474 if (validate_arg (arg, REAL_TYPE))
8475 {
8476 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8477 tree res;
8478
8479 /* Calculate the result when the argument is a constant. */
8480 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8481 return res;
8482
8483 /* Optimize expN(logN(x)) = x. */
8484 if (flag_unsafe_math_optimizations)
8485 {
8486 const enum built_in_function fcode = builtin_mathfn_code (arg);
8487
8488 if ((func == mpfr_exp
8489 && (fcode == BUILT_IN_LOG
8490 || fcode == BUILT_IN_LOGF
8491 || fcode == BUILT_IN_LOGL))
8492 || (func == mpfr_exp2
8493 && (fcode == BUILT_IN_LOG2
8494 || fcode == BUILT_IN_LOG2F
8495 || fcode == BUILT_IN_LOG2L))
8496 || (func == mpfr_exp10
8497 && (fcode == BUILT_IN_LOG10
8498 || fcode == BUILT_IN_LOG10F
8499 || fcode == BUILT_IN_LOG10L)))
8500 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8501 }
8502 }
8503
8504 return NULL_TREE;
8505 }
8506
8507 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8508 Return NULL_TREE if no simplification can be made. */
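/* A sketch of the transformation below: when the length N of SRC is a
   known constant, stpcpy (d, s) becomes, schematically,

     (memcpy (d, s, N + 1), d + N)

   i.e. a memcpy whose value is discarded in favor of the computed end
   pointer. */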
8509
8510 static tree
8511 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8512 {
8513 tree fn, len, lenp1, call, type;
8514
8515 if (!validate_arg (dest, POINTER_TYPE)
8516 || !validate_arg (src, POINTER_TYPE))
8517 return NULL_TREE;
8518
8519 len = c_strlen (src, 1);
8520 if (!len
8521 || TREE_CODE (len) != INTEGER_CST)
8522 return NULL_TREE;
8523
8524 if (optimize_function_for_size_p (cfun)
8525 /* If length is zero it's small enough. */
8526 && !integer_zerop (len))
8527 return NULL_TREE;
8528
8529 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8530 if (!fn)
8531 return NULL_TREE;
8532
8533 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8534 fold_convert_loc (loc, size_type_node, len),
8535 build_int_cst (size_type_node, 1));
8536 /* We use dest twice in building our expression. Save it from
8537 multiple expansions. */
8538 dest = builtin_save_expr (dest);
8539 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8540
8541 type = TREE_TYPE (TREE_TYPE (fndecl));
8542 dest = fold_build_pointer_plus_loc (loc, dest, len);
8543 dest = fold_convert_loc (loc, type, dest);
8544 dest = omit_one_operand_loc (loc, type, dest, call);
8545 return dest;
8546 }
8547
8548 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8549 arguments to the call, and TYPE is its return type.
8550 Return NULL_TREE if no simplification can be made. */
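/* For example, memchr ("hello", 'l', 5) folds to "hello" + 2 below,
   and to a null pointer when the character does not occur within the
   given length. */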
8551
8552 static tree
8553 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8554 {
8555 if (!validate_arg (arg1, POINTER_TYPE)
8556 || !validate_arg (arg2, INTEGER_TYPE)
8557 || !validate_arg (len, INTEGER_TYPE))
8558 return NULL_TREE;
8559 else
8560 {
8561 const char *p1;
8562
8563 if (TREE_CODE (arg2) != INTEGER_CST
8564 || !tree_fits_uhwi_p (len))
8565 return NULL_TREE;
8566
8567 p1 = c_getstr (arg1);
8568 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8569 {
8570 char c;
8571 const char *r;
8572 tree tem;
8573
8574 if (target_char_cast (arg2, &c))
8575 return NULL_TREE;
8576
8577 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8578
8579 if (r == NULL)
8580 return build_int_cst (TREE_TYPE (arg1), 0);
8581
8582 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8583 return fold_convert_loc (loc, type, tem);
8584 }
8585 return NULL_TREE;
8586 }
8587 }
8588
8589 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8590 Return NULL_TREE if no simplification can be made. */
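/* For instance, memcmp (p, q, 0) folds to 0, memcmp ("a", "b", 1)
   folds to -1, and a length-1 comparison of unknown bytes becomes the
   byte difference
   *(const unsigned char *) p - *(const unsigned char *) q. */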
8591
8592 static tree
8593 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8594 {
8595 const char *p1, *p2;
8596
8597 if (!validate_arg (arg1, POINTER_TYPE)
8598 || !validate_arg (arg2, POINTER_TYPE)
8599 || !validate_arg (len, INTEGER_TYPE))
8600 return NULL_TREE;
8601
8602 /* If the LEN parameter is zero, return zero. */
8603 if (integer_zerop (len))
8604 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8605 arg1, arg2);
8606
8607 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8608 if (operand_equal_p (arg1, arg2, 0))
8609 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8610
8611 p1 = c_getstr (arg1);
8612 p2 = c_getstr (arg2);
8613
8614 /* If all arguments are constant, and the value of len is not greater
8615 than the lengths of arg1 and arg2, evaluate at compile-time. */
8616 if (tree_fits_uhwi_p (len) && p1 && p2
8617 && compare_tree_int (len, strlen (p1) + 1) <= 0
8618 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8619 {
8620 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8621
8622 if (r > 0)
8623 return integer_one_node;
8624 else if (r < 0)
8625 return integer_minus_one_node;
8626 else
8627 return integer_zero_node;
8628 }
8629
8630 /* If len parameter is one, return an expression corresponding to
8631 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8632 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8633 {
8634 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8635 tree cst_uchar_ptr_node
8636 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8637
8638 tree ind1
8639 = fold_convert_loc (loc, integer_type_node,
8640 build1 (INDIRECT_REF, cst_uchar_node,
8641 fold_convert_loc (loc,
8642 cst_uchar_ptr_node,
8643 arg1)));
8644 tree ind2
8645 = fold_convert_loc (loc, integer_type_node,
8646 build1 (INDIRECT_REF, cst_uchar_node,
8647 fold_convert_loc (loc,
8648 cst_uchar_ptr_node,
8649 arg2)));
8650 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8651 }
8652
8653 return NULL_TREE;
8654 }
8655
8656 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8657 Return NULL_TREE if no simplification can be made. */
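/* For instance, strcmp (s, s) folds to 0, strcmp ("a", "b") folds to
   -1, and strcmp (s, "") becomes *(const unsigned char *) s. */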
8658
8659 static tree
8660 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8661 {
8662 const char *p1, *p2;
8663
8664 if (!validate_arg (arg1, POINTER_TYPE)
8665 || !validate_arg (arg2, POINTER_TYPE))
8666 return NULL_TREE;
8667
8668 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8669 if (operand_equal_p (arg1, arg2, 0))
8670 return integer_zero_node;
8671
8672 p1 = c_getstr (arg1);
8673 p2 = c_getstr (arg2);
8674
8675 if (p1 && p2)
8676 {
8677 const int i = strcmp (p1, p2);
8678 if (i < 0)
8679 return integer_minus_one_node;
8680 else if (i > 0)
8681 return integer_one_node;
8682 else
8683 return integer_zero_node;
8684 }
8685
8686 /* If the second arg is "", return *(const unsigned char*)arg1. */
8687 if (p2 && *p2 == '\0')
8688 {
8689 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8690 tree cst_uchar_ptr_node
8691 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8692
8693 return fold_convert_loc (loc, integer_type_node,
8694 build1 (INDIRECT_REF, cst_uchar_node,
8695 fold_convert_loc (loc,
8696 cst_uchar_ptr_node,
8697 arg1)));
8698 }
8699
8700 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8701 if (p1 && *p1 == '\0')
8702 {
8703 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8704 tree cst_uchar_ptr_node
8705 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8706
8707 tree temp
8708 = fold_convert_loc (loc, integer_type_node,
8709 build1 (INDIRECT_REF, cst_uchar_node,
8710 fold_convert_loc (loc,
8711 cst_uchar_ptr_node,
8712 arg2)));
8713 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8714 }
8715
8716 return NULL_TREE;
8717 }
8718
8719 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8720 Return NULL_TREE if no simplification can be made. */
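/* For example, strncmp (s1, s2, 0) folds to 0, strncmp ("ab", "ac", 2)
   folds to -1, and a length of one is reduced to a byte difference as
   for memcmp.  */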
8721
8722 static tree
8723 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8724 {
8725 const char *p1, *p2;
8726
8727 if (!validate_arg (arg1, POINTER_TYPE)
8728 || !validate_arg (arg2, POINTER_TYPE)
8729 || !validate_arg (len, INTEGER_TYPE))
8730 return NULL_TREE;
8731
8732 /* If the LEN parameter is zero, return zero. */
8733 if (integer_zerop (len))
8734 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8735 arg1, arg2);
8736
8737 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8738 if (operand_equal_p (arg1, arg2, 0))
8739 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8740
8741 p1 = c_getstr (arg1);
8742 p2 = c_getstr (arg2);
8743
8744 if (tree_fits_uhwi_p (len) && p1 && p2)
8745 {
8746 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8747 if (i > 0)
8748 return integer_one_node;
8749 else if (i < 0)
8750 return integer_minus_one_node;
8751 else
8752 return integer_zero_node;
8753 }
8754
8755 /* If the second arg is "", and the length is greater than zero,
8756 return *(const unsigned char*)arg1. */
8757 if (p2 && *p2 == '\0'
8758 && TREE_CODE (len) == INTEGER_CST
8759 && tree_int_cst_sgn (len) == 1)
8760 {
8761 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8762 tree cst_uchar_ptr_node
8763 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8764
8765 return fold_convert_loc (loc, integer_type_node,
8766 build1 (INDIRECT_REF, cst_uchar_node,
8767 fold_convert_loc (loc,
8768 cst_uchar_ptr_node,
8769 arg1)));
8770 }
8771
8772 /* If the first arg is "", and the length is greater than zero,
8773 return -*(const unsigned char*)arg2. */
8774 if (p1 && *p1 == '\0'
8775 && TREE_CODE (len) == INTEGER_CST
8776 && tree_int_cst_sgn (len) == 1)
8777 {
8778 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8779 tree cst_uchar_ptr_node
8780 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8781
8782 tree temp = fold_convert_loc (loc, integer_type_node,
8783 build1 (INDIRECT_REF, cst_uchar_node,
8784 fold_convert_loc (loc,
8785 cst_uchar_ptr_node,
8786 arg2)));
8787 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8788 }
8789
8790   /* If the LEN parameter is one, return an expression corresponding to
8791      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8792 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8793 {
8794 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8795 tree cst_uchar_ptr_node
8796 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8797
8798 tree ind1 = fold_convert_loc (loc, integer_type_node,
8799 build1 (INDIRECT_REF, cst_uchar_node,
8800 fold_convert_loc (loc,
8801 cst_uchar_ptr_node,
8802 arg1)));
8803 tree ind2 = fold_convert_loc (loc, integer_type_node,
8804 build1 (INDIRECT_REF, cst_uchar_node,
8805 fold_convert_loc (loc,
8806 cst_uchar_ptr_node,
8807 arg2)));
8808 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8809 }
8810
8811 return NULL_TREE;
8812 }
8813
8814 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8815 ARG. Return NULL_TREE if no simplification can be made. */
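/* For example, signbit (-2.5) folds to 1 and signbit (2.5) folds
   to 0.  */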
8816
8817 static tree
8818 fold_builtin_signbit (location_t loc, tree arg, tree type)
8819 {
8820 if (!validate_arg (arg, REAL_TYPE))
8821 return NULL_TREE;
8822
8823 /* If ARG is a compile-time constant, determine the result. */
8824 if (TREE_CODE (arg) == REAL_CST
8825 && !TREE_OVERFLOW (arg))
8826 {
8827 REAL_VALUE_TYPE c;
8828
8829 c = TREE_REAL_CST (arg);
8830 return (REAL_VALUE_NEGATIVE (c)
8831 ? build_one_cst (type)
8832 : build_zero_cst (type));
8833 }
8834
8835 /* If ARG is non-negative, the result is always zero. */
8836 if (tree_expr_nonnegative_p (arg))
8837 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8838
8839 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8840 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8841 return fold_convert (type,
8842 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8843 build_real (TREE_TYPE (arg), dconst0)));
8844
8845 return NULL_TREE;
8846 }
8847
8848 /* Fold function call to builtin copysign, copysignf or copysignl with
8849 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8850 be made. */
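/* For example, copysign (3.0, -1.0) folds to -3.0 at compile time, and
   copysign (x, 2.0) folds to fabs (x) because the sign source is known
   to be non-negative.  */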
8851
8852 static tree
8853 fold_builtin_copysign (location_t loc, tree fndecl,
8854 tree arg1, tree arg2, tree type)
8855 {
8856 tree tem;
8857
8858 if (!validate_arg (arg1, REAL_TYPE)
8859 || !validate_arg (arg2, REAL_TYPE))
8860 return NULL_TREE;
8861
8862 /* copysign(X,X) is X. */
8863 if (operand_equal_p (arg1, arg2, 0))
8864 return fold_convert_loc (loc, type, arg1);
8865
8866 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8867 if (TREE_CODE (arg1) == REAL_CST
8868 && TREE_CODE (arg2) == REAL_CST
8869 && !TREE_OVERFLOW (arg1)
8870 && !TREE_OVERFLOW (arg2))
8871 {
8872 REAL_VALUE_TYPE c1, c2;
8873
8874 c1 = TREE_REAL_CST (arg1);
8875 c2 = TREE_REAL_CST (arg2);
8876 /* c1.sign := c2.sign. */
8877 real_copysign (&c1, &c2);
8878 return build_real (type, c1);
8879 }
8880
8881 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8882 Remember to evaluate Y for side-effects. */
8883 if (tree_expr_nonnegative_p (arg2))
8884 return omit_one_operand_loc (loc, type,
8885 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8886 arg2);
8887
8888 /* Strip sign changing operations for the first argument. */
8889 tem = fold_strip_sign_ops (arg1);
8890 if (tem)
8891 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8892
8893 return NULL_TREE;
8894 }
8895
8896 /* Fold a call to builtin isascii with argument ARG. */
8897
8898 static tree
8899 fold_builtin_isascii (location_t loc, tree arg)
8900 {
8901 if (!validate_arg (arg, INTEGER_TYPE))
8902 return NULL_TREE;
8903 else
8904 {
8905 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8906 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8907 build_int_cst (integer_type_node,
8908 ~ (unsigned HOST_WIDE_INT) 0x7f));
8909 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8910 arg, integer_zero_node);
8911 }
8912 }
8913
8914 /* Fold a call to builtin toascii with argument ARG. */
8915
8916 static tree
8917 fold_builtin_toascii (location_t loc, tree arg)
8918 {
8919 if (!validate_arg (arg, INTEGER_TYPE))
8920 return NULL_TREE;
8921
8922 /* Transform toascii(c) -> (c & 0x7f). */
8923 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8924 build_int_cst (integer_type_node, 0x7f));
8925 }
8926
8927 /* Fold a call to builtin isdigit with argument ARG. */
8928
8929 static tree
8930 fold_builtin_isdigit (location_t loc, tree arg)
8931 {
8932 if (!validate_arg (arg, INTEGER_TYPE))
8933 return NULL_TREE;
8934 else
8935 {
8936 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8937 /* According to the C standard, isdigit is unaffected by locale.
8938 However, it definitely is affected by the target character set. */
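/* E.g. on an ASCII target '0' is 48, so isdigit (c) becomes
   (unsigned) c - 48 <= 9.  */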
8939 unsigned HOST_WIDE_INT target_digit0
8940 = lang_hooks.to_target_charset ('0');
8941
8942 if (target_digit0 == 0)
8943 return NULL_TREE;
8944
8945 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8946 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8947 build_int_cst (unsigned_type_node, target_digit0));
8948 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8949 build_int_cst (unsigned_type_node, 9));
8950 }
8951 }
8952
8953 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8954
8955 static tree
8956 fold_builtin_fabs (location_t loc, tree arg, tree type)
8957 {
8958 if (!validate_arg (arg, REAL_TYPE))
8959 return NULL_TREE;
8960
8961 arg = fold_convert_loc (loc, type, arg);
8962 if (TREE_CODE (arg) == REAL_CST)
8963 return fold_abs_const (arg, type);
8964 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8965 }
8966
8967 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8968
8969 static tree
8970 fold_builtin_abs (location_t loc, tree arg, tree type)
8971 {
8972 if (!validate_arg (arg, INTEGER_TYPE))
8973 return NULL_TREE;
8974
8975 arg = fold_convert_loc (loc, type, arg);
8976 if (TREE_CODE (arg) == INTEGER_CST)
8977 return fold_abs_const (arg, type);
8978 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8979 }
8980
8981 /* Fold a fma operation with arguments ARG[012]. */
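/* For example, fma (2.0, 3.0, 4.0) folds to 10.0, evaluated by MPFR
   with the single rounding that the fma contract requires.  */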
8982
8983 tree
8984 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8985 tree type, tree arg0, tree arg1, tree arg2)
8986 {
8987 if (TREE_CODE (arg0) == REAL_CST
8988 && TREE_CODE (arg1) == REAL_CST
8989 && TREE_CODE (arg2) == REAL_CST)
8990 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8991
8992 return NULL_TREE;
8993 }
8994
8995 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8996
8997 static tree
8998 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8999 {
9000 if (validate_arg (arg0, REAL_TYPE)
9001 && validate_arg (arg1, REAL_TYPE)
9002 && validate_arg (arg2, REAL_TYPE))
9003 {
9004 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9005 if (tem)
9006 return tem;
9007
9008 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9009 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9010 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9011 }
9012 return NULL_TREE;
9013 }
9014
9015 /* Fold a call to builtin fmin or fmax. */
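/* For example, fmax (x, x) folds to x, fmax with a quiet NaN constant
   argument folds to the other argument, and under -ffinite-math-only
   fmax (a, b) becomes MAX_EXPR <a, b>.  */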
9016
9017 static tree
9018 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9019 tree type, bool max)
9020 {
9021 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9022 {
9023 /* Calculate the result when the argument is a constant. */
9024 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9025
9026 if (res)
9027 return res;
9028
9029 /* If either argument is NaN, return the other one. Avoid the
9030 transformation if we get (and honor) a signalling NaN. Using
9031 omit_one_operand() ensures we create a non-lvalue. */
9032 if (TREE_CODE (arg0) == REAL_CST
9033 && real_isnan (&TREE_REAL_CST (arg0))
9034 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9035 || ! TREE_REAL_CST (arg0).signalling))
9036 return omit_one_operand_loc (loc, type, arg1, arg0);
9037 if (TREE_CODE (arg1) == REAL_CST
9038 && real_isnan (&TREE_REAL_CST (arg1))
9039 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9040 || ! TREE_REAL_CST (arg1).signalling))
9041 return omit_one_operand_loc (loc, type, arg0, arg1);
9042
9043 /* Transform fmin/fmax(x,x) -> x. */
9044 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9045 return omit_one_operand_loc (loc, type, arg0, arg1);
9046
9047 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9048 functions to return the numeric arg if the other one is NaN.
9049 These tree codes don't honor that, so only transform if
9050 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9051 handled, so we don't have to worry about it either. */
9052 if (flag_finite_math_only)
9053 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9054 fold_convert_loc (loc, type, arg0),
9055 fold_convert_loc (loc, type, arg1));
9056 }
9057 return NULL_TREE;
9058 }
9059
9060 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9061
9062 static tree
9063 fold_builtin_carg (location_t loc, tree arg, tree type)
9064 {
9065 if (validate_arg (arg, COMPLEX_TYPE)
9066 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9067 {
9068 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9069
9070 if (atan2_fn)
9071 {
9072 tree new_arg = builtin_save_expr (arg);
9073 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9074 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9075 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9076 }
9077 }
9078
9079 return NULL_TREE;
9080 }
9081
9082 /* Fold a call to builtin logb/ilogb. */
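/* For example, logb (-Inf) folds to +Inf, and logb (8.0) folds to 3.0:
   GCC represents 8.0 as 0.5 * 2**4, so REAL_EXP is 4 and the result is
   REAL_EXP - 1.  */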
9083
9084 static tree
9085 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9086 {
9087 if (! validate_arg (arg, REAL_TYPE))
9088 return NULL_TREE;
9089
9090 STRIP_NOPS (arg);
9091
9092 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9093 {
9094 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9095
9096 switch (value->cl)
9097 {
9098 case rvc_nan:
9099 case rvc_inf:
9100 /* If arg is Inf or NaN and we're logb, return it. */
9101 if (TREE_CODE (rettype) == REAL_TYPE)
9102 {
9103 /* For logb(-Inf) we have to return +Inf. */
9104 if (real_isinf (value) && real_isneg (value))
9105 {
9106 REAL_VALUE_TYPE tem;
9107 real_inf (&tem);
9108 return build_real (rettype, tem);
9109 }
9110 return fold_convert_loc (loc, rettype, arg);
9111 }
9112 /* Fall through... */
9113 case rvc_zero:
9114 /* Zero may set errno and/or raise an exception for logb; also,
9115 for ilogb we don't know FP_ILOGB0. */
9116 return NULL_TREE;
9117 case rvc_normal:
9118 /* For normal numbers, proceed iff radix == 2. In GCC,
9119 normalized significands are in the range [0.5, 1.0). We
9120 want the exponent as if they were [1.0, 2.0) so get the
9121 exponent and subtract 1. */
9122 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9123 return fold_convert_loc (loc, rettype,
9124 build_int_cst (integer_type_node,
9125 REAL_EXP (value)-1));
9126 break;
9127 }
9128 }
9129
9130 return NULL_TREE;
9131 }
9132
9133 /* Fold a call to builtin significand, if radix == 2. */
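/* For example, significand (12.0) folds to 1.5: 12.0 is represented as
   0.75 * 2**4, and resetting the exponent to 1 yields 0.75 * 2**1.  */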
9134
9135 static tree
9136 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9137 {
9138 if (! validate_arg (arg, REAL_TYPE))
9139 return NULL_TREE;
9140
9141 STRIP_NOPS (arg);
9142
9143 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9144 {
9145 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9146
9147 switch (value->cl)
9148 {
9149 case rvc_zero:
9150 case rvc_nan:
9151 case rvc_inf:
9152 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9153 return fold_convert_loc (loc, rettype, arg);
9154 case rvc_normal:
9155 /* For normal numbers, proceed iff radix == 2. */
9156 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9157 {
9158 REAL_VALUE_TYPE result = *value;
9159 /* In GCC, normalized significands are in the range [0.5,
9160 1.0). We want them to be [1.0, 2.0) so set the
9161 exponent to 1. */
9162 SET_REAL_EXP (&result, 1);
9163 return build_real (rettype, result);
9164 }
9165 break;
9166 }
9167 }
9168
9169 return NULL_TREE;
9170 }
9171
9172 /* Fold a call to builtin frexp; we can assume the base is 2. */
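/* For example, frexp (8.0, &e) folds to the COMPOUND_EXPR
   (*e = 4, 0.5).  */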
9173
9174 static tree
9175 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9176 {
9177 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9178 return NULL_TREE;
9179
9180 STRIP_NOPS (arg0);
9181
9182 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9183 return NULL_TREE;
9184
9185 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9186
9187 /* Proceed if a valid pointer type was passed in. */
9188 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9189 {
9190 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9191 tree frac, exp;
9192
9193 switch (value->cl)
9194 {
9195 case rvc_zero:
9196 /* For +-0, return (*exp = 0, +-0). */
9197 exp = integer_zero_node;
9198 frac = arg0;
9199 break;
9200 case rvc_nan:
9201 case rvc_inf:
9202 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9203 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9204 case rvc_normal:
9205 {
9206 /* Since the frexp function always expects base 2, and in
9207 GCC normalized significands are already in the range
9208 [0.5, 1.0), we have exactly what frexp wants. */
9209 REAL_VALUE_TYPE frac_rvt = *value;
9210 SET_REAL_EXP (&frac_rvt, 0);
9211 frac = build_real (rettype, frac_rvt);
9212 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9213 }
9214 break;
9215 default:
9216 gcc_unreachable ();
9217 }
9218
9219       /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9220 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9221 TREE_SIDE_EFFECTS (arg1) = 1;
9222 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9223 }
9224
9225 return NULL_TREE;
9226 }
9227
9228 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9229 then we can assume the base is two. If it's false, then we have to
9230 check the mode of the TYPE parameter in certain cases. */
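/* For example, ldexp (1.5, 3) folds to 12.0, while ldexp (x, 0) and
   ldexp (0.0, n) fold to the first argument.  */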
9231
9232 static tree
9233 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9234 tree type, bool ldexp)
9235 {
9236 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9237 {
9238 STRIP_NOPS (arg0);
9239 STRIP_NOPS (arg1);
9240
9241 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9242 if (real_zerop (arg0) || integer_zerop (arg1)
9243 || (TREE_CODE (arg0) == REAL_CST
9244 && !real_isfinite (&TREE_REAL_CST (arg0))))
9245 return omit_one_operand_loc (loc, type, arg0, arg1);
9246
9247 /* If both arguments are constant, then try to evaluate it. */
9248 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9249 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9250 && tree_fits_shwi_p (arg1))
9251 {
9252 /* Bound the maximum adjustment to twice the range of the
9253 mode's valid exponents. Use abs to ensure the range is
9254 positive as a sanity check. */
9255 const long max_exp_adj = 2 *
9256 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9257 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9258
9259 /* Get the user-requested adjustment. */
9260 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9261
9262 /* The requested adjustment must be inside this range. This
9263 is a preliminary cap to avoid things like overflow; we
9264 may still fail to compute the result for other reasons. */
9265 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9266 {
9267 REAL_VALUE_TYPE initial_result;
9268
9269 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9270
9271 /* Ensure we didn't overflow. */
9272 if (! real_isinf (&initial_result))
9273 {
9274 const REAL_VALUE_TYPE trunc_result
9275 = real_value_truncate (TYPE_MODE (type), initial_result);
9276
9277 /* Only proceed if the target mode can hold the
9278 resulting value. */
9279 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9280 return build_real (type, trunc_result);
9281 }
9282 }
9283 }
9284 }
9285
9286 return NULL_TREE;
9287 }
9288
9289 /* Fold a call to builtin modf. */
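/* For example, modf (-3.5, &iptr) folds to the COMPOUND_EXPR
   (*iptr = -3.0, -0.5).  */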
9290
9291 static tree
9292 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9293 {
9294 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9295 return NULL_TREE;
9296
9297 STRIP_NOPS (arg0);
9298
9299 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9300 return NULL_TREE;
9301
9302 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9303
9304 /* Proceed if a valid pointer type was passed in. */
9305 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9306 {
9307 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9308 REAL_VALUE_TYPE trunc, frac;
9309
9310 switch (value->cl)
9311 {
9312 case rvc_nan:
9313 case rvc_zero:
9314 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9315 trunc = frac = *value;
9316 break;
9317 case rvc_inf:
9318 /* For +-Inf, return (*arg1 = arg0, +-0). */
9319 frac = dconst0;
9320 frac.sign = value->sign;
9321 trunc = *value;
9322 break;
9323 case rvc_normal:
9324 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9325 real_trunc (&trunc, VOIDmode, value);
9326 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9327 /* If the original number was negative and already
9328 integral, then the fractional part is -0.0. */
9329 if (value->sign && frac.cl == rvc_zero)
9330 frac.sign = value->sign;
9331 break;
9332 }
9333
9334 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9335 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9336 build_real (rettype, trunc));
9337 TREE_SIDE_EFFECTS (arg1) = 1;
9338 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9339 build_real (rettype, frac));
9340 }
9341
9342 return NULL_TREE;
9343 }
9344
9345 /* Given a location LOC, an interclass builtin function decl FNDECL
9346 and its single argument ARG, return a folded expression computing
9347 the same, or NULL_TREE if we either couldn't or didn't want to fold
9348 (the latter happens if there's an RTL instruction available). */
9349
9350 static tree
9351 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9352 {
9353 enum machine_mode mode;
9354
9355 if (!validate_arg (arg, REAL_TYPE))
9356 return NULL_TREE;
9357
9358 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9359 return NULL_TREE;
9360
9361 mode = TYPE_MODE (TREE_TYPE (arg));
9362
9363 /* If there is no optab, try generic code. */
9364 switch (DECL_FUNCTION_CODE (fndecl))
9365 {
9366 tree result;
9367
9368 CASE_FLT_FN (BUILT_IN_ISINF):
9369 {
9370 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9371 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9372 tree const type = TREE_TYPE (arg);
9373 REAL_VALUE_TYPE r;
9374 char buf[128];
9375
9376 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9377 real_from_string (&r, buf);
9378 result = build_call_expr (isgr_fn, 2,
9379 fold_build1_loc (loc, ABS_EXPR, type, arg),
9380 build_real (type, r));
9381 return result;
9382 }
9383 CASE_FLT_FN (BUILT_IN_FINITE):
9384 case BUILT_IN_ISFINITE:
9385 {
9386 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9387 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9388 tree const type = TREE_TYPE (arg);
9389 REAL_VALUE_TYPE r;
9390 char buf[128];
9391
9392 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9393 real_from_string (&r, buf);
9394 result = build_call_expr (isle_fn, 2,
9395 fold_build1_loc (loc, ABS_EXPR, type, arg),
9396 build_real (type, r));
9397 /*result = fold_build2_loc (loc, UNGT_EXPR,
9398 TREE_TYPE (TREE_TYPE (fndecl)),
9399 fold_build1_loc (loc, ABS_EXPR, type, arg),
9400 build_real (type, r));
9401 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9402 TREE_TYPE (TREE_TYPE (fndecl)),
9403 result);*/
9404 return result;
9405 }
9406 case BUILT_IN_ISNORMAL:
9407 {
9408 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9409 islessequal(fabs(x),DBL_MAX). */
9410 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9411 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9412 tree const type = TREE_TYPE (arg);
9413 REAL_VALUE_TYPE rmax, rmin;
9414 char buf[128];
9415
9416 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9417 real_from_string (&rmax, buf);
9418 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9419 real_from_string (&rmin, buf);
9420 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9421 result = build_call_expr (isle_fn, 2, arg,
9422 build_real (type, rmax));
9423 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9424 build_call_expr (isge_fn, 2, arg,
9425 build_real (type, rmin)));
9426 return result;
9427 }
9428 default:
9429 break;
9430 }
9431
9432 return NULL_TREE;
9433 }
9434
9435 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9436 ARG is the argument for the call. */
9437
9438 static tree
9439 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9440 {
9441 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9442 REAL_VALUE_TYPE r;
9443
9444 if (!validate_arg (arg, REAL_TYPE))
9445 return NULL_TREE;
9446
9447 switch (builtin_index)
9448 {
9449 case BUILT_IN_ISINF:
9450 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9451 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9452
9453 if (TREE_CODE (arg) == REAL_CST)
9454 {
9455 r = TREE_REAL_CST (arg);
9456 if (real_isinf (&r))
9457 return real_compare (GT_EXPR, &r, &dconst0)
9458 ? integer_one_node : integer_minus_one_node;
9459 else
9460 return integer_zero_node;
9461 }
9462
9463 return NULL_TREE;
9464
9465 case BUILT_IN_ISINF_SIGN:
9466 {
9467 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9468 /* In a boolean context, GCC will fold the inner COND_EXPR to
9469 1. So e.g. "if (isinf_sign(x))" would be folded to just
9470 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9471 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9472 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9473 tree tmp = NULL_TREE;
9474
9475 arg = builtin_save_expr (arg);
9476
9477 if (signbit_fn && isinf_fn)
9478 {
9479 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9480 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9481
9482 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9483 signbit_call, integer_zero_node);
9484 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9485 isinf_call, integer_zero_node);
9486
9487 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9488 integer_minus_one_node, integer_one_node);
9489 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9490 isinf_call, tmp,
9491 integer_zero_node);
9492 }
9493
9494 return tmp;
9495 }
9496
9497 case BUILT_IN_ISFINITE:
9498 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9499 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9500 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9501
9502 if (TREE_CODE (arg) == REAL_CST)
9503 {
9504 r = TREE_REAL_CST (arg);
9505 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9506 }
9507
9508 return NULL_TREE;
9509
9510 case BUILT_IN_ISNAN:
9511 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9512 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9513
9514 if (TREE_CODE (arg) == REAL_CST)
9515 {
9516 r = TREE_REAL_CST (arg);
9517 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9518 }
9519
9520 arg = builtin_save_expr (arg);
9521 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9522
9523 default:
9524 gcc_unreachable ();
9525 }
9526 }
9527
9528 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9529 This builtin will generate code to return the appropriate floating
9530 point classification depending on the value of the floating point
9531 number passed in. The possible return values must be supplied as
9532 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9533 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9534 one floating point argument which is "type generic". */
9535
9536 static tree
9537 fold_builtin_fpclassify (location_t loc, tree exp)
9538 {
9539 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9540 arg, type, res, tmp;
9541 enum machine_mode mode;
9542 REAL_VALUE_TYPE r;
9543 char buf[128];
9544
9545 /* Verify the required arguments in the original call. */
9546 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9547 INTEGER_TYPE, INTEGER_TYPE,
9548 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9549 return NULL_TREE;
9550
9551 fp_nan = CALL_EXPR_ARG (exp, 0);
9552 fp_infinite = CALL_EXPR_ARG (exp, 1);
9553 fp_normal = CALL_EXPR_ARG (exp, 2);
9554 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9555 fp_zero = CALL_EXPR_ARG (exp, 4);
9556 arg = CALL_EXPR_ARG (exp, 5);
9557 type = TREE_TYPE (arg);
9558 mode = TYPE_MODE (type);
9559 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9560
9561 /* fpclassify(x) ->
9562 isnan(x) ? FP_NAN :
9563 (fabs(x) == Inf ? FP_INFINITE :
9564 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9565 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9566
9567 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9568 build_real (type, dconst0));
9569 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9570 tmp, fp_zero, fp_subnormal);
9571
9572 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9573 real_from_string (&r, buf);
9574 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9575 arg, build_real (type, r));
9576 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9577
9578 if (HONOR_INFINITIES (mode))
9579 {
9580 real_inf (&r);
9581 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9582 build_real (type, r));
9583 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9584 fp_infinite, res);
9585 }
9586
9587 if (HONOR_NANS (mode))
9588 {
9589 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9590 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9591 }
9592
9593 return res;
9594 }
9595
9596 /* Fold a call to an unordered comparison function such as
9597 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9598 being called and ARG0 and ARG1 are the arguments for the call.
9599 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9600 the opposite of the desired result. UNORDERED_CODE is used
9601 for modes that can hold NaNs and ORDERED_CODE is used for
9602 the rest. */
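/* For example, isgreater (x, y) folds to the negation of
   UNLE_EXPR <x, y> when the mode honors NaNs, and to the negation of
   LE_EXPR <x, y> otherwise.  */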
9603
9604 static tree
9605 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9606 enum tree_code unordered_code,
9607 enum tree_code ordered_code)
9608 {
9609 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9610 enum tree_code code;
9611 tree type0, type1;
9612 enum tree_code code0, code1;
9613 tree cmp_type = NULL_TREE;
9614
9615 type0 = TREE_TYPE (arg0);
9616 type1 = TREE_TYPE (arg1);
9617
9618 code0 = TREE_CODE (type0);
9619 code1 = TREE_CODE (type1);
9620
9621 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9622 /* Choose the wider of two real types. */
9623 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9624 ? type0 : type1;
9625 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9626 cmp_type = type0;
9627 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9628 cmp_type = type1;
9629
9630 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9631 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9632
9633 if (unordered_code == UNORDERED_EXPR)
9634 {
9635 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9636 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9637 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9638 }
9639
9640 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9641 : ordered_code;
9642 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9643 fold_build2_loc (loc, code, type, arg0, arg1));
9644 }
9645
9646 /* Fold a call to built-in function FNDECL with 0 arguments.
9647 IGNORE is true if the result of the function call is ignored. This
9648 function returns NULL_TREE if no simplification was possible. */
9649
9650 static tree
9651 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9652 {
9653 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9654 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9655 switch (fcode)
9656 {
9657 CASE_FLT_FN (BUILT_IN_INF):
9658 case BUILT_IN_INFD32:
9659 case BUILT_IN_INFD64:
9660 case BUILT_IN_INFD128:
9661 return fold_builtin_inf (loc, type, true);
9662
9663 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9664 return fold_builtin_inf (loc, type, false);
9665
9666 case BUILT_IN_CLASSIFY_TYPE:
9667 return fold_builtin_classify_type (NULL_TREE);
9668
9669 case BUILT_IN_UNREACHABLE:
9670 if (flag_sanitize & SANITIZE_UNREACHABLE
9671 && (current_function_decl == NULL
9672 || !lookup_attribute ("no_sanitize_undefined",
9673 DECL_ATTRIBUTES (current_function_decl))))
9674 return ubsan_instrument_unreachable (loc);
9675 break;
9676
9677 default:
9678 break;
9679 }
9680 return NULL_TREE;
9681 }
9682
9683 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9684 IGNORE is true if the result of the function call is ignored. This
9685 function returns NULL_TREE if no simplification was possible. */
9686
9687 static tree
9688 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9689 {
9690 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9691 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9692 switch (fcode)
9693 {
9694 case BUILT_IN_CONSTANT_P:
9695 {
9696 tree val = fold_builtin_constant_p (arg0);
9697
9698 /* Gimplification will pull the CALL_EXPR for the builtin out of
9699 an if condition. When not optimizing, we'll not CSE it back.
9700 To avoid regressions such as link errors, fold to zero now. */
9701 if (!val && !optimize)
9702 val = integer_zero_node;
9703
9704 return val;
9705 }
9706
9707 case BUILT_IN_CLASSIFY_TYPE:
9708 return fold_builtin_classify_type (arg0);
9709
9710 case BUILT_IN_STRLEN:
9711 return fold_builtin_strlen (loc, type, arg0);
9712
9713 CASE_FLT_FN (BUILT_IN_FABS):
9714 case BUILT_IN_FABSD32:
9715 case BUILT_IN_FABSD64:
9716 case BUILT_IN_FABSD128:
9717 return fold_builtin_fabs (loc, arg0, type);
9718
9719 case BUILT_IN_ABS:
9720 case BUILT_IN_LABS:
9721 case BUILT_IN_LLABS:
9722 case BUILT_IN_IMAXABS:
9723 return fold_builtin_abs (loc, arg0, type);
9724
9725 CASE_FLT_FN (BUILT_IN_CONJ):
9726 if (validate_arg (arg0, COMPLEX_TYPE)
9727 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9728 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9729 break;
9730
9731 CASE_FLT_FN (BUILT_IN_CREAL):
9732 if (validate_arg (arg0, COMPLEX_TYPE)
9733 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9734 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9735 break;
9736
9737 CASE_FLT_FN (BUILT_IN_CIMAG):
9738 if (validate_arg (arg0, COMPLEX_TYPE)
9739 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9740 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9741 break;
9742
9743 CASE_FLT_FN (BUILT_IN_CCOS):
9744 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9745
9746 CASE_FLT_FN (BUILT_IN_CCOSH):
9747 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9748
9749 CASE_FLT_FN (BUILT_IN_CPROJ):
9750 return fold_builtin_cproj (loc, arg0, type);
9751
9752 CASE_FLT_FN (BUILT_IN_CSIN):
9753 if (validate_arg (arg0, COMPLEX_TYPE)
9754 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9755 return do_mpc_arg1 (arg0, type, mpc_sin);
9756 break;
9757
9758 CASE_FLT_FN (BUILT_IN_CSINH):
9759 if (validate_arg (arg0, COMPLEX_TYPE)
9760 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9761 return do_mpc_arg1 (arg0, type, mpc_sinh);
9762 break;
9763
9764 CASE_FLT_FN (BUILT_IN_CTAN):
9765 if (validate_arg (arg0, COMPLEX_TYPE)
9766 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9767 return do_mpc_arg1 (arg0, type, mpc_tan);
9768 break;
9769
9770 CASE_FLT_FN (BUILT_IN_CTANH):
9771 if (validate_arg (arg0, COMPLEX_TYPE)
9772 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9773 return do_mpc_arg1 (arg0, type, mpc_tanh);
9774 break;
9775
9776 CASE_FLT_FN (BUILT_IN_CLOG):
9777 if (validate_arg (arg0, COMPLEX_TYPE)
9778 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9779 return do_mpc_arg1 (arg0, type, mpc_log);
9780 break;
9781
9782 CASE_FLT_FN (BUILT_IN_CSQRT):
9783 if (validate_arg (arg0, COMPLEX_TYPE)
9784 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9785 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9786 break;
9787
9788 CASE_FLT_FN (BUILT_IN_CASIN):
9789 if (validate_arg (arg0, COMPLEX_TYPE)
9790 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9791 return do_mpc_arg1 (arg0, type, mpc_asin);
9792 break;
9793
9794 CASE_FLT_FN (BUILT_IN_CACOS):
9795 if (validate_arg (arg0, COMPLEX_TYPE)
9796 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9797 return do_mpc_arg1 (arg0, type, mpc_acos);
9798 break;
9799
9800 CASE_FLT_FN (BUILT_IN_CATAN):
9801 if (validate_arg (arg0, COMPLEX_TYPE)
9802 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9803 return do_mpc_arg1 (arg0, type, mpc_atan);
9804 break;
9805
9806 CASE_FLT_FN (BUILT_IN_CASINH):
9807 if (validate_arg (arg0, COMPLEX_TYPE)
9808 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9809 return do_mpc_arg1 (arg0, type, mpc_asinh);
9810 break;
9811
9812 CASE_FLT_FN (BUILT_IN_CACOSH):
9813 if (validate_arg (arg0, COMPLEX_TYPE)
9814 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9815 return do_mpc_arg1 (arg0, type, mpc_acosh);
9816 break;
9817
9818 CASE_FLT_FN (BUILT_IN_CATANH):
9819 if (validate_arg (arg0, COMPLEX_TYPE)
9820 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9821 return do_mpc_arg1 (arg0, type, mpc_atanh);
9822 break;
9823
9824 CASE_FLT_FN (BUILT_IN_CABS):
9825 return fold_builtin_cabs (loc, arg0, type, fndecl);
9826
9827 CASE_FLT_FN (BUILT_IN_CARG):
9828 return fold_builtin_carg (loc, arg0, type);
9829
9830 CASE_FLT_FN (BUILT_IN_SQRT):
9831 return fold_builtin_sqrt (loc, arg0, type);
9832
9833 CASE_FLT_FN (BUILT_IN_CBRT):
9834 return fold_builtin_cbrt (loc, arg0, type);
9835
9836 CASE_FLT_FN (BUILT_IN_ASIN):
9837 if (validate_arg (arg0, REAL_TYPE))
9838 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9839 &dconstm1, &dconst1, true);
9840 break;
9841
9842 CASE_FLT_FN (BUILT_IN_ACOS):
9843 if (validate_arg (arg0, REAL_TYPE))
9844 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9845 &dconstm1, &dconst1, true);
9846 break;
9847
9848 CASE_FLT_FN (BUILT_IN_ATAN):
9849 if (validate_arg (arg0, REAL_TYPE))
9850 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9851 break;
9852
9853 CASE_FLT_FN (BUILT_IN_ASINH):
9854 if (validate_arg (arg0, REAL_TYPE))
9855 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9856 break;
9857
9858 CASE_FLT_FN (BUILT_IN_ACOSH):
9859 if (validate_arg (arg0, REAL_TYPE))
9860 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9861 &dconst1, NULL, true);
9862 break;
9863
9864 CASE_FLT_FN (BUILT_IN_ATANH):
9865 if (validate_arg (arg0, REAL_TYPE))
9866 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9867 &dconstm1, &dconst1, false);
9868 break;
9869
9870 CASE_FLT_FN (BUILT_IN_SIN):
9871 if (validate_arg (arg0, REAL_TYPE))
9872 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9873 break;
9874
9875 CASE_FLT_FN (BUILT_IN_COS):
9876 return fold_builtin_cos (loc, arg0, type, fndecl);
9877
9878 CASE_FLT_FN (BUILT_IN_TAN):
9879 return fold_builtin_tan (arg0, type);
9880
9881 CASE_FLT_FN (BUILT_IN_CEXP):
9882 return fold_builtin_cexp (loc, arg0, type);
9883
9884 CASE_FLT_FN (BUILT_IN_CEXPI):
9885 if (validate_arg (arg0, REAL_TYPE))
9886 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9887 break;
9888
9889 CASE_FLT_FN (BUILT_IN_SINH):
9890 if (validate_arg (arg0, REAL_TYPE))
9891 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9892 break;
9893
9894 CASE_FLT_FN (BUILT_IN_COSH):
9895 return fold_builtin_cosh (loc, arg0, type, fndecl);
9896
9897 CASE_FLT_FN (BUILT_IN_TANH):
9898 if (validate_arg (arg0, REAL_TYPE))
9899 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9900 break;
9901
9902 CASE_FLT_FN (BUILT_IN_ERF):
9903 if (validate_arg (arg0, REAL_TYPE))
9904 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9905 break;
9906
9907 CASE_FLT_FN (BUILT_IN_ERFC):
9908 if (validate_arg (arg0, REAL_TYPE))
9909 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9910 break;
9911
9912 CASE_FLT_FN (BUILT_IN_TGAMMA):
9913 if (validate_arg (arg0, REAL_TYPE))
9914 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9915 break;
9916
9917 CASE_FLT_FN (BUILT_IN_EXP):
9918 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9919
9920 CASE_FLT_FN (BUILT_IN_EXP2):
9921 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9922
9923 CASE_FLT_FN (BUILT_IN_EXP10):
9924 CASE_FLT_FN (BUILT_IN_POW10):
9925 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9926
9927 CASE_FLT_FN (BUILT_IN_EXPM1):
9928 if (validate_arg (arg0, REAL_TYPE))
9929 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9930 break;
9931
9932 CASE_FLT_FN (BUILT_IN_LOG):
9933 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9934
9935 CASE_FLT_FN (BUILT_IN_LOG2):
9936 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9937
9938 CASE_FLT_FN (BUILT_IN_LOG10):
9939 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9940
9941 CASE_FLT_FN (BUILT_IN_LOG1P):
9942 if (validate_arg (arg0, REAL_TYPE))
9943 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9944 &dconstm1, NULL, false);
9945 break;
9946
9947 CASE_FLT_FN (BUILT_IN_J0):
9948 if (validate_arg (arg0, REAL_TYPE))
9949 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9950 NULL, NULL, 0);
9951 break;
9952
9953 CASE_FLT_FN (BUILT_IN_J1):
9954 if (validate_arg (arg0, REAL_TYPE))
9955 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9956 NULL, NULL, 0);
9957 break;
9958
9959 CASE_FLT_FN (BUILT_IN_Y0):
9960 if (validate_arg (arg0, REAL_TYPE))
9961 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9962 &dconst0, NULL, false);
9963 break;
9964
9965 CASE_FLT_FN (BUILT_IN_Y1):
9966 if (validate_arg (arg0, REAL_TYPE))
9967 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9968 &dconst0, NULL, false);
9969 break;
9970
9971 CASE_FLT_FN (BUILT_IN_NAN):
9972 case BUILT_IN_NAND32:
9973 case BUILT_IN_NAND64:
9974 case BUILT_IN_NAND128:
9975 return fold_builtin_nan (arg0, type, true);
9976
9977 CASE_FLT_FN (BUILT_IN_NANS):
9978 return fold_builtin_nan (arg0, type, false);
9979
9980 CASE_FLT_FN (BUILT_IN_FLOOR):
9981 return fold_builtin_floor (loc, fndecl, arg0);
9982
9983 CASE_FLT_FN (BUILT_IN_CEIL):
9984 return fold_builtin_ceil (loc, fndecl, arg0);
9985
9986 CASE_FLT_FN (BUILT_IN_TRUNC):
9987 return fold_builtin_trunc (loc, fndecl, arg0);
9988
9989 CASE_FLT_FN (BUILT_IN_ROUND):
9990 return fold_builtin_round (loc, fndecl, arg0);
9991
9992 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9993 CASE_FLT_FN (BUILT_IN_RINT):
9994 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9995
9996 CASE_FLT_FN (BUILT_IN_ICEIL):
9997 CASE_FLT_FN (BUILT_IN_LCEIL):
9998 CASE_FLT_FN (BUILT_IN_LLCEIL):
9999 CASE_FLT_FN (BUILT_IN_LFLOOR):
10000 CASE_FLT_FN (BUILT_IN_IFLOOR):
10001 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10002 CASE_FLT_FN (BUILT_IN_IROUND):
10003 CASE_FLT_FN (BUILT_IN_LROUND):
10004 CASE_FLT_FN (BUILT_IN_LLROUND):
10005 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10006
10007 CASE_FLT_FN (BUILT_IN_IRINT):
10008 CASE_FLT_FN (BUILT_IN_LRINT):
10009 CASE_FLT_FN (BUILT_IN_LLRINT):
10010 return fold_fixed_mathfn (loc, fndecl, arg0);
10011
10012 case BUILT_IN_BSWAP16:
10013 case BUILT_IN_BSWAP32:
10014 case BUILT_IN_BSWAP64:
10015 return fold_builtin_bswap (fndecl, arg0);
10016
10017 CASE_INT_FN (BUILT_IN_FFS):
10018 CASE_INT_FN (BUILT_IN_CLZ):
10019 CASE_INT_FN (BUILT_IN_CTZ):
10020 CASE_INT_FN (BUILT_IN_CLRSB):
10021 CASE_INT_FN (BUILT_IN_POPCOUNT):
10022 CASE_INT_FN (BUILT_IN_PARITY):
10023 return fold_builtin_bitop (fndecl, arg0);
10024
10025 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10026 return fold_builtin_signbit (loc, arg0, type);
10027
10028 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10029 return fold_builtin_significand (loc, arg0, type);
10030
10031 CASE_FLT_FN (BUILT_IN_ILOGB):
10032 CASE_FLT_FN (BUILT_IN_LOGB):
10033 return fold_builtin_logb (loc, arg0, type);
10034
10035 case BUILT_IN_ISASCII:
10036 return fold_builtin_isascii (loc, arg0);
10037
10038 case BUILT_IN_TOASCII:
10039 return fold_builtin_toascii (loc, arg0);
10040
10041 case BUILT_IN_ISDIGIT:
10042 return fold_builtin_isdigit (loc, arg0);
10043
10044 CASE_FLT_FN (BUILT_IN_FINITE):
10045 case BUILT_IN_FINITED32:
10046 case BUILT_IN_FINITED64:
10047 case BUILT_IN_FINITED128:
10048 case BUILT_IN_ISFINITE:
10049 {
10050 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10051 if (ret)
10052 return ret;
10053 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10054 }
10055
10056 CASE_FLT_FN (BUILT_IN_ISINF):
10057 case BUILT_IN_ISINFD32:
10058 case BUILT_IN_ISINFD64:
10059 case BUILT_IN_ISINFD128:
10060 {
10061 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10062 if (ret)
10063 return ret;
10064 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10065 }
10066
10067 case BUILT_IN_ISNORMAL:
10068 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10069
10070 case BUILT_IN_ISINF_SIGN:
10071 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10072
10073 CASE_FLT_FN (BUILT_IN_ISNAN):
10074 case BUILT_IN_ISNAND32:
10075 case BUILT_IN_ISNAND64:
10076 case BUILT_IN_ISNAND128:
10077 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10078
10079 case BUILT_IN_PRINTF:
10080 case BUILT_IN_PRINTF_UNLOCKED:
10081 case BUILT_IN_VPRINTF:
10082 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10083
10084 case BUILT_IN_FREE:
10085 if (integer_zerop (arg0))
10086 return build_empty_stmt (loc);
10087 break;
10088
10089 default:
10090 break;
10091 }
10092
10093 return NULL_TREE;
10094
10095 }
10096
10097 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10098 IGNORE is true if the result of the function call is ignored. This
10099 function returns NULL_TREE if no simplification was possible. */
10100
10101 static tree
10102 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10103 {
10104 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10105 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10106
10107 switch (fcode)
10108 {
10109 CASE_FLT_FN (BUILT_IN_JN):
10110 if (validate_arg (arg0, INTEGER_TYPE)
10111 && validate_arg (arg1, REAL_TYPE))
10112 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10113 break;
10114
10115 CASE_FLT_FN (BUILT_IN_YN):
10116 if (validate_arg (arg0, INTEGER_TYPE)
10117 && validate_arg (arg1, REAL_TYPE))
10118 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10119 &dconst0, false);
10120 break;
10121
10122 CASE_FLT_FN (BUILT_IN_DREM):
10123 CASE_FLT_FN (BUILT_IN_REMAINDER):
10124 if (validate_arg (arg0, REAL_TYPE)
10125 && validate_arg (arg1, REAL_TYPE))
10126 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10127 break;
10128
10129 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10130 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10131 if (validate_arg (arg0, REAL_TYPE)
10132 && validate_arg (arg1, POINTER_TYPE))
10133 return do_mpfr_lgamma_r (arg0, arg1, type);
10134 break;
10135
10136 CASE_FLT_FN (BUILT_IN_ATAN2):
10137 if (validate_arg (arg0, REAL_TYPE)
10138 && validate_arg (arg1, REAL_TYPE))
10139 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10140 break;
10141
10142 CASE_FLT_FN (BUILT_IN_FDIM):
10143 if (validate_arg (arg0, REAL_TYPE)
10144 && validate_arg (arg1, REAL_TYPE))
10145 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10146 break;
10147
10148 CASE_FLT_FN (BUILT_IN_HYPOT):
10149 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10150
10151 CASE_FLT_FN (BUILT_IN_CPOW):
10152 if (validate_arg (arg0, COMPLEX_TYPE)
10153 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10154 && validate_arg (arg1, COMPLEX_TYPE)
10155 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10156 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10157 break;
10158
10159 CASE_FLT_FN (BUILT_IN_LDEXP):
10160 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10161 CASE_FLT_FN (BUILT_IN_SCALBN):
10162 CASE_FLT_FN (BUILT_IN_SCALBLN):
10163 return fold_builtin_load_exponent (loc, arg0, arg1,
10164 type, /*ldexp=*/false);
10165
10166 CASE_FLT_FN (BUILT_IN_FREXP):
10167 return fold_builtin_frexp (loc, arg0, arg1, type);
10168
10169 CASE_FLT_FN (BUILT_IN_MODF):
10170 return fold_builtin_modf (loc, arg0, arg1, type);
10171
10172 case BUILT_IN_STRSTR:
10173 return fold_builtin_strstr (loc, arg0, arg1, type);
10174
10175 case BUILT_IN_STRSPN:
10176 return fold_builtin_strspn (loc, arg0, arg1);
10177
10178 case BUILT_IN_STRCSPN:
10179 return fold_builtin_strcspn (loc, arg0, arg1);
10180
10181 case BUILT_IN_STRCHR:
10182 case BUILT_IN_INDEX:
10183 return fold_builtin_strchr (loc, arg0, arg1, type);
10184
10185 case BUILT_IN_STRRCHR:
10186 case BUILT_IN_RINDEX:
10187 return fold_builtin_strrchr (loc, arg0, arg1, type);
10188
10189 case BUILT_IN_STPCPY:
10190 if (ignore)
10191 {
10192 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10193 if (!fn)
10194 break;
10195
10196 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10197 }
10198 else
10199 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10200 break;
10201
10202 case BUILT_IN_STRCMP:
10203 return fold_builtin_strcmp (loc, arg0, arg1);
10204
10205 case BUILT_IN_STRPBRK:
10206 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10207
10208 case BUILT_IN_EXPECT:
10209 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10210
10211 CASE_FLT_FN (BUILT_IN_POW):
10212 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10213
10214 CASE_FLT_FN (BUILT_IN_POWI):
10215 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10216
10217 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10218 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10219
10220 CASE_FLT_FN (BUILT_IN_FMIN):
10221 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10222
10223 CASE_FLT_FN (BUILT_IN_FMAX):
10224 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10225
10226 case BUILT_IN_ISGREATER:
10227 return fold_builtin_unordered_cmp (loc, fndecl,
10228 arg0, arg1, UNLE_EXPR, LE_EXPR);
10229 case BUILT_IN_ISGREATEREQUAL:
10230 return fold_builtin_unordered_cmp (loc, fndecl,
10231 arg0, arg1, UNLT_EXPR, LT_EXPR);
10232 case BUILT_IN_ISLESS:
10233 return fold_builtin_unordered_cmp (loc, fndecl,
10234 arg0, arg1, UNGE_EXPR, GE_EXPR);
10235 case BUILT_IN_ISLESSEQUAL:
10236 return fold_builtin_unordered_cmp (loc, fndecl,
10237 arg0, arg1, UNGT_EXPR, GT_EXPR);
10238 case BUILT_IN_ISLESSGREATER:
10239 return fold_builtin_unordered_cmp (loc, fndecl,
10240 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10241 case BUILT_IN_ISUNORDERED:
10242 return fold_builtin_unordered_cmp (loc, fndecl,
10243 arg0, arg1, UNORDERED_EXPR,
10244 NOP_EXPR);
10245
10246 /* We do the folding for va_start in the expander. */
10247 case BUILT_IN_VA_START:
10248 break;
10249
10250 case BUILT_IN_OBJECT_SIZE:
10251 return fold_builtin_object_size (arg0, arg1);
10252
10253 case BUILT_IN_PRINTF:
10254 case BUILT_IN_PRINTF_UNLOCKED:
10255 case BUILT_IN_VPRINTF:
10256 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10257
10258 case BUILT_IN_PRINTF_CHK:
10259 case BUILT_IN_VPRINTF_CHK:
10260 if (!validate_arg (arg0, INTEGER_TYPE)
10261 || TREE_SIDE_EFFECTS (arg0))
10262 return NULL_TREE;
10263 else
10264 return fold_builtin_printf (loc, fndecl,
10265 arg1, NULL_TREE, ignore, fcode);
10266 break;
10267
10268 case BUILT_IN_FPRINTF:
10269 case BUILT_IN_FPRINTF_UNLOCKED:
10270 case BUILT_IN_VFPRINTF:
10271 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10272 ignore, fcode);
10273
10274 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10275 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10276
10277 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10278 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10279
10280 default:
10281 break;
10282 }
10283 return NULL_TREE;
10284 }
10285
10286 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10287 and ARG2. IGNORE is true if the result of the function call is ignored.
10288 This function returns NULL_TREE if no simplification was possible. */
10289
10290 static tree
10291 fold_builtin_3 (location_t loc, tree fndecl,
10292 tree arg0, tree arg1, tree arg2, bool ignore)
10293 {
10294 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10295 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10296 switch (fcode)
10297 {
10298
10299 CASE_FLT_FN (BUILT_IN_SINCOS):
10300 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10301
10302 CASE_FLT_FN (BUILT_IN_FMA):
10303 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10305
10306 CASE_FLT_FN (BUILT_IN_REMQUO):
10307 if (validate_arg (arg0, REAL_TYPE)
10308 && validate_arg (arg1, REAL_TYPE)
10309 && validate_arg (arg2, POINTER_TYPE))
10310 return do_mpfr_remquo (arg0, arg1, arg2);
10311 break;
10312
10313 case BUILT_IN_STRNCAT:
10314 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10315
10316 case BUILT_IN_STRNCMP:
10317 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10318
10319 case BUILT_IN_MEMCHR:
10320 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10321
10322 case BUILT_IN_BCMP:
10323 case BUILT_IN_MEMCMP:
10324       return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10325
10326 case BUILT_IN_PRINTF_CHK:
10327 case BUILT_IN_VPRINTF_CHK:
10328 if (!validate_arg (arg0, INTEGER_TYPE)
10329 || TREE_SIDE_EFFECTS (arg0))
10330 return NULL_TREE;
10331 else
10332 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10333 break;
10334
10335 case BUILT_IN_FPRINTF:
10336 case BUILT_IN_FPRINTF_UNLOCKED:
10337 case BUILT_IN_VFPRINTF:
10338 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10339 ignore, fcode);
10340
10341 case BUILT_IN_FPRINTF_CHK:
10342 case BUILT_IN_VFPRINTF_CHK:
10343 if (!validate_arg (arg1, INTEGER_TYPE)
10344 || TREE_SIDE_EFFECTS (arg1))
10345 return NULL_TREE;
10346 else
10347 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10348 ignore, fcode);
10349
10350 case BUILT_IN_EXPECT:
10351 return fold_builtin_expect (loc, arg0, arg1, arg2);
10352
10353 default:
10354 break;
10355 }
10356 return NULL_TREE;
10357 }
10358
10359 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10360 ARG2, and ARG3. IGNORE is true if the result of the function call is
10361 ignored. This function returns NULL_TREE if no simplification was
10362 possible. */
10363
10364 static tree
10365 fold_builtin_4 (location_t loc, tree fndecl,
10366 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10367 {
10368 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10369
10370 switch (fcode)
10371 {
10372 case BUILT_IN_FPRINTF_CHK:
10373 case BUILT_IN_VFPRINTF_CHK:
10374 if (!validate_arg (arg1, INTEGER_TYPE)
10375 || TREE_SIDE_EFFECTS (arg1))
10376 return NULL_TREE;
10377 else
10378 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10379 ignore, fcode);
10380 break;
10381
10382 default:
10383 break;
10384 }
10385 return NULL_TREE;
10386 }
10387
10388 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10389 arguments, where NARGS <= 4. IGNORE is true if the result of the
10390 function call is ignored. This function returns NULL_TREE if no
10391 simplification was possible. Note that this only folds builtins with
10392 fixed argument patterns. Foldings that do varargs-to-varargs
10393 transformations, or that match calls with more than 4 arguments,
10394 need to be handled with fold_builtin_varargs instead. */
10395
10396 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10397
10398 tree
10399 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10400 {
10401 tree ret = NULL_TREE;
10402
10403 switch (nargs)
10404 {
10405 case 0:
10406 ret = fold_builtin_0 (loc, fndecl, ignore);
10407 break;
10408 case 1:
10409 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10410 break;
10411 case 2:
10412 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10413 break;
10414 case 3:
10415 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10416 break;
10417 case 4:
10418 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10419 ignore);
10420 break;
10421 default:
10422 break;
10423 }
10424 if (ret)
10425 {
10426 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10427 SET_EXPR_LOCATION (ret, loc);
10428 TREE_NO_WARNING (ret) = 1;
10429 return ret;
10430 }
10431 return NULL_TREE;
10432 }
10433
10434 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10435 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10436 of arguments in ARGS to be omitted. OLDNARGS is the number of
10437 elements in ARGS. */
10438
10439 static tree
10440 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10441 int skip, tree fndecl, int n, va_list newargs)
10442 {
10443 int nargs = oldnargs - skip + n;
10444 tree *buffer;
10445
10446 if (n > 0)
10447 {
10448 int i, j;
10449
10450 buffer = XALLOCAVEC (tree, nargs);
10451 for (i = 0; i < n; i++)
10452 buffer[i] = va_arg (newargs, tree);
10453 for (j = skip; j < oldnargs; j++, i++)
10454 buffer[i] = args[j];
10455 }
10456 else
10457 buffer = args + skip;
10458
10459 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10460 }
10461
10462 /* Return true if FNDECL shouldn't be folded right now.
10463 If a built-in function has an inline attribute always_inline
10464 wrapper, defer folding it until after always_inline functions have
10465 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10466 might not be performed. */
10467
10468 bool
10469 avoid_folding_inline_builtin (tree fndecl)
10470 {
10471 return (DECL_DECLARED_INLINE_P (fndecl)
10472 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10473 && cfun
10474 && !cfun->always_inline_functions_inlined
10475 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10476 }
10477
10478 /* A wrapper function for builtin folding that prevents warnings for
10479 "statement without effect" and the like, caused by removing the
10480 call node before the warning is generated. */
10481
10482 tree
10483 fold_call_expr (location_t loc, tree exp, bool ignore)
10484 {
10485 tree ret = NULL_TREE;
10486 tree fndecl = get_callee_fndecl (exp);
10487 if (fndecl
10488 && TREE_CODE (fndecl) == FUNCTION_DECL
10489 && DECL_BUILT_IN (fndecl)
10490 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10491 yet. Defer folding until we see all the arguments
10492 (after inlining). */
10493 && !CALL_EXPR_VA_ARG_PACK (exp))
10494 {
10495 int nargs = call_expr_nargs (exp);
10496
10497 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10498 instead the last argument is __builtin_va_arg_pack (). Defer folding
10499 even in that case, until arguments are finalized. */
10500 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10501 {
10502 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10503 if (fndecl2
10504 && TREE_CODE (fndecl2) == FUNCTION_DECL
10505 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10506 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10507 return NULL_TREE;
10508 }
10509
10510 if (avoid_folding_inline_builtin (fndecl))
10511 return NULL_TREE;
10512
10513 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10514 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10515 CALL_EXPR_ARGP (exp), ignore);
10516 else
10517 {
10518 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10519 {
10520 tree *args = CALL_EXPR_ARGP (exp);
10521 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10522 }
10523 if (!ret)
10524 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10525 if (ret)
10526 return ret;
10527 }
10528 }
10529 return NULL_TREE;
10530 }
10531
10532 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10533 N arguments are passed in the array ARGARRAY. */
10534
10535 tree
10536 fold_builtin_call_array (location_t loc, tree type,
10537 tree fn,
10538 int n,
10539 tree *argarray)
10540 {
10541 tree ret = NULL_TREE;
10542 tree exp;
10543
10544 if (TREE_CODE (fn) == ADDR_EXPR)
10545 {
10546 tree fndecl = TREE_OPERAND (fn, 0);
10547 if (TREE_CODE (fndecl) == FUNCTION_DECL
10548 && DECL_BUILT_IN (fndecl))
10549 {
10550 /* If the last argument is __builtin_va_arg_pack (), arguments to this
10551 function are not finalized yet. Defer folding until they are. */
10552 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10553 {
10554 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10555 if (fndecl2
10556 && TREE_CODE (fndecl2) == FUNCTION_DECL
10557 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10558 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10559 return build_call_array_loc (loc, type, fn, n, argarray);
10560 }
10561 if (avoid_folding_inline_builtin (fndecl))
10562 return build_call_array_loc (loc, type, fn, n, argarray);
10563 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10564 {
10565 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10566 if (ret)
10567 return ret;
10568
10569 return build_call_array_loc (loc, type, fn, n, argarray);
10570 }
10571 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10572 {
10573 /* First try the transformations that don't require consing up
10574 an exp. */
10575 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10576 if (ret)
10577 return ret;
10578 }
10579
10580 /* If we got this far, we need to build an exp. */
10581 exp = build_call_array_loc (loc, type, fn, n, argarray);
10582 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10583 return ret ? ret : exp;
10584 }
10585 }
10586
10587 return build_call_array_loc (loc, type, fn, n, argarray);
10588 }
10589
10590 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10591 along with N new arguments specified as the "..." parameters. SKIP
10592 is the number of arguments in EXP to be omitted. This function is used
10593 to do varargs-to-varargs transformations. */
10594
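/* Illustrative sketch (hypothetical identifiers, not from the original
   sources): given a call EXP = foo (a, b, c), the invocation

       rewrite_call_expr (loc, exp, 2, bar_decl, 1, x)

   would skip the first two arguments of EXP and build bar (x, c);
   i.e. the N new arguments come first, followed by the retained tail
   of EXP's argument list.  */
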
10595 static tree
10596 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10597 {
10598 va_list ap;
10599 tree t;
10600
10601 va_start (ap, n);
10602 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10603 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10604 va_end (ap);
10605
10606 return t;
10607 }
10608
10609 /* Validate a single argument ARG against a tree code CODE representing
10610 a type. */
10611
10612 static bool
10613 validate_arg (const_tree arg, enum tree_code code)
10614 {
10615 if (!arg)
10616 return false;
10617 else if (code == POINTER_TYPE)
10618 return POINTER_TYPE_P (TREE_TYPE (arg));
10619 else if (code == INTEGER_TYPE)
10620 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10621 return code == TREE_CODE (TREE_TYPE (arg));
10622 }
10623
10624 /* This function validates the types of a function call argument list
10625 against a specified list of tree_codes. If the last specifier is a 0,
10626 that represents an ellipsis; otherwise the last specifier must be a
10627 VOID_TYPE.
10628
10629 This is the GIMPLE version of validate_arglist. Eventually we want to
10630 completely convert builtins.c to work from GIMPLEs and the tree based
10631 validate_arglist will then be removed. */
10632
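/* For example (illustrative only): a check written as

       validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   accepts exactly a pointer followed by an integer, while ending the
   list with 0 instead of VOID_TYPE would additionally allow any number
   of trailing arguments.  */
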
10633 bool
10634 validate_gimple_arglist (const_gimple call, ...)
10635 {
10636 enum tree_code code;
10637 bool res = false;
10638 va_list ap;
10639 const_tree arg;
10640 size_t i;
10641
10642 va_start (ap, call);
10643 i = 0;
10644
10645 do
10646 {
10647 code = (enum tree_code) va_arg (ap, int);
10648 switch (code)
10649 {
10650 case 0:
10651 /* This signifies an ellipsis; any further arguments are all ok. */
10652 res = true;
10653 goto end;
10654 case VOID_TYPE:
10655 /* This signifies an endlink: if no arguments remain, return
10656 true, otherwise return false. */
10657 res = (i == gimple_call_num_args (call));
10658 goto end;
10659 default:
10660 /* If no parameters remain or the parameter's code does not
10661 match the specified code, return false. Otherwise continue
10662 checking any remaining arguments. */
10663 arg = gimple_call_arg (call, i++);
10664 if (!validate_arg (arg, code))
10665 goto end;
10666 break;
10667 }
10668 }
10669 while (1);
10670
10671 /* We need gotos here since we can only call va_end once in a
10672 function. */
10673 end: ;
10674 va_end (ap);
10675
10676 return res;
10677 }
10678
10679 /* Default target-specific builtin expander that does nothing. */
10680
10681 rtx
10682 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10683 rtx target ATTRIBUTE_UNUSED,
10684 rtx subtarget ATTRIBUTE_UNUSED,
10685 enum machine_mode mode ATTRIBUTE_UNUSED,
10686 int ignore ATTRIBUTE_UNUSED)
10687 {
10688 return NULL_RTX;
10689 }
10690
10691 /* Returns true if EXP represents data that would potentially reside
10692 in a readonly section. */
10693
10694 bool
10695 readonly_data_expr (tree exp)
10696 {
10697 STRIP_NOPS (exp);
10698
10699 if (TREE_CODE (exp) != ADDR_EXPR)
10700 return false;
10701
10702 exp = get_base_address (TREE_OPERAND (exp, 0));
10703 if (!exp)
10704 return false;
10705
10706 /* Make sure we call decl_readonly_section only for trees it
10707 can handle (since it returns true for everything it doesn't
10708 understand). */
10709 if (TREE_CODE (exp) == STRING_CST
10710 || TREE_CODE (exp) == CONSTRUCTOR
10711 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10712 return decl_readonly_section (exp, 0);
10713 else
10714 return false;
10715 }
10716
10717 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10718 to the call, and TYPE is its return type.
10719
10720 Return NULL_TREE if no simplification was possible, otherwise return the
10721 simplified form of the call as a tree.
10722
10723 The simplified form may be a constant or other expression which
10724 computes the same value, but in a more efficient manner (including
10725 calls to other builtin functions).
10726
10727 The call may contain arguments which need to be evaluated, but
10728 which are not useful to determine the result of the call. In
10729 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10730 COMPOUND_EXPR will be an argument which must be evaluated.
10731 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10732 COMPOUND_EXPR in the chain will contain the tree for the simplified
10733 form of the builtin function call. */
10734
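/* Illustrative examples of the folding below: with both arguments
   constant, strstr ("hello", "ll") folds to the compile-time offset
   "hello" + 2; strstr (s, "") folds to S itself; and strstr (s, "l")
   is rewritten as strchr (s, 'l').  */
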
10735 static tree
10736 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10737 {
10738 if (!validate_arg (s1, POINTER_TYPE)
10739 || !validate_arg (s2, POINTER_TYPE))
10740 return NULL_TREE;
10741 else
10742 {
10743 tree fn;
10744 const char *p1, *p2;
10745
10746 p2 = c_getstr (s2);
10747 if (p2 == NULL)
10748 return NULL_TREE;
10749
10750 p1 = c_getstr (s1);
10751 if (p1 != NULL)
10752 {
10753 const char *r = strstr (p1, p2);
10754 tree tem;
10755
10756 if (r == NULL)
10757 return build_int_cst (TREE_TYPE (s1), 0);
10758
10759 /* Return an offset into the constant string argument. */
10760 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10761 return fold_convert_loc (loc, type, tem);
10762 }
10763
10764 /* The argument is const char *, and the result is char *, so we need
10765 a type conversion here to avoid a warning. */
10766 if (p2[0] == '\0')
10767 return fold_convert_loc (loc, type, s1);
10768
10769 if (p2[1] != '\0')
10770 return NULL_TREE;
10771
10772 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10773 if (!fn)
10774 return NULL_TREE;
10775
10776 /* New argument list transforming strstr(s1, s2) to
10777 strchr(s1, s2[0]). */
10778 return build_call_expr_loc (loc, fn, 2, s1,
10779 build_int_cst (integer_type_node, p2[0]));
10780 }
10781 }
10782
10783 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10784 the call, and TYPE is its return type.
10785
10786 Return NULL_TREE if no simplification was possible, otherwise return the
10787 simplified form of the call as a tree.
10788
10789 The simplified form may be a constant or other expression which
10790 computes the same value, but in a more efficient manner (including
10791 calls to other builtin functions).
10792
10793 The call may contain arguments which need to be evaluated, but
10794 which are not useful to determine the result of the call. In
10795 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10796 COMPOUND_EXPR will be an argument which must be evaluated.
10797 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10798 COMPOUND_EXPR in the chain will contain the tree for the simplified
10799 form of the builtin function call. */
10800
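/* Illustrative example: with a constant string and character,
   strchr ("hello", 'l') folds to the compile-time offset "hello" + 2,
   and strchr ("hello", 'z') folds to a null pointer.  */
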
10801 static tree
10802 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10803 {
10804 if (!validate_arg (s1, POINTER_TYPE)
10805 || !validate_arg (s2, INTEGER_TYPE))
10806 return NULL_TREE;
10807 else
10808 {
10809 const char *p1;
10810
10811 if (TREE_CODE (s2) != INTEGER_CST)
10812 return NULL_TREE;
10813
10814 p1 = c_getstr (s1);
10815 if (p1 != NULL)
10816 {
10817 char c;
10818 const char *r;
10819 tree tem;
10820
10821 if (target_char_cast (s2, &c))
10822 return NULL_TREE;
10823
10824 r = strchr (p1, c);
10825
10826 if (r == NULL)
10827 return build_int_cst (TREE_TYPE (s1), 0);
10828
10829 /* Return an offset into the constant string argument. */
10830 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10831 return fold_convert_loc (loc, type, tem);
10832 }
10833 return NULL_TREE;
10834 }
10835 }
10836
10837 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10838 the call, and TYPE is its return type.
10839
10840 Return NULL_TREE if no simplification was possible, otherwise return the
10841 simplified form of the call as a tree.
10842
10843 The simplified form may be a constant or other expression which
10844 computes the same value, but in a more efficient manner (including
10845 calls to other builtin functions).
10846
10847 The call may contain arguments which need to be evaluated, but
10848 which are not useful to determine the result of the call. In
10849 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10850 COMPOUND_EXPR will be an argument which must be evaluated.
10851 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10852 COMPOUND_EXPR in the chain will contain the tree for the simplified
10853 form of the builtin function call. */
10854
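/* Illustrative example: strrchr ("hello", 'l') folds to the offset
   "hello" + 3, and strrchr (s, '\0') with a non-constant S is
   rewritten as strchr (s, '\0'), since both locate the terminating
   NUL.  */
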
10855 static tree
10856 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10857 {
10858 if (!validate_arg (s1, POINTER_TYPE)
10859 || !validate_arg (s2, INTEGER_TYPE))
10860 return NULL_TREE;
10861 else
10862 {
10863 tree fn;
10864 const char *p1;
10865
10866 if (TREE_CODE (s2) != INTEGER_CST)
10867 return NULL_TREE;
10868
10869 p1 = c_getstr (s1);
10870 if (p1 != NULL)
10871 {
10872 char c;
10873 const char *r;
10874 tree tem;
10875
10876 if (target_char_cast (s2, &c))
10877 return NULL_TREE;
10878
10879 r = strrchr (p1, c);
10880
10881 if (r == NULL)
10882 return build_int_cst (TREE_TYPE (s1), 0);
10883
10884 /* Return an offset into the constant string argument. */
10885 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10886 return fold_convert_loc (loc, type, tem);
10887 }
10888
10889 if (! integer_zerop (s2))
10890 return NULL_TREE;
10891
10892 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10893 if (!fn)
10894 return NULL_TREE;
10895
10896 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10897 return build_call_expr_loc (loc, fn, 2, s1, s2);
10898 }
10899 }
10900
10901 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10902 to the call, and TYPE is its return type.
10903
10904 Return NULL_TREE if no simplification was possible, otherwise return the
10905 simplified form of the call as a tree.
10906
10907 The simplified form may be a constant or other expression which
10908 computes the same value, but in a more efficient manner (including
10909 calls to other builtin functions).
10910
10911 The call may contain arguments which need to be evaluated, but
10912 which are not useful to determine the result of the call. In
10913 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10914 COMPOUND_EXPR will be an argument which must be evaluated.
10915 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10916 COMPOUND_EXPR in the chain will contain the tree for the simplified
10917 form of the builtin function call. */
10918
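/* Illustrative examples: strpbrk ("hello", "lo") folds to the offset
   "hello" + 2, strpbrk (s, "") folds to a null pointer (still
   evaluating S for side-effects), and strpbrk (s, "l") is rewritten
   as strchr (s, 'l').  */
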
10919 static tree
10920 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10921 {
10922 if (!validate_arg (s1, POINTER_TYPE)
10923 || !validate_arg (s2, POINTER_TYPE))
10924 return NULL_TREE;
10925 else
10926 {
10927 tree fn;
10928 const char *p1, *p2;
10929
10930 p2 = c_getstr (s2);
10931 if (p2 == NULL)
10932 return NULL_TREE;
10933
10934 p1 = c_getstr (s1);
10935 if (p1 != NULL)
10936 {
10937 const char *r = strpbrk (p1, p2);
10938 tree tem;
10939
10940 if (r == NULL)
10941 return build_int_cst (TREE_TYPE (s1), 0);
10942
10943 /* Return an offset into the constant string argument. */
10944 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10945 return fold_convert_loc (loc, type, tem);
10946 }
10947
10948 if (p2[0] == '\0')
10949 /* strpbrk(x, "") == NULL.
10950 Evaluate and ignore s1 in case it has side-effects. */
10951 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10952
10953 if (p2[1] != '\0')
10954 return NULL_TREE; /* Really call strpbrk. */
10955
10956 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10957 if (!fn)
10958 return NULL_TREE;
10959
10960 /* New argument list transforming strpbrk(s1, s2) to
10961 strchr(s1, s2[0]). */
10962 return build_call_expr_loc (loc, fn, 2, s1,
10963 build_int_cst (integer_type_node, p2[0]));
10964 }
10965 }
10966
10967 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10968 arguments to the call.
10969
10970 Return NULL_TREE if no simplification was possible, otherwise return the
10971 simplified form of the call as a tree.
10972
10973 The simplified form may be a constant or other expression which
10974 computes the same value, but in a more efficient manner (including
10975 calls to other builtin functions).
10976
10977 The call may contain arguments which need to be evaluated, but
10978 which are not useful to determine the result of the call. In
10979 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10980 COMPOUND_EXPR will be an argument which must be evaluated.
10981 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10982 COMPOUND_EXPR in the chain will contain the tree for the simplified
10983 form of the builtin function call. */
10984
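/* Illustrative examples: strncat (d, s, 0) and strncat (d, "", n)
   both fold to D (still evaluating the other operands), and
   strncat (d, "abc", 5) is rewritten as strcat (d, "abc") because the
   bound cannot truncate the copied string.  */
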
10985 static tree
10986 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
10987 {
10988 if (!validate_arg (dst, POINTER_TYPE)
10989 || !validate_arg (src, POINTER_TYPE)
10990 || !validate_arg (len, INTEGER_TYPE))
10991 return NULL_TREE;
10992 else
10993 {
10994 const char *p = c_getstr (src);
10995
10996 /* If the requested length is zero, or the src parameter string
10997 length is zero, return the dst parameter. */
10998 if (integer_zerop (len) || (p && *p == '\0'))
10999 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11000
11001 /* If the requested len is greater than or equal to the string
11002 length, call strcat. */
11003 if (TREE_CODE (len) == INTEGER_CST && p
11004 && compare_tree_int (len, strlen (p)) >= 0)
11005 {
11006 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11007
11008 /* If the replacement _DECL isn't initialized, don't do the
11009 transformation. */
11010 if (!fn)
11011 return NULL_TREE;
11012
11013 return build_call_expr_loc (loc, fn, 2, dst, src);
11014 }
11015 return NULL_TREE;
11016 }
11017 }
11018
11019 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11020 to the call.
11021
11022 Return NULL_TREE if no simplification was possible, otherwise return the
11023 simplified form of the call as a tree.
11024
11025 The simplified form may be a constant or other expression which
11026 computes the same value, but in a more efficient manner (including
11027 calls to other builtin functions).
11028
11029 The call may contain arguments which need to be evaluated, but
11030 which are not useful to determine the result of the call. In
11031 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11032 COMPOUND_EXPR will be an argument which must be evaluated.
11033 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11034 COMPOUND_EXPR in the chain will contain the tree for the simplified
11035 form of the builtin function call. */
11036
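/* Illustrative examples: strspn ("aab", "a") folds to the constant 2,
   while strspn (s, "") and strspn ("", s) fold to zero (evaluating
   both operands for side-effects).  */
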
11037 static tree
11038 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11039 {
11040 if (!validate_arg (s1, POINTER_TYPE)
11041 || !validate_arg (s2, POINTER_TYPE))
11042 return NULL_TREE;
11043 else
11044 {
11045 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11046
11047 /* If both arguments are constants, evaluate at compile-time. */
11048 if (p1 && p2)
11049 {
11050 const size_t r = strspn (p1, p2);
11051 return build_int_cst (size_type_node, r);
11052 }
11053
11054 /* If either argument is "", the result is zero. */
11055 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11056 /* Evaluate and ignore both arguments in case either one has
11057 side-effects. */
11058 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11059 s1, s2);
11060 return NULL_TREE;
11061 }
11062 }
11063
11064 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11065 to the call.
11066
11067 Return NULL_TREE if no simplification was possible, otherwise return the
11068 simplified form of the call as a tree.
11069
11070 The simplified form may be a constant or other expression which
11071 computes the same value, but in a more efficient manner (including
11072 calls to other builtin functions).
11073
11074 The call may contain arguments which need to be evaluated, but
11075 which are not useful to determine the result of the call. In
11076 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11077 COMPOUND_EXPR will be an argument which must be evaluated.
11078 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11079 COMPOUND_EXPR in the chain will contain the tree for the simplified
11080 form of the builtin function call. */
11081
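/* Illustrative examples: strcspn ("hello", "l") folds to the
   constant 2, strcspn ("", s) folds to zero, and strcspn (s, "") is
   rewritten as strlen (s).  */
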
11082 static tree
11083 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11084 {
11085 if (!validate_arg (s1, POINTER_TYPE)
11086 || !validate_arg (s2, POINTER_TYPE))
11087 return NULL_TREE;
11088 else
11089 {
11090 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11091
11092 /* If both arguments are constants, evaluate at compile-time. */
11093 if (p1 && p2)
11094 {
11095 const size_t r = strcspn (p1, p2);
11096 return build_int_cst (size_type_node, r);
11097 }
11098
11099 /* If the first argument is "", the result is zero. */
11100 if (p1 && *p1 == '\0')
11101 {
11102 /* Evaluate and ignore argument s2 in case it has
11103 side-effects. */
11104 return omit_one_operand_loc (loc, size_type_node,
11105 size_zero_node, s2);
11106 }
11107
11108 /* If the second argument is "", return __builtin_strlen(s1). */
11109 if (p2 && *p2 == '\0')
11110 {
11111 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11112
11113 /* If the replacement _DECL isn't initialized, don't do the
11114 transformation. */
11115 if (!fn)
11116 return NULL_TREE;
11117
11118 return build_call_expr_loc (loc, fn, 1, s1);
11119 }
11120 return NULL_TREE;
11121 }
11122 }
11123
11124 /* Fold the next_arg or va_start call EXP. Returns true if an error
11125 was produced, false otherwise; this is done so that we don't output
11126 the same error or warning more than once. */
11127
11128 bool
11129 fold_builtin_next_arg (tree exp, bool va_start_p)
11130 {
11131 tree fntype = TREE_TYPE (current_function_decl);
11132 int nargs = call_expr_nargs (exp);
11133 tree arg;
11134 /* There is a good chance the current input_location points inside the
11135 definition of the va_start macro (perhaps on the token for the
11136 builtin) in a system header, so warnings will not be emitted.
11137 Use the location in real source code. */
11138 source_location current_location =
11139 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11140 NULL);
11141
11142 if (!stdarg_p (fntype))
11143 {
11144 error ("%<va_start%> used in function with fixed args");
11145 return true;
11146 }
11147
11148 if (va_start_p)
11149 {
11150 if (nargs != 2)
11151 {
11152 error ("wrong number of arguments to function %<va_start%>");
11153 return true;
11154 }
11155 arg = CALL_EXPR_ARG (exp, 1);
11156 }
11157 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11158 once we have checked the arguments and, if needed, issued a warning. */
11159 else
11160 {
11161 if (nargs == 0)
11162 {
11163 /* Evidently an out of date version of <stdarg.h>; can't validate
11164 va_start's second argument, but can still work as intended. */
11165 warning_at (current_location,
11166 OPT_Wvarargs,
11167 "%<__builtin_next_arg%> called without an argument");
11168 return true;
11169 }
11170 else if (nargs > 1)
11171 {
11172 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11173 return true;
11174 }
11175 arg = CALL_EXPR_ARG (exp, 0);
11176 }
11177
11178 if (TREE_CODE (arg) == SSA_NAME)
11179 arg = SSA_NAME_VAR (arg);
11180
11181 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11182 or __builtin_next_arg (0) the first time we see it, after checking
11183 the arguments and if needed issuing a warning. */
11184 if (!integer_zerop (arg))
11185 {
11186 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11187
11188 /* Strip off all nops for the sake of the comparison. This
11189 is not quite the same as STRIP_NOPS. It does more.
11190 We must also strip off INDIRECT_EXPR for C++ reference
11191 parameters. */
11192 while (CONVERT_EXPR_P (arg)
11193 || TREE_CODE (arg) == INDIRECT_REF)
11194 arg = TREE_OPERAND (arg, 0);
11195 if (arg != last_parm)
11196 {
11197 /* FIXME: Sometimes with the tree optimizers we can end up with
11198 something other than the last argument even though the user used
11199 the last argument. We just warn and set the arg to be the last
11200 argument so that we will not get wrong code because of it. */
11202 warning_at (current_location,
11203 OPT_Wvarargs,
11204 "second parameter of %<va_start%> not last named argument");
11205 }
11206
11207 /* Undefined by C99 7.15.1.4p4 (va_start):
11208 "If the parameter parmN is declared with the register storage
11209 class, with a function or array type, or with a type that is
11210 not compatible with the type that results after application of
11211 the default argument promotions, the behavior is undefined."
11212 */
11213 else if (DECL_REGISTER (arg))
11214 {
11215 warning_at (current_location,
11216 OPT_Wvarargs,
11217 "undefined behaviour when second parameter of "
11218 "%<va_start%> is declared with %<register%> storage");
11219 }
11220
11221 /* We want to verify the second parameter just once before the tree
11222 optimizers are run and then avoid keeping it in the tree,
11223 as otherwise we could warn even for correct code like:
11224 void foo (int i, ...)
11225 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11226 if (va_start_p)
11227 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11228 else
11229 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11230 }
11231 return false;
11232 }
11233
11234
11235 /* Expand a call EXP to __builtin_object_size. */
11236
11237 static rtx
11238 expand_builtin_object_size (tree exp)
11239 {
11240 tree ost;
11241 int object_size_type;
11242 tree fndecl = get_callee_fndecl (exp);
11243
11244 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11245 {
11246 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11247 exp, fndecl);
11248 expand_builtin_trap ();
11249 return const0_rtx;
11250 }
11251
11252 ost = CALL_EXPR_ARG (exp, 1);
11253 STRIP_NOPS (ost);
11254
11255 if (TREE_CODE (ost) != INTEGER_CST
11256 || tree_int_cst_sgn (ost) < 0
11257 || compare_tree_int (ost, 3) > 0)
11258 {
11259 error ("%Klast argument of %D is not integer constant between 0 and 3",
11260 exp, fndecl);
11261 expand_builtin_trap ();
11262 return const0_rtx;
11263 }
11264
11265 object_size_type = tree_to_shwi (ost);
11266
11267 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11268 }
11269
11270 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11271 FCODE is the BUILT_IN_* to use.
11272 Return NULL_RTX if we failed; the caller should emit a normal call,
11273 otherwise try to get the result in TARGET, if convenient (and in
11274 mode MODE if that's convenient). */
11275
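/* Illustrative example: __builtin___memcpy_chk (d, s, n, os) with a
   constant length N expands as a plain memcpy (d, s, n) when N is
   known not to exceed the object size OS (or when OS is (size_t) -1,
   i.e. unknown); when N is known to exceed OS, a "will always
   overflow" warning is emitted and NULL_RTX is returned so that a
   normal library call results.  */
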
11276 static rtx
11277 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11278 enum built_in_function fcode)
11279 {
11280 tree dest, src, len, size;
11281
11282 if (!validate_arglist (exp,
11283 POINTER_TYPE,
11284 fcode == BUILT_IN_MEMSET_CHK
11285 ? INTEGER_TYPE : POINTER_TYPE,
11286 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11287 return NULL_RTX;
11288
11289 dest = CALL_EXPR_ARG (exp, 0);
11290 src = CALL_EXPR_ARG (exp, 1);
11291 len = CALL_EXPR_ARG (exp, 2);
11292 size = CALL_EXPR_ARG (exp, 3);
11293
11294 if (! tree_fits_uhwi_p (size))
11295 return NULL_RTX;
11296
11297 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11298 {
11299 tree fn;
11300
11301 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11302 {
11303 warning_at (tree_nonartificial_location (exp),
11304 0, "%Kcall to %D will always overflow destination buffer",
11305 exp, get_callee_fndecl (exp));
11306 return NULL_RTX;
11307 }
11308
11309 fn = NULL_TREE;
11310 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11311 mem{cpy,pcpy,move,set} is available. */
11312 switch (fcode)
11313 {
11314 case BUILT_IN_MEMCPY_CHK:
11315 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11316 break;
11317 case BUILT_IN_MEMPCPY_CHK:
11318 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11319 break;
11320 case BUILT_IN_MEMMOVE_CHK:
11321 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11322 break;
11323 case BUILT_IN_MEMSET_CHK:
11324 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11325 break;
11326 default:
11327 break;
11328 }
11329
11330 if (! fn)
11331 return NULL_RTX;
11332
11333 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11334 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11335 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11336 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11337 }
11338 else if (fcode == BUILT_IN_MEMSET_CHK)
11339 return NULL_RTX;
11340 else
11341 {
11342 unsigned int dest_align = get_pointer_alignment (dest);
11343
11344 /* If DEST is not a pointer type, call the normal function. */
11345 if (dest_align == 0)
11346 return NULL_RTX;
11347
11348 /* If SRC and DEST are the same (and not volatile), do nothing. */
11349 if (operand_equal_p (src, dest, 0))
11350 {
11351 tree expr;
11352
11353 if (fcode != BUILT_IN_MEMPCPY_CHK)
11354 {
11355 /* Evaluate and ignore LEN in case it has side-effects. */
11356 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11357 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11358 }
11359
11360 expr = fold_build_pointer_plus (dest, len);
11361 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11362 }
11363
11364 /* __memmove_chk special case. */
11365 if (fcode == BUILT_IN_MEMMOVE_CHK)
11366 {
11367 unsigned int src_align = get_pointer_alignment (src);
11368
11369 if (src_align == 0)
11370 return NULL_RTX;
11371
11372 /* If src is categorized for a readonly section we can use
11373 normal __memcpy_chk. */
11374 if (readonly_data_expr (src))
11375 {
11376 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11377 if (!fn)
11378 return NULL_RTX;
11379 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11380 dest, src, len, size);
11381 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11382 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11383 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11384 }
11385 }
11386 return NULL_RTX;
11387 }
11388 }
11389
11390 /* Emit warning if a buffer overflow is detected at compile time. */
11391
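/* Illustrative scenario: for "char buf[4];", a fortified call such as
   __builtin___strcpy_chk (buf, "abcde", 4) is diagnosed here, since
   the source needs strlen ("abcde") + 1 = 6 bytes but the known
   object size is only 4.  */
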
11392 static void
11393 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11394 {
11395 int is_strlen = 0;
11396 tree len, size;
11397 location_t loc = tree_nonartificial_location (exp);
11398
11399 switch (fcode)
11400 {
11401 case BUILT_IN_STRCPY_CHK:
11402 case BUILT_IN_STPCPY_CHK:
11403 /* For __strcat_chk the warning will be emitted only if overflowing
11404 by at least strlen (dest) + 1 bytes. */
11405 case BUILT_IN_STRCAT_CHK:
11406 len = CALL_EXPR_ARG (exp, 1);
11407 size = CALL_EXPR_ARG (exp, 2);
11408 is_strlen = 1;
11409 break;
11410 case BUILT_IN_STRNCAT_CHK:
11411 case BUILT_IN_STRNCPY_CHK:
11412 case BUILT_IN_STPNCPY_CHK:
11413 len = CALL_EXPR_ARG (exp, 2);
11414 size = CALL_EXPR_ARG (exp, 3);
11415 break;
11416 case BUILT_IN_SNPRINTF_CHK:
11417 case BUILT_IN_VSNPRINTF_CHK:
11418 len = CALL_EXPR_ARG (exp, 1);
11419 size = CALL_EXPR_ARG (exp, 3);
11420 break;
11421 default:
11422 gcc_unreachable ();
11423 }
11424
11425 if (!len || !size)
11426 return;
11427
11428 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11429 return;
11430
11431 if (is_strlen)
11432 {
11433 len = c_strlen (len, 1);
11434 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11435 return;
11436 }
11437 else if (fcode == BUILT_IN_STRNCAT_CHK)
11438 {
11439 tree src = CALL_EXPR_ARG (exp, 1);
11440 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11441 return;
11442 src = c_strlen (src, 1);
11443 if (! src || ! tree_fits_uhwi_p (src))
11444 {
11445 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11446 exp, get_callee_fndecl (exp));
11447 return;
11448 }
11449 else if (tree_int_cst_lt (src, size))
11450 return;
11451 }
11452 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11453 return;
11454
11455 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11456 exp, get_callee_fndecl (exp));
11457 }
11458
11459 /* Emit warning if a buffer overflow is detected at compile time
11460 in __sprintf_chk/__vsprintf_chk calls. */
11461
11462 static void
11463 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11464 {
11465 tree size, len, fmt;
11466 const char *fmt_str;
11467 int nargs = call_expr_nargs (exp);
11468
11469 /* Verify the required arguments in the original call. */
11470
11471 if (nargs < 4)
11472 return;
11473 size = CALL_EXPR_ARG (exp, 2);
11474 fmt = CALL_EXPR_ARG (exp, 3);
11475
11476 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11477 return;
11478
11479 /* Check whether the format is a literal string constant. */
11480 fmt_str = c_getstr (fmt);
11481 if (fmt_str == NULL)
11482 return;
11483
11484 if (!init_target_chars ())
11485 return;
11486
11487 /* If the format doesn't contain % args or %%, we know its size. */
11488 if (strchr (fmt_str, target_percent) == 0)
11489 len = build_int_cstu (size_type_node, strlen (fmt_str));
11490 /* If the format is "%s" and the first ... argument is a string literal,
11491 we know it too. */
11492 else if (fcode == BUILT_IN_SPRINTF_CHK
11493 && strcmp (fmt_str, target_percent_s) == 0)
11494 {
11495 tree arg;
11496
11497 if (nargs < 5)
11498 return;
11499 arg = CALL_EXPR_ARG (exp, 4);
11500 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11501 return;
11502
11503 len = c_strlen (arg, 1);
11504 if (!len || ! tree_fits_uhwi_p (len))
11505 return;
11506 }
11507 else
11508 return;
11509
11510 if (! tree_int_cst_lt (len, size))
11511 warning_at (tree_nonartificial_location (exp),
11512 0, "%Kcall to %D will always overflow destination buffer",
11513 exp, get_callee_fndecl (exp));
11514 }
11515
11516 /* Emit warning if a free is called with address of a variable. */
11517
11518 static void
11519 maybe_emit_free_warning (tree exp)
11520 {
11521 tree arg = CALL_EXPR_ARG (exp, 0);
11522
11523 STRIP_NOPS (arg);
11524 if (TREE_CODE (arg) != ADDR_EXPR)
11525 return;
11526
11527 arg = get_base_address (TREE_OPERAND (arg, 0));
11528 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11529 return;
11530
11531 if (SSA_VAR_P (arg))
11532 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11533 "%Kattempt to free a non-heap object %qD", exp, arg);
11534 else
11535 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11536 "%Kattempt to free a non-heap object", exp);
11537 }
11538
11539 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11540 if possible. */
11541
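/* Illustrative example: for "char buf[10];",
   __builtin_object_size (&buf[2], 0) folds to 8, while a PTR with
   side-effects folds directly to (size_t) -1 for types 0 and 1 and
   to 0 for types 2 and 3, as the code below implements.  */
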
11542 static tree
11543 fold_builtin_object_size (tree ptr, tree ost)
11544 {
11545 unsigned HOST_WIDE_INT bytes;
11546 int object_size_type;
11547
11548 if (!validate_arg (ptr, POINTER_TYPE)
11549 || !validate_arg (ost, INTEGER_TYPE))
11550 return NULL_TREE;
11551
11552 STRIP_NOPS (ost);
11553
11554 if (TREE_CODE (ost) != INTEGER_CST
11555 || tree_int_cst_sgn (ost) < 0
11556 || compare_tree_int (ost, 3) > 0)
11557 return NULL_TREE;
11558
11559 object_size_type = tree_to_shwi (ost);
11560
11561 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11562 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11563 and (size_t) 0 for types 2 and 3. */
11564 if (TREE_SIDE_EFFECTS (ptr))
11565 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11566
11567 if (TREE_CODE (ptr) == ADDR_EXPR)
11568 {
11569 bytes = compute_builtin_object_size (ptr, object_size_type);
11570 if (wi::fits_to_tree_p (bytes, size_type_node))
11571 return build_int_cstu (size_type_node, bytes);
11572 }
11573 else if (TREE_CODE (ptr) == SSA_NAME)
11574 {
11575 /* If object size is not known yet, delay folding until
11576 later. Maybe subsequent passes will help determining
11577 it. */
11578 bytes = compute_builtin_object_size (ptr, object_size_type);
11579 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11580 && wi::fits_to_tree_p (bytes, size_type_node))
11581 return build_int_cstu (size_type_node, bytes);
11582 }
11583
11584 return NULL_TREE;
11585 }
11586
11587 /* Builtins with folding operations that operate on "..." arguments
11588 need special handling; we need to store the arguments in a convenient
11589 data structure before attempting any folding. Fortunately there are
11590 only a few builtins that fall into this category. FNDECL is the
11591 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11592 result of the function call is ignored. */
11593
11594 static tree
11595 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11596 bool ignore ATTRIBUTE_UNUSED)
11597 {
11598 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11599 tree ret = NULL_TREE;
11600
11601 switch (fcode)
11602 {
11603 case BUILT_IN_FPCLASSIFY:
11604 ret = fold_builtin_fpclassify (loc, exp);
11605 break;
11606
11607 default:
11608 break;
11609 }
11610 if (ret)
11611 {
11612 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11613 SET_EXPR_LOCATION (ret, loc);
11614 TREE_NO_WARNING (ret) = 1;
11615 return ret;
11616 }
11617 return NULL_TREE;
11618 }
11619
11620 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
11621 FMT and ARG are the arguments to the call; we don't fold cases with
11622 more than 2 arguments, and ARG may be null if this is a 1-argument case.
11623
11624 Return NULL_TREE if no simplification was possible, otherwise return the
11625 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11626 code of the function to be simplified. */
11627
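/* Illustrative examples of the transformations below:
   printf ("hello\n") becomes puts ("hello"), printf ("x") becomes
   putchar ('x'), printf ("%s\n", s) becomes puts (s), and
   printf ("%c", c) becomes putchar (c), in each case only when the
   return value is ignored.  */
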
11628 static tree
11629 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
11630 tree arg, bool ignore,
11631 enum built_in_function fcode)
11632 {
11633 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
11634 const char *fmt_str = NULL;
11635
11636 /* If the return value is used, don't do the transformation. */
11637 if (! ignore)
11638 return NULL_TREE;
11639
11640 /* Verify the required arguments in the original call. */
11641 if (!validate_arg (fmt, POINTER_TYPE))
11642 return NULL_TREE;
11643
11644 /* Check whether the format is a literal string constant. */
11645 fmt_str = c_getstr (fmt);
11646 if (fmt_str == NULL)
11647 return NULL_TREE;
11648
11649 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
11650 {
11651 /* If we're using an unlocked function, assume the other
11652 unlocked functions exist explicitly. */
11653 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
11654 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
11655 }
11656 else
11657 {
11658 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
11659 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
11660 }
11661
11662 if (!init_target_chars ())
11663 return NULL_TREE;
11664
11665 if (strcmp (fmt_str, target_percent_s) == 0
11666 || strchr (fmt_str, target_percent) == NULL)
11667 {
11668 const char *str;
11669
11670 if (strcmp (fmt_str, target_percent_s) == 0)
11671 {
11672 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11673 return NULL_TREE;
11674
11675 if (!arg || !validate_arg (arg, POINTER_TYPE))
11676 return NULL_TREE;
11677
11678 str = c_getstr (arg);
11679 if (str == NULL)
11680 return NULL_TREE;
11681 }
11682 else
11683 {
11684 /* The format specifier doesn't contain any '%' characters. */
11685 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
11686 && arg)
11687 return NULL_TREE;
11688 str = fmt_str;
11689 }
11690
11691 /* If the string was "", printf does nothing. */
11692 if (str[0] == '\0')
11693 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11694
11695 /* If the string has length of 1, call putchar. */
11696 if (str[1] == '\0')
11697 {
11698 /* Given printf ("c") (where c is any one character),
11699 convert "c"[0] to an int and pass that to the replacement
11700 function. */
11701 newarg = build_int_cst (integer_type_node, str[0]);
11702 if (fn_putchar)
11703 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
11704 }
11705 else
11706 {
11707 /* If the string was "string\n", call puts("string"). */
11708 size_t len = strlen (str);
11709 if ((unsigned char)str[len - 1] == target_newline
11710 && (size_t) (int) len == len
11711 && (int) len > 0)
11712 {
11713 char *newstr;
11714 tree offset_node, string_cst;
11715
11716 /* Create a NUL-terminated string that's one char shorter
11717 than the original, stripping off the trailing '\n'. */
11718 newarg = build_string_literal (len, str);
11719 string_cst = string_constant (newarg, &offset_node);
11720 gcc_checking_assert (string_cst
11721 && (TREE_STRING_LENGTH (string_cst)
11722 == (int) len)
11723 && integer_zerop (offset_node)
11724 && (unsigned char)
11725 TREE_STRING_POINTER (string_cst)[len - 1]
11726 == target_newline);
11727 /* build_string_literal creates a new STRING_CST,
11728 modify it in place to avoid double copying. */
11729 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
11730 newstr[len - 1] = '\0';
11731 if (fn_puts)
11732 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
11733 }
11734 else
11735 /* We'd like to arrange to call fputs(string,stdout) here,
11736 but we need stdout and don't have a way to get it yet. */
11737 return NULL_TREE;
11738 }
11739 }
11740
11741 /* The other optimizations can be done only on the non-va_list variants. */
11742 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11743 return NULL_TREE;
11744
11745 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
11746 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
11747 {
11748 if (!arg || !validate_arg (arg, POINTER_TYPE))
11749 return NULL_TREE;
11750 if (fn_puts)
11751 call = build_call_expr_loc (loc, fn_puts, 1, arg);
11752 }
11753
11754 /* If the format specifier was "%c", call __builtin_putchar(arg). */
11755 else if (strcmp (fmt_str, target_percent_c) == 0)
11756 {
11757 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11758 return NULL_TREE;
11759 if (fn_putchar)
11760 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
11761 }
11762
11763 if (!call)
11764 return NULL_TREE;
11765
11766 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
11767 }
11768
11769 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
11770 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
11771 more than 3 arguments, and ARG may be null in the 2-argument case.
11772
11773 Return NULL_TREE if no simplification was possible, otherwise return the
11774 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11775 code of the function to be simplified. */
11776
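/* Illustrative examples of the transformations below:
   fprintf (fp, "hello") becomes fputs ("hello", fp),
   fprintf (fp, "%s", s) becomes fputs (s, fp), and
   fprintf (fp, "%c", c) becomes fputc (c, fp), again only when the
   return value is ignored.  */
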
11777 static tree
11778 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
11779 tree fmt, tree arg, bool ignore,
11780 enum built_in_function fcode)
11781 {
11782 tree fn_fputc, fn_fputs, call = NULL_TREE;
11783 const char *fmt_str = NULL;
11784
11785 /* If the return value is used, don't do the transformation. */
11786 if (! ignore)
11787 return NULL_TREE;
11788
11789 /* Verify the required arguments in the original call. */
11790 if (!validate_arg (fp, POINTER_TYPE))
11791 return NULL_TREE;
11792 if (!validate_arg (fmt, POINTER_TYPE))
11793 return NULL_TREE;
11794
11795 /* Check whether the format is a literal string constant. */
11796 fmt_str = c_getstr (fmt);
11797 if (fmt_str == NULL)
11798 return NULL_TREE;
11799
11800 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
11801 {
11802 /* If we're using an unlocked function, assume the other
11803 unlocked functions exist explicitly. */
11804 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
11805 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
11806 }
11807 else
11808 {
11809 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
11810 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
11811 }
11812
11813 if (!init_target_chars ())
11814 return NULL_TREE;
11815
11816 /* If the format doesn't contain % args or %%, use strcpy. */
11817 if (strchr (fmt_str, target_percent) == NULL)
11818 {
11819 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
11820 && arg)
11821 return NULL_TREE;
11822
11823 /* If the format specifier was "", fprintf does nothing. */
11824 if (fmt_str[0] == '\0')
11825 {
11826 /* If FP has side-effects, just wait until gimplification is
11827 done. */
11828 if (TREE_SIDE_EFFECTS (fp))
11829 return NULL_TREE;
11830
11831 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11832 }
11833
11834 /* When "string" doesn't contain %, replace all cases of
11835 fprintf (fp, string) with fputs (string, fp). The fputs
11836 builtin will take care of special cases like length == 1. */
11837 if (fn_fputs)
11838 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
11839 }
11840
11841 /* The other optimizations can be done only on the non-va_list variants. */
11842 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
11843 return NULL_TREE;
11844
11845 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
11846 else if (strcmp (fmt_str, target_percent_s) == 0)
11847 {
11848 if (!arg || !validate_arg (arg, POINTER_TYPE))
11849 return NULL_TREE;
11850 if (fn_fputs)
11851 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
11852 }
11853
11854 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
11855 else if (strcmp (fmt_str, target_percent_c) == 0)
11856 {
11857 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11858 return NULL_TREE;
11859 if (fn_fputc)
11860 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
11861 }
11862
11863 if (!call)
11864 return NULL_TREE;
11865 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
11866 }
11867
11868 /* Initialize format string characters in the target charset. */
11869
11870 bool
11871 init_target_chars (void)
11872 {
11873 static bool init;
11874 if (!init)
11875 {
11876 target_newline = lang_hooks.to_target_charset ('\n');
11877 target_percent = lang_hooks.to_target_charset ('%');
11878 target_c = lang_hooks.to_target_charset ('c');
11879 target_s = lang_hooks.to_target_charset ('s');
11880 if (target_newline == 0 || target_percent == 0 || target_c == 0
11881 || target_s == 0)
11882 return false;
11883
11884 target_percent_c[0] = target_percent;
11885 target_percent_c[1] = target_c;
11886 target_percent_c[2] = '\0';
11887
11888 target_percent_s[0] = target_percent;
11889 target_percent_s[1] = target_s;
11890 target_percent_s[2] = '\0';
11891
11892 target_percent_s_newline[0] = target_percent;
11893 target_percent_s_newline[1] = target_s;
11894 target_percent_s_newline[2] = target_newline;
11895 target_percent_s_newline[3] = '\0';
11896
11897 init = true;
11898 }
11899 return true;
11900 }
11901
11902 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11903 and no overflow/underflow occurred. INEXACT is true if M was not
11904 exactly calculated. TYPE is the tree type for the result. This
11905 function assumes that you cleared the MPFR flags before
11906 calculating M, so that any flag set on entry to this function was
11907 set by that calculation. Return NULL_TREE if any checks fail. */
11908
11909 static tree
11910 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11911 {
11912 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11913 overflow/underflow occurred. If -frounding-math, proceed iff the
11914 result of calling FUNC was exact. */
11915 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11916 && (!flag_rounding_math || !inexact))
11917 {
11918 REAL_VALUE_TYPE rr;
11919
11920 real_from_mpfr (&rr, m, type, GMP_RNDN);
11921 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11922 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11923 but the mpfr_t is not, then we underflowed in the
11924 conversion. */
11925 if (real_isfinite (&rr)
11926 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11927 {
11928 REAL_VALUE_TYPE rmode;
11929
11930 real_convert (&rmode, TYPE_MODE (type), &rr);
11931 /* Proceed iff the specified mode can hold the value. */
11932 if (real_identical (&rmode, &rr))
11933 return build_real (type, rmode);
11934 }
11935 }
11936 return NULL_TREE;
11937 }
11938
11939 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11940 number and no overflow/underflow occurred. INEXACT is true if M
11941 was not exactly calculated. TYPE is the tree type for the result.
11942 This function assumes that you cleared the MPFR flags before
11943 calculating M, so that any flag set on entry to this function was
11944 set by that calculation. Return NULL_TREE if any checks fail;
11945 if FORCE_CONVERT is true, the checks are bypassed. */
11946
11947 static tree
11948 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11949 {
11950 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11951 overflow/underflow occurred. If -frounding-math, proceed iff the
11952 result of calling FUNC was exact. */
11953 if (force_convert
11954 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11955 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11956 && (!flag_rounding_math || !inexact)))
11957 {
11958 REAL_VALUE_TYPE re, im;
11959
11960 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11961 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11962 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11963 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11964 but the mpfr_t is not, then we underflowed in the
11965 conversion. */
11966 if (force_convert
11967 || (real_isfinite (&re) && real_isfinite (&im)
11968 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11969 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11970 {
11971 REAL_VALUE_TYPE re_mode, im_mode;
11972
11973 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11974 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11975 /* Proceed iff the specified mode can hold the value. */
11976 if (force_convert
11977 || (real_identical (&re_mode, &re)
11978 && real_identical (&im_mode, &im)))
11979 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11980 build_real (TREE_TYPE (type), im_mode));
11981 }
11982 }
11983 return NULL_TREE;
11984 }
11985
11986 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11987 FUNC on it and return the resulting value as a tree with type TYPE.
11988 If MIN and/or MAX are not NULL, then the supplied ARG must be
11989 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11990 acceptable values, otherwise they are not. The mpfr precision is
11991 set to the precision of TYPE. We assume that function FUNC returns
11992 zero if the result could be calculated exactly within the requested
11993 precision. */
11994
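/* Illustrative uses (mirroring the callers earlier in this file):
   folding sin (x) for a constant X amounts to

       do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false);

   while a domain-restricted function such as acos passes &dconstm1
   and &dconst1 as inclusive MIN/MAX bounds.  */
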
11995 static tree
11996 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11997 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11998 bool inclusive)
11999 {
12000 tree result = NULL_TREE;
12001
12002 STRIP_NOPS (arg);
12003
12004 /* To proceed, MPFR must exactly represent the target floating point
12005 format, which only happens when the target base equals two. */
12006 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12007 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12008 {
12009 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12010
12011 if (real_isfinite (ra)
12012 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
12013 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
12014 {
12015 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12016 const int prec = fmt->p;
12017 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12018 int inexact;
12019 mpfr_t m;
12020
12021 mpfr_init2 (m, prec);
12022 mpfr_from_real (m, ra, GMP_RNDN);
12023 mpfr_clear_flags ();
12024 inexact = func (m, m, rnd);
12025 result = do_mpfr_ckconv (m, type, inexact);
12026 mpfr_clear (m);
12027 }
12028 }
12029
12030 return result;
12031 }
12032
12033 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12034 FUNC on it and return the resulting value as a tree with type TYPE.
12035 The mpfr precision is set to the precision of TYPE. We assume that
12036 function FUNC returns zero if the result could be calculated
12037 exactly within the requested precision. */
12038
12039 static tree
12040 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12041 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12042 {
12043 tree result = NULL_TREE;
12044
12045 STRIP_NOPS (arg1);
12046 STRIP_NOPS (arg2);
12047
12048 /* To proceed, MPFR must exactly represent the target floating point
12049 format, which only happens when the target base equals two. */
12050 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12051 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12052 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12053 {
12054 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12055 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12056
12057 if (real_isfinite (ra1) && real_isfinite (ra2))
12058 {
12059 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12060 const int prec = fmt->p;
12061 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12062 int inexact;
12063 mpfr_t m1, m2;
12064
12065 mpfr_inits2 (prec, m1, m2, NULL);
12066 mpfr_from_real (m1, ra1, GMP_RNDN);
12067 mpfr_from_real (m2, ra2, GMP_RNDN);
12068 mpfr_clear_flags ();
12069 inexact = func (m1, m1, m2, rnd);
12070 result = do_mpfr_ckconv (m1, type, inexact);
12071 mpfr_clears (m1, m2, NULL);
12072 }
12073 }
12074
12075 return result;
12076 }
12077
12078 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12079 FUNC on it and return the resulting value as a tree with type TYPE.
12080 The mpfr precision is set to the precision of TYPE. We assume that
12081 function FUNC returns zero if the result could be calculated
12082 exactly within the requested precision. */
12083
static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr,
                          mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2, m3;

          mpfr_inits2 (prec, m1, m2, m3, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_from_real (m3, ra3, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, m3, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, m3, NULL);
        }
    }

  return result;
}
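
/* Illustrative sketch only (hypothetical helper): fused multiply-add
   is the typical three-argument client, since mpfr_fma has exactly the
   signature do_mpfr_arg3 expects.  */

static tree ATTRIBUTE_UNUSED
example_fold_fma (tree arg0, tree arg1, tree arg2, tree type)
{
  /* Folds fma (arg0, arg1, arg2) when all three arguments are finite
     REAL_CSTs and the result is exact at TYPE's precision.  */
  return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
}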

/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set the
   values pointed to by ARG_SINP and ARG_COSP to the resulting sine and
   cosine.  If ARG_SINP and ARG_COSP are NULL, the result is instead
   returned as a complex value.  The type is taken from the type of ARG
   and is used to set the precision of the calculation and of the
   results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_s, result_c;
          int inexact;
          mpfr_t m, ms, mc;

          mpfr_inits2 (prec, m, ms, mc, NULL);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_sin_cos (ms, mc, m, rnd);
          result_s = do_mpfr_ckconv (ms, type, inexact);
          result_c = do_mpfr_ckconv (mc, type, inexact);
          mpfr_clears (m, ms, mc, NULL);
          if (result_s && result_c)
            {
              /* If we are supposed to return the result as a complex
                 value, do so.  */
              if (!arg_sinp && !arg_cosp)
                return build_complex (build_complex_type (type),
                                      result_c, result_s);

              /* Dereference the sin/cos pointer arguments.  */
              arg_sinp = build_fold_indirect_ref (arg_sinp);
              arg_cosp = build_fold_indirect_ref (arg_cosp);
              /* Proceed iff valid pointer types were passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
                  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
                {
                  /* Set the values.  */
                  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
                                          result_s);
                  TREE_SIDE_EFFECTS (result_s) = 1;
                  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
                                          result_c);
                  TREE_SIDE_EFFECTS (result_c) = 1;
                  /* Combine the assignments into a compound expr.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_s, result_c));
                }
            }
        }
    }
  return result;
}
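
/* Illustrative sketch only (hypothetical helper): passing null pointer
   arguments requests the complex-valued form of the result, which is
   how cexpi (x) == cos (x) + i*sin (x) can be folded; a sincos caller
   would instead pass its two output-pointer arguments through.  */

static tree ATTRIBUTE_UNUSED
example_fold_cexpi (tree arg)
{
  /* The complex result is built as cos + i*sin above, matching the
     cexpi semantics.  */
  return do_mpfr_sincos (arg, NULL_TREE, NULL_TREE);
}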

/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  If MIN is nonnull, ARG2 must additionally be greater
   than *MIN, or greater than or equal to *MIN when INCLUSIVE is
   true.  */

static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long) n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
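
/* Illustrative sketch only (hypothetical helper): yn requires a
   strictly positive argument, which is expressed through the
   MIN/INCLUSIVE parameters; jn has no such restriction and would pass
   NULL for MIN instead.  */

static tree ATTRIBUTE_UNUSED
example_fold_yn (tree arg0, tree arg1, tree type)
{
  /* Fold yn (n, x) only for x > 0, hence &dconst0 with INCLUSIVE set
     to false.  */
  return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn, &dconst0, false);
}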

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the value pointed to by ARG_QUO and return the result.  The type is
   taken from the type of ARG0 and is used for setting the precision of
   the calculation and of the results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long, so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, reduce quo modulo the largest value the
                 target int can hold, leaving one bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
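
/* Illustrative sketch only (hypothetical helper): remquo takes the two
   real operands plus the int* destination for the quotient bits.  */

static tree ATTRIBUTE_UNUSED
example_fold_remquo (tree arg0, tree arg1, tree arg_quo)
{
  /* On success the result is a COMPOUND_EXPR that first stores the
     quotient through ARG_QUO and then yields the remainder.  */
  return do_mpfr_remquo (arg0, arg1, arg_quo);
}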

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointed to by ARG_SG
   will be set to the appropriate signgam value, -1 or 1.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
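
/* Illustrative sketch only (hypothetical helper): lgamma_r (x, &sg)
   folds to a COMPOUND_EXPR that stores the sign through ARG_SG before
   yielding the lgamma value.  */

static tree ATTRIBUTE_UNUSED
example_fold_lgamma_r (tree arg, tree arg_sg, tree type)
{
  /* NULL_TREE comes back unless ARG is a valid finite constant and
     ARG_SG really is an int pointer.  */
  return do_mpfr_lgamma_r (arg, arg_sg, type);
}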

/* If argument ARG is a COMPLEX_CST, call the one-argument mpc function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE's real component.
   We assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}
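
/* Illustrative sketch only (hypothetical helper): any one-argument
   complex math function with the mpc signature can be folded this way;
   ccos is a representative client.  */

static tree ATTRIBUTE_UNUSED
example_fold_ccos (tree arg, tree type)
{
  /* mpc_cos matches the int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t)
     signature do_mpc_arg1 expects.  */
  return do_mpc_arg1 (arg, type, mpc_cos);
}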

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE's real component.  We assume that function FUNC returns zero if
   the result could be calculated exactly within the requested
   precision.  If DO_NONFINITE is true, then fold expressions
   containing Inf or NaN in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
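
/* Illustrative sketch only (hypothetical helper, and the DO_NONFINITE
   choice shown is an assumption; real callers pick it from the math
   flags in effect): cpow is a typical two-argument client.  */

static tree ATTRIBUTE_UNUSED
example_fold_cpow (tree arg0, tree arg1, tree type)
{
  /* With DO_NONFINITE == 0, folding is declined when any component of
     either argument is an Inf or NaN.  */
  return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
}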

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}
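
/* For reference, the situation this function handles arises from user
   code that renames a builtin at the assembler level, e.g.
   (illustrative only):

     void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   Afterwards both expanded block moves and emitted library calls for
   __builtin_memcpy must refer to my_memcpy, which is why the memcpy
   case also reinitializes the block-move function and the libfunc.  */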

/* Return true if DECL is a builtin that expands to a constant or
   similarly simple code.  */

bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one
   that is most probably expanded inline into reasonably simple code.
   This is a superset of is_simple_builtin.  */

bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
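
/* A minimal sketch of how a caller might use these predicates
   (hypothetical helper, not part of this file; GCC's inline-cost
   estimation performs a similar check):  */

static bool ATTRIBUTE_UNUSED
example_cheap_call_p (gimple stmt)
{
  tree decl = gimple_call_fndecl (stmt);
  /* Calls to inexpensive builtins can be costed like a handful of
     instructions rather than a full call.  */
  return decl != NULL_TREE && is_inexpensive_builtin (decl);
}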