/* Expand builtin functions.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "ipa-ref.h"
#include "lto-streamer.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_,
   or names one of the Cilk runtime entry points when Cilk Plus is
   enabled.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
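
/* Illustrative sketch (not part of the original source) of the names the
   predicate above accepts:

     is_builtin_name ("__builtin_memcpy")      - true
     is_builtin_name ("__sync_fetch_and_add")  - true
     is_builtin_name ("__atomic_load_n")       - true
     is_builtin_name ("memcpy")                - false  */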


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
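
/* Worked example (illustrative, not part of the original source): if the
   address of EXP is known to satisfy addr % 16 == 4, get_object_alignment_1
   stores *ALIGNP = 128 bits and *BITPOSP = 32 bits, and get_object_alignment
   reduces that to bitpos & -bitpos = 32 bits, i.e. a guaranteed 4-byte
   alignment.  */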

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
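
/* Worked example (illustrative, not part of the original source): for the
   constant pointer (char *) 6 the INTEGER_CST case above yields
   *BITPOSP = 48 bits, so get_pointer_alignment returns 48 & -48 = 16 bits,
   i.e. the pointer is only known to be 2-byte aligned.  */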

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
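
/* Illustrative sketch (not part of the original source): for the expression
   "hello" + 1, c_strlen receives the STRING_CST with an offset of 1 and
   returns ssize_int (4).  For "foo\0bar" with a non-constant offset it
   returns NULL_TREE, because the internal zero byte makes the length depend
   on the unknown starting point.  */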

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
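
/* Illustrative sketch (not part of the original source): for the tree
   representing "abc" + 1, c_getstr returns a host pointer to "bc"; for an
   offset that is not a constant or lies past the end of the array it
   returns 0.  */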

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
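
/* Worked example (illustrative, not part of the original source): on a
   little-endian target, c_readstr ("abcd", SImode) packs the bytes in
   memory order and yields the constant 0x64636261; on a big-endian target
   the same call yields 0x61626364.  Once a zero byte is seen, CH stays 0
   and the remaining positions are filled with zeros.  */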

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
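
/* Illustrative sketch (not part of the original source): with 8-bit chars
   on both target and host, target_char_cast of the INTEGER_CST 65 stores
   'A' in *P and returns 0.  With 16-bit target chars and the value 0x141,
   VAL (0x141) and HOSTVAL (0x41) differ, so the function returns 1.  */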

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
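
/* Source-level usage sketch (illustrative, not part of the original source):

     void *ret_addr = __builtin_return_address (0);
     void *frame    = __builtin_frame_address (0);

   A COUNT of 0 names the current frame; larger counts walk the dynamic
   chain and are only reliable when every intervening frame keeps a frame
   pointer.  */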

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
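
/* Layout of the __builtin_setjmp buffer as written by the code above
   (a sketch, not part of the original source):

     word 0        frame value (targetm.builtin_setjmp_frame_value)
     word 1        address of RECEIVER_LABEL
     words 2 ...   machine-dependent stack save area, in SA_MODE  */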

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
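
/* Source-level usage sketch (illustrative, not part of the original source;
   do_work and handle_unwind are placeholder names):

     static intptr_t buf[5];

     if (__builtin_setjmp (buf) == 0)
       do_work ();          - direct path, setjmp yields 0
     else
       handle_unwind ();    - reached via __builtin_longjmp (buf, 1)

   The second argument to __builtin_longjmp must be the constant 1, as the
   gcc_assert above enforces.  */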

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
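
/* Illustrative call (not part of the original source): a builtin taking two
   pointers, such as __builtin_nonlocal_goto below, is checked with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   which returns true only when exactly two pointer arguments are present;
   a trailing 0 instead of VOID_TYPE would accept additional arguments.  */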

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
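
/* Source-level usage sketch (illustrative, not part of the original source):

     __builtin_prefetch (p);          - read, locality 3 (the defaults)
     __builtin_prefetch (p, 1, 0);    - prefetch for write, no locality

   The second and third arguments must be integer constants, as checked
   above.  */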

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
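
/* Source-level usage sketch (illustrative, not part of the original source;
   target_fn and the 64-byte argument-block size are placeholders):

     void *args = __builtin_apply_args ();
     void *ret  = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (ret);

   This forwards the current function's incoming arguments to target_fn and
   returns whatever it returned.  */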
1576
1577 /* Perform an untyped call and save the state required to perform an
1578 untyped return of whatever value was returned by the given function. */
1579
1580 static rtx
1581 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1582 {
1583 int size, align, regno;
1584 machine_mode mode;
1585 rtx incoming_args, result, reg, dest, src;
1586 rtx_call_insn *call_insn;
1587 rtx old_stack_level = 0;
1588 rtx call_fusage = 0;
1589 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1590
1591 arguments = convert_memory_address (Pmode, arguments);
1592
1593 /* Create a block where the return registers can be saved. */
1594 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1595
1596 /* Fetch the arg pointer from the ARGUMENTS block. */
1597 incoming_args = gen_reg_rtx (Pmode);
1598 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1599 #ifndef STACK_GROWS_DOWNWARD
1600 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1601 incoming_args, 0, OPTAB_LIB_WIDEN);
1602 #endif
1603
1604 /* Push a new argument block and copy the arguments. Do not allow
1605 the (potential) memcpy call below to interfere with our stack
1606 manipulations. */
1607 do_pending_stack_adjust ();
1608 NO_DEFER_POP;
1609
1610 /* Save the stack with nonlocal if available. */
1611 #ifdef HAVE_save_stack_nonlocal
1612 if (HAVE_save_stack_nonlocal)
1613 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1614 else
1615 #endif
1616 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1617
1618 /* Allocate a block of memory onto the stack and copy the memory
1619 arguments to the outgoing arguments address. We can pass TRUE
1620 as the 4th argument because we just saved the stack pointer
1621 and will restore it right after the call. */
1622 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1623
1624 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1625 may have already set current_function_calls_alloca to true.
1626 current_function_calls_alloca won't be set if argsize is zero,
1627 so we have to guarantee need_drap is true here. */
1628 if (SUPPORTS_STACK_ALIGNMENT)
1629 crtl->need_drap = true;
1630
1631 dest = virtual_outgoing_args_rtx;
1632 #ifndef STACK_GROWS_DOWNWARD
1633 if (CONST_INT_P (argsize))
1634 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1635 else
1636 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1637 #endif
1638 dest = gen_rtx_MEM (BLKmode, dest);
1639 set_mem_align (dest, PARM_BOUNDARY);
1640 src = gen_rtx_MEM (BLKmode, incoming_args);
1641 set_mem_align (src, PARM_BOUNDARY);
1642 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1643
1644 /* Refer to the argument block. */
1645 apply_args_size ();
1646 arguments = gen_rtx_MEM (BLKmode, arguments);
1647 set_mem_align (arguments, PARM_BOUNDARY);
1648
1649 /* Walk past the arg-pointer and structure value address. */
1650 size = GET_MODE_SIZE (Pmode);
1651 if (struct_value)
1652 size += GET_MODE_SIZE (Pmode);
1653
1654 /* Restore each of the registers previously saved. Make USE insns
1655 for each of these registers for use in making the call. */
1656 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1657 if ((mode = apply_args_mode[regno]) != VOIDmode)
1658 {
1659 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1660 if (size % align != 0)
1661 size = CEIL (size, align) * align;
1662 reg = gen_rtx_REG (mode, regno);
1663 emit_move_insn (reg, adjust_address (arguments, mode, size));
1664 use_reg (&call_fusage, reg);
1665 size += GET_MODE_SIZE (mode);
1666 }
1667
1668 /* Restore the structure value address unless this is passed as an
1669 "invisible" first argument. */
1670 size = GET_MODE_SIZE (Pmode);
1671 if (struct_value)
1672 {
1673 rtx value = gen_reg_rtx (Pmode);
1674 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1675 emit_move_insn (struct_value, value);
1676 if (REG_P (struct_value))
1677 use_reg (&call_fusage, struct_value);
1678 size += GET_MODE_SIZE (Pmode);
1679 }
1680
1681 /* All arguments and registers used for the call are set up by now! */
1682 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1683
1684 /* Ensure the address is valid. A SYMBOL_REF is already valid, so there
1685 is no need; and we don't want to load it into a register as an optimization,
1686 because prepare_call_address already did that if it should be done. */
1687 if (GET_CODE (function) != SYMBOL_REF)
1688 function = memory_address (FUNCTION_MODE, function);
1689
1690 /* Generate the actual call instruction and save the return value. */
1691 #ifdef HAVE_untyped_call
1692 if (HAVE_untyped_call)
1693 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1694 result, result_vector (1, result)));
1695 else
1696 #endif
1697 #ifdef HAVE_call_value
1698 if (HAVE_call_value)
1699 {
1700 rtx valreg = 0;
1701
1702 /* Locate the unique return register. It is not possible to
1703 express a call that sets more than one return register using
1704 call_value; use untyped_call for that. In fact, untyped_call
1705 only needs to save the return registers in the given block. */
1706 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1707 if ((mode = apply_result_mode[regno]) != VOIDmode)
1708 {
1709 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1710
1711 valreg = gen_rtx_REG (mode, regno);
1712 }
1713
1714 emit_call_insn (GEN_CALL_VALUE (valreg,
1715 gen_rtx_MEM (FUNCTION_MODE, function),
1716 const0_rtx, NULL_RTX, const0_rtx));
1717
1718 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1719 }
1720 else
1721 #endif
1722 gcc_unreachable ();
1723
1724 /* Find the CALL insn we just emitted, and attach the register usage
1725 information. */
1726 call_insn = last_call_insn ();
1727 add_function_usage_to (call_insn, call_fusage);
1728
1729 /* Restore the stack. */
1730 #ifdef HAVE_save_stack_nonlocal
1731 if (HAVE_save_stack_nonlocal)
1732 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1733 else
1734 #endif
1735 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1736 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1737
1738 OK_DEFER_POP;
1739
1740 /* Return the address of the result block. */
1741 result = copy_addr_to_reg (XEXP (result, 0));
1742 return convert_memory_address (ptr_mode, result);
1743 }
1744
1745 /* Perform an untyped return. */
1746
1747 static void
1748 expand_builtin_return (rtx result)
1749 {
1750 int size, align, regno;
1751 machine_mode mode;
1752 rtx reg;
1753 rtx_insn *call_fusage = 0;
1754
1755 result = convert_memory_address (Pmode, result);
1756
1757 apply_result_size ();
1758 result = gen_rtx_MEM (BLKmode, result);
1759
1760 #ifdef HAVE_untyped_return
1761 if (HAVE_untyped_return)
1762 {
1763 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1764 emit_barrier ();
1765 return;
1766 }
1767 #endif
1768
1769 /* Restore the return value and note that each value is used. */
1770 size = 0;
1771 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1772 if ((mode = apply_result_mode[regno]) != VOIDmode)
1773 {
1774 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1775 if (size % align != 0)
1776 size = CEIL (size, align) * align;
1777 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1778 emit_move_insn (reg, adjust_address (result, mode, size));
1779
1780 push_to_sequence (call_fusage);
1781 emit_use (reg);
1782 call_fusage = get_insns ();
1783 end_sequence ();
1784 size += GET_MODE_SIZE (mode);
1785 }
1786
1787 /* Put the USE insns before the return. */
1788 emit_insn (call_fusage);
1789
1790 /* Return whatever values were restored by jumping directly to the end
1791 of the function. */
1792 expand_naked_return ();
1793 }
1794
1795 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1796
1797 static enum type_class
1798 type_to_class (tree type)
1799 {
1800 switch (TREE_CODE (type))
1801 {
1802 case VOID_TYPE: return void_type_class;
1803 case INTEGER_TYPE: return integer_type_class;
1804 case ENUMERAL_TYPE: return enumeral_type_class;
1805 case BOOLEAN_TYPE: return boolean_type_class;
1806 case POINTER_TYPE: return pointer_type_class;
1807 case REFERENCE_TYPE: return reference_type_class;
1808 case OFFSET_TYPE: return offset_type_class;
1809 case REAL_TYPE: return real_type_class;
1810 case COMPLEX_TYPE: return complex_type_class;
1811 case FUNCTION_TYPE: return function_type_class;
1812 case METHOD_TYPE: return method_type_class;
1813 case RECORD_TYPE: return record_type_class;
1814 case UNION_TYPE:
1815 case QUAL_UNION_TYPE: return union_type_class;
1816 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1817 ? string_type_class : array_type_class);
1818 case LANG_TYPE: return lang_type_class;
1819 default: return no_type_class;
1820 }
1821 }
1822
1823 /* Expand a call EXP to __builtin_classify_type. */
1824
1825 static rtx
1826 expand_builtin_classify_type (tree exp)
1827 {
1828 if (call_expr_nargs (exp))
1829 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1830 return GEN_INT (no_type_class);
1831 }
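/* Editorial example of the classification above:
   __builtin_classify_type (42) folds to integer_type_class,
   __builtin_classify_type (3.14) to real_type_class, and a call
   that somehow reaches here with no argument yields no_type_class. */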
1832
1833 /* This helper macro, meant to be used in mathfn_built_in_1 below,
1834 determines which among a set of three builtin math functions is
1835 appropriate for a given type mode. The `F' and `L' cases are
1836 automatically generated from the `double' case. */
1837 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1838 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1839 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1840 fcodel = BUILT_IN_MATHFN##L ; break;
1841 /* Similar to above, but appends _R after any F/L suffix. */
1842 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1843 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1844 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1845 fcodel = BUILT_IN_MATHFN##L_R ; break;
1846
1847 /* Return the mathematical function equivalent to FN but operating directly
1848 on TYPE, if available. If IMPLICIT_P is true, use the implicit builtin
1849 declaration; otherwise use the explicit declaration. If we can't do the
1850 conversion, return zero. */
1851
1852 static tree
1853 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1854 {
1855 enum built_in_function fcode, fcodef, fcodel, fcode2;
1856
1857 switch (fn)
1858 {
1859 CASE_MATHFN (BUILT_IN_ACOS)
1860 CASE_MATHFN (BUILT_IN_ACOSH)
1861 CASE_MATHFN (BUILT_IN_ASIN)
1862 CASE_MATHFN (BUILT_IN_ASINH)
1863 CASE_MATHFN (BUILT_IN_ATAN)
1864 CASE_MATHFN (BUILT_IN_ATAN2)
1865 CASE_MATHFN (BUILT_IN_ATANH)
1866 CASE_MATHFN (BUILT_IN_CBRT)
1867 CASE_MATHFN (BUILT_IN_CEIL)
1868 CASE_MATHFN (BUILT_IN_CEXPI)
1869 CASE_MATHFN (BUILT_IN_COPYSIGN)
1870 CASE_MATHFN (BUILT_IN_COS)
1871 CASE_MATHFN (BUILT_IN_COSH)
1872 CASE_MATHFN (BUILT_IN_DREM)
1873 CASE_MATHFN (BUILT_IN_ERF)
1874 CASE_MATHFN (BUILT_IN_ERFC)
1875 CASE_MATHFN (BUILT_IN_EXP)
1876 CASE_MATHFN (BUILT_IN_EXP10)
1877 CASE_MATHFN (BUILT_IN_EXP2)
1878 CASE_MATHFN (BUILT_IN_EXPM1)
1879 CASE_MATHFN (BUILT_IN_FABS)
1880 CASE_MATHFN (BUILT_IN_FDIM)
1881 CASE_MATHFN (BUILT_IN_FLOOR)
1882 CASE_MATHFN (BUILT_IN_FMA)
1883 CASE_MATHFN (BUILT_IN_FMAX)
1884 CASE_MATHFN (BUILT_IN_FMIN)
1885 CASE_MATHFN (BUILT_IN_FMOD)
1886 CASE_MATHFN (BUILT_IN_FREXP)
1887 CASE_MATHFN (BUILT_IN_GAMMA)
1888 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1889 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1890 CASE_MATHFN (BUILT_IN_HYPOT)
1891 CASE_MATHFN (BUILT_IN_ILOGB)
1892 CASE_MATHFN (BUILT_IN_ICEIL)
1893 CASE_MATHFN (BUILT_IN_IFLOOR)
1894 CASE_MATHFN (BUILT_IN_INF)
1895 CASE_MATHFN (BUILT_IN_IRINT)
1896 CASE_MATHFN (BUILT_IN_IROUND)
1897 CASE_MATHFN (BUILT_IN_ISINF)
1898 CASE_MATHFN (BUILT_IN_J0)
1899 CASE_MATHFN (BUILT_IN_J1)
1900 CASE_MATHFN (BUILT_IN_JN)
1901 CASE_MATHFN (BUILT_IN_LCEIL)
1902 CASE_MATHFN (BUILT_IN_LDEXP)
1903 CASE_MATHFN (BUILT_IN_LFLOOR)
1904 CASE_MATHFN (BUILT_IN_LGAMMA)
1905 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1906 CASE_MATHFN (BUILT_IN_LLCEIL)
1907 CASE_MATHFN (BUILT_IN_LLFLOOR)
1908 CASE_MATHFN (BUILT_IN_LLRINT)
1909 CASE_MATHFN (BUILT_IN_LLROUND)
1910 CASE_MATHFN (BUILT_IN_LOG)
1911 CASE_MATHFN (BUILT_IN_LOG10)
1912 CASE_MATHFN (BUILT_IN_LOG1P)
1913 CASE_MATHFN (BUILT_IN_LOG2)
1914 CASE_MATHFN (BUILT_IN_LOGB)
1915 CASE_MATHFN (BUILT_IN_LRINT)
1916 CASE_MATHFN (BUILT_IN_LROUND)
1917 CASE_MATHFN (BUILT_IN_MODF)
1918 CASE_MATHFN (BUILT_IN_NAN)
1919 CASE_MATHFN (BUILT_IN_NANS)
1920 CASE_MATHFN (BUILT_IN_NEARBYINT)
1921 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1922 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1923 CASE_MATHFN (BUILT_IN_POW)
1924 CASE_MATHFN (BUILT_IN_POWI)
1925 CASE_MATHFN (BUILT_IN_POW10)
1926 CASE_MATHFN (BUILT_IN_REMAINDER)
1927 CASE_MATHFN (BUILT_IN_REMQUO)
1928 CASE_MATHFN (BUILT_IN_RINT)
1929 CASE_MATHFN (BUILT_IN_ROUND)
1930 CASE_MATHFN (BUILT_IN_SCALB)
1931 CASE_MATHFN (BUILT_IN_SCALBLN)
1932 CASE_MATHFN (BUILT_IN_SCALBN)
1933 CASE_MATHFN (BUILT_IN_SIGNBIT)
1934 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1935 CASE_MATHFN (BUILT_IN_SIN)
1936 CASE_MATHFN (BUILT_IN_SINCOS)
1937 CASE_MATHFN (BUILT_IN_SINH)
1938 CASE_MATHFN (BUILT_IN_SQRT)
1939 CASE_MATHFN (BUILT_IN_TAN)
1940 CASE_MATHFN (BUILT_IN_TANH)
1941 CASE_MATHFN (BUILT_IN_TGAMMA)
1942 CASE_MATHFN (BUILT_IN_TRUNC)
1943 CASE_MATHFN (BUILT_IN_Y0)
1944 CASE_MATHFN (BUILT_IN_Y1)
1945 CASE_MATHFN (BUILT_IN_YN)
1946
1947 default:
1948 return NULL_TREE;
1949 }
1950
1951 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1952 fcode2 = fcode;
1953 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1954 fcode2 = fcodef;
1955 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1956 fcode2 = fcodel;
1957 else
1958 return NULL_TREE;
1959
1960 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1961 return NULL_TREE;
1962
1963 return builtin_decl_explicit (fcode2);
1964 }
1965
1966 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1967
1968 tree
1969 mathfn_built_in (tree type, enum built_in_function fn)
1970 {
1971 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1972 }
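/* Editorial usage sketch: mathfn_built_in (float_type_node, BUILT_IN_SIN)
   yields the implicit decl for sinf, while a TYPE whose main variant is
   none of float, double or long double yields NULL_TREE. */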
1973
1974 /* If errno must be maintained, expand the RTL to check if the result,
1975 TARGET, of a built-in function call, EXP, is NaN, and if so set
1976 errno to EDOM. */
1977
1978 static void
1979 expand_errno_check (tree exp, rtx target)
1980 {
1981 rtx_code_label *lab = gen_label_rtx ();
1982
1983 /* Test the result; if it is NaN, set errno=EDOM because
1984 the argument was not in the domain. */
1985 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1986 NULL_RTX, NULL_RTX, lab,
1987 /* The jump is very likely. */
1988 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1989
1990 #ifdef TARGET_EDOM
1991 /* If this built-in doesn't throw an exception, set errno directly. */
1992 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1993 {
1994 #ifdef GEN_ERRNO_RTX
1995 rtx errno_rtx = GEN_ERRNO_RTX;
1996 #else
1997 rtx errno_rtx
1998 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1999 #endif
2000 emit_move_insn (errno_rtx,
2001 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2002 emit_label (lab);
2003 return;
2004 }
2005 #endif
2006
2007 /* Make sure the library call isn't expanded as a tail call. */
2008 CALL_EXPR_TAILCALL (exp) = 0;
2009
2010 /* We can't set errno=EDOM directly; let the library call do it.
2011 Pop the arguments right away in case the call gets deleted. */
2012 NO_DEFER_POP;
2013 expand_call (exp, target, 0);
2014 OK_DEFER_POP;
2015 emit_label (lab);
2016 }
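/* Editorial note: the check above relies on NaN being the only value
   that compares unequal to itself. As a C-level sketch it amounts to

     double r = sqrt (x);
     if (r != r)          (r is NaN, so x was outside the domain)
       errno = EDOM;

   with the EQ branch taken on the common non-NaN path. */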
2017
2018 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2019 Return NULL_RTX if a normal call should be emitted rather than expanding
2020 the function in-line. EXP is the expression that is a call to the builtin
2021 function; if convenient, the result should be placed in TARGET.
2022 SUBTARGET may be used as the target for computing one of EXP's operands. */
2023
2024 static rtx
2025 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2026 {
2027 optab builtin_optab;
2028 rtx op0;
2029 rtx_insn *insns;
2030 tree fndecl = get_callee_fndecl (exp);
2031 machine_mode mode;
2032 bool errno_set = false;
2033 bool try_widening = false;
2034 tree arg;
2035
2036 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2037 return NULL_RTX;
2038
2039 arg = CALL_EXPR_ARG (exp, 0);
2040
2041 switch (DECL_FUNCTION_CODE (fndecl))
2042 {
2043 CASE_FLT_FN (BUILT_IN_SQRT):
2044 errno_set = ! tree_expr_nonnegative_p (arg);
2045 try_widening = true;
2046 builtin_optab = sqrt_optab;
2047 break;
2048 CASE_FLT_FN (BUILT_IN_EXP):
2049 errno_set = true; builtin_optab = exp_optab; break;
2050 CASE_FLT_FN (BUILT_IN_EXP10):
2051 CASE_FLT_FN (BUILT_IN_POW10):
2052 errno_set = true; builtin_optab = exp10_optab; break;
2053 CASE_FLT_FN (BUILT_IN_EXP2):
2054 errno_set = true; builtin_optab = exp2_optab; break;
2055 CASE_FLT_FN (BUILT_IN_EXPM1):
2056 errno_set = true; builtin_optab = expm1_optab; break;
2057 CASE_FLT_FN (BUILT_IN_LOGB):
2058 errno_set = true; builtin_optab = logb_optab; break;
2059 CASE_FLT_FN (BUILT_IN_LOG):
2060 errno_set = true; builtin_optab = log_optab; break;
2061 CASE_FLT_FN (BUILT_IN_LOG10):
2062 errno_set = true; builtin_optab = log10_optab; break;
2063 CASE_FLT_FN (BUILT_IN_LOG2):
2064 errno_set = true; builtin_optab = log2_optab; break;
2065 CASE_FLT_FN (BUILT_IN_LOG1P):
2066 errno_set = true; builtin_optab = log1p_optab; break;
2067 CASE_FLT_FN (BUILT_IN_ASIN):
2068 builtin_optab = asin_optab; break;
2069 CASE_FLT_FN (BUILT_IN_ACOS):
2070 builtin_optab = acos_optab; break;
2071 CASE_FLT_FN (BUILT_IN_TAN):
2072 builtin_optab = tan_optab; break;
2073 CASE_FLT_FN (BUILT_IN_ATAN):
2074 builtin_optab = atan_optab; break;
2075 CASE_FLT_FN (BUILT_IN_FLOOR):
2076 builtin_optab = floor_optab; break;
2077 CASE_FLT_FN (BUILT_IN_CEIL):
2078 builtin_optab = ceil_optab; break;
2079 CASE_FLT_FN (BUILT_IN_TRUNC):
2080 builtin_optab = btrunc_optab; break;
2081 CASE_FLT_FN (BUILT_IN_ROUND):
2082 builtin_optab = round_optab; break;
2083 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2084 builtin_optab = nearbyint_optab;
2085 if (flag_trapping_math)
2086 break;
2087 /* Else fallthrough and expand as rint. */
2088 CASE_FLT_FN (BUILT_IN_RINT):
2089 builtin_optab = rint_optab; break;
2090 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2091 builtin_optab = significand_optab; break;
2092 default:
2093 gcc_unreachable ();
2094 }
2095
2096 /* Make a suitable register to place result in. */
2097 mode = TYPE_MODE (TREE_TYPE (exp));
2098
2099 if (! flag_errno_math || ! HONOR_NANS (mode))
2100 errno_set = false;
2101
2102 /* Before working hard, check whether the instruction is available, but try
2103 to widen the mode for specific operations. */
2104 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2105 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2106 && (!errno_set || !optimize_insn_for_size_p ()))
2107 {
2108 rtx result = gen_reg_rtx (mode);
2109
2110 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2111 need to expand the argument again. This way, we will not perform
2112 side-effects more than once. */
2113 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2114
2115 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2116
2117 start_sequence ();
2118
2119 /* Compute into RESULT.
2120 Set RESULT to wherever the result comes back. */
2121 result = expand_unop (mode, builtin_optab, op0, result, 0);
2122
2123 if (result != 0)
2124 {
2125 if (errno_set)
2126 expand_errno_check (exp, result);
2127
2128 /* Output the entire sequence. */
2129 insns = get_insns ();
2130 end_sequence ();
2131 emit_insn (insns);
2132 return result;
2133 }
2134
2135 /* If we were unable to expand via the builtin, stop the sequence
2136 (without outputting the insns) and call the library function
2137 with the stabilized argument list. */
2138 end_sequence ();
2139 }
2140
2141 return expand_call (exp, target, target == const0_rtx);
2142 }
2143
2144 /* Expand a call to the builtin binary math functions (pow and atan2).
2145 Return NULL_RTX if a normal call should be emitted rather than expanding the
2146 function in-line. EXP is the expression that is a call to the builtin
2147 function; if convenient, the result should be placed in TARGET.
2148 SUBTARGET may be used as the target for computing one of EXP's
2149 operands. */
2150
2151 static rtx
2152 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2153 {
2154 optab builtin_optab;
2155 rtx op0, op1, result;
2156 rtx_insn *insns;
2157 int op1_type = REAL_TYPE;
2158 tree fndecl = get_callee_fndecl (exp);
2159 tree arg0, arg1;
2160 machine_mode mode;
2161 bool errno_set = true;
2162
2163 switch (DECL_FUNCTION_CODE (fndecl))
2164 {
2165 CASE_FLT_FN (BUILT_IN_SCALBN):
2166 CASE_FLT_FN (BUILT_IN_SCALBLN):
2167 CASE_FLT_FN (BUILT_IN_LDEXP):
2168 op1_type = INTEGER_TYPE;
2169 default:
2170 break;
2171 }
2172
2173 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2174 return NULL_RTX;
2175
2176 arg0 = CALL_EXPR_ARG (exp, 0);
2177 arg1 = CALL_EXPR_ARG (exp, 1);
2178
2179 switch (DECL_FUNCTION_CODE (fndecl))
2180 {
2181 CASE_FLT_FN (BUILT_IN_POW):
2182 builtin_optab = pow_optab; break;
2183 CASE_FLT_FN (BUILT_IN_ATAN2):
2184 builtin_optab = atan2_optab; break;
2185 CASE_FLT_FN (BUILT_IN_SCALB):
2186 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2187 return 0;
2188 builtin_optab = scalb_optab; break;
2189 CASE_FLT_FN (BUILT_IN_SCALBN):
2190 CASE_FLT_FN (BUILT_IN_SCALBLN):
2191 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2192 return 0;
2193 /* Fall through... */
2194 CASE_FLT_FN (BUILT_IN_LDEXP):
2195 builtin_optab = ldexp_optab; break;
2196 CASE_FLT_FN (BUILT_IN_FMOD):
2197 builtin_optab = fmod_optab; break;
2198 CASE_FLT_FN (BUILT_IN_REMAINDER):
2199 CASE_FLT_FN (BUILT_IN_DREM):
2200 builtin_optab = remainder_optab; break;
2201 default:
2202 gcc_unreachable ();
2203 }
2204
2205 /* Make a suitable register to place result in. */
2206 mode = TYPE_MODE (TREE_TYPE (exp));
2207
2208 /* Before working hard, check whether the instruction is available. */
2209 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2210 return NULL_RTX;
2211
2212 result = gen_reg_rtx (mode);
2213
2214 if (! flag_errno_math || ! HONOR_NANS (mode))
2215 errno_set = false;
2216
2217 if (errno_set && optimize_insn_for_size_p ())
2218 return 0;
2219
2220 /* Always stabilize the argument list. */
2221 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2222 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2223
2224 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2225 op1 = expand_normal (arg1);
2226
2227 start_sequence ();
2228
2229 /* Compute into RESULT.
2230 Set RESULT to wherever the result comes back. */
2231 result = expand_binop (mode, builtin_optab, op0, op1,
2232 result, 0, OPTAB_DIRECT);
2233
2234 /* If we were unable to expand via the builtin, stop the sequence
2235 (without outputting the insns) and call the library function
2236 with the stabilized argument list. */
2237 if (result == 0)
2238 {
2239 end_sequence ();
2240 return expand_call (exp, target, target == const0_rtx);
2241 }
2242
2243 if (errno_set)
2244 expand_errno_check (exp, result);
2245
2246 /* Output the entire sequence. */
2247 insns = get_insns ();
2248 end_sequence ();
2249 emit_insn (insns);
2250
2251 return result;
2252 }
2253
2254 /* Expand a call to the builtin ternary math functions (fma).
2255 Return NULL_RTX if a normal call should be emitted rather than expanding the
2256 function in-line. EXP is the expression that is a call to the builtin
2257 function; if convenient, the result should be placed in TARGET.
2258 SUBTARGET may be used as the target for computing one of EXP's
2259 operands. */
2260
2261 static rtx
2262 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2263 {
2264 optab builtin_optab;
2265 rtx op0, op1, op2, result;
2266 rtx_insn *insns;
2267 tree fndecl = get_callee_fndecl (exp);
2268 tree arg0, arg1, arg2;
2269 machine_mode mode;
2270
2271 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2272 return NULL_RTX;
2273
2274 arg0 = CALL_EXPR_ARG (exp, 0);
2275 arg1 = CALL_EXPR_ARG (exp, 1);
2276 arg2 = CALL_EXPR_ARG (exp, 2);
2277
2278 switch (DECL_FUNCTION_CODE (fndecl))
2279 {
2280 CASE_FLT_FN (BUILT_IN_FMA):
2281 builtin_optab = fma_optab; break;
2282 default:
2283 gcc_unreachable ();
2284 }
2285
2286 /* Make a suitable register to place result in. */
2287 mode = TYPE_MODE (TREE_TYPE (exp));
2288
2289 /* Before working hard, check whether the instruction is available. */
2290 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2291 return NULL_RTX;
2292
2293 result = gen_reg_rtx (mode);
2294
2295 /* Always stabilize the argument list. */
2296 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2297 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2298 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2299
2300 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2301 op1 = expand_normal (arg1);
2302 op2 = expand_normal (arg2);
2303
2304 start_sequence ();
2305
2306 /* Compute into RESULT.
2307 Set RESULT to wherever the result comes back. */
2308 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2309 result, 0);
2310
2311 /* If we were unable to expand via the builtin, stop the sequence
2312 (without outputting the insns) and call the library function
2313 with the stabilized argument list. */
2314 if (result == 0)
2315 {
2316 end_sequence ();
2317 return expand_call (exp, target, target == const0_rtx);
2318 }
2319
2320 /* Output the entire sequence. */
2321 insns = get_insns ();
2322 end_sequence ();
2323 emit_insn (insns);
2324
2325 return result;
2326 }
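/* Editorial note: when fma_optab matches, a call such as

     double d = __builtin_fma (x, y, z);

   becomes a single fused multiply-add insn computing x*y + z with one
   rounding step, instead of a libcall to fma. */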
2327
2328 /* Expand a call to the builtin sin and cos math functions.
2329 Return NULL_RTX if a normal call should be emitted rather than expanding the
2330 function in-line. EXP is the expression that is a call to the builtin
2331 function; if convenient, the result should be placed in TARGET.
2332 SUBTARGET may be used as the target for computing one of EXP's
2333 operands. */
2334
2335 static rtx
2336 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2337 {
2338 optab builtin_optab;
2339 rtx op0;
2340 rtx_insn *insns;
2341 tree fndecl = get_callee_fndecl (exp);
2342 machine_mode mode;
2343 tree arg;
2344
2345 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2346 return NULL_RTX;
2347
2348 arg = CALL_EXPR_ARG (exp, 0);
2349
2350 switch (DECL_FUNCTION_CODE (fndecl))
2351 {
2352 CASE_FLT_FN (BUILT_IN_SIN):
2353 CASE_FLT_FN (BUILT_IN_COS):
2354 builtin_optab = sincos_optab; break;
2355 default:
2356 gcc_unreachable ();
2357 }
2358
2359 /* Make a suitable register to place result in. */
2360 mode = TYPE_MODE (TREE_TYPE (exp));
2361
2362 /* Check if the sincos insn is available; otherwise fall back
2363 to the sin or cos insn. */
2364 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2365 switch (DECL_FUNCTION_CODE (fndecl))
2366 {
2367 CASE_FLT_FN (BUILT_IN_SIN):
2368 builtin_optab = sin_optab; break;
2369 CASE_FLT_FN (BUILT_IN_COS):
2370 builtin_optab = cos_optab; break;
2371 default:
2372 gcc_unreachable ();
2373 }
2374
2375 /* Before working hard, check whether the instruction is available. */
2376 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2377 {
2378 rtx result = gen_reg_rtx (mode);
2379
2380 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2381 need to expand the argument again. This way, we will not perform
2382 side-effects more than once. */
2383 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2384
2385 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2386
2387 start_sequence ();
2388
2389 /* Compute into RESULT.
2390 Set RESULT to wherever the result comes back. */
2391 if (builtin_optab == sincos_optab)
2392 {
2393 int ok;
2394
2395 switch (DECL_FUNCTION_CODE (fndecl))
2396 {
2397 CASE_FLT_FN (BUILT_IN_SIN):
2398 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2399 break;
2400 CASE_FLT_FN (BUILT_IN_COS):
2401 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2402 break;
2403 default:
2404 gcc_unreachable ();
2405 }
2406 gcc_assert (ok);
2407 }
2408 else
2409 result = expand_unop (mode, builtin_optab, op0, result, 0);
2410
2411 if (result != 0)
2412 {
2413 /* Output the entire sequence. */
2414 insns = get_insns ();
2415 end_sequence ();
2416 emit_insn (insns);
2417 return result;
2418 }
2419
2420 /* If we were unable to expand via the builtin, stop the sequence
2421 (without outputting the insns) and call the library function
2422 with the stabilized argument list. */
2423 end_sequence ();
2424 }
2425
2426 return expand_call (exp, target, target == const0_rtx);
2427 }
2428
2429 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2430 return an RTL instruction code that implements the functionality.
2431 If that isn't possible or available, return CODE_FOR_nothing. */
2432
2433 static enum insn_code
2434 interclass_mathfn_icode (tree arg, tree fndecl)
2435 {
2436 bool errno_set = false;
2437 optab builtin_optab = unknown_optab;
2438 machine_mode mode;
2439
2440 switch (DECL_FUNCTION_CODE (fndecl))
2441 {
2442 CASE_FLT_FN (BUILT_IN_ILOGB):
2443 errno_set = true; builtin_optab = ilogb_optab; break;
2444 CASE_FLT_FN (BUILT_IN_ISINF):
2445 builtin_optab = isinf_optab; break;
2446 case BUILT_IN_ISNORMAL:
2447 case BUILT_IN_ISFINITE:
2448 CASE_FLT_FN (BUILT_IN_FINITE):
2449 case BUILT_IN_FINITED32:
2450 case BUILT_IN_FINITED64:
2451 case BUILT_IN_FINITED128:
2452 case BUILT_IN_ISINFD32:
2453 case BUILT_IN_ISINFD64:
2454 case BUILT_IN_ISINFD128:
2455 /* These builtins have no optabs (yet). */
2456 break;
2457 default:
2458 gcc_unreachable ();
2459 }
2460
2461 /* There's no easy way to detect the case we need to set EDOM. */
2462 if (flag_errno_math && errno_set)
2463 return CODE_FOR_nothing;
2464
2465 /* Optab mode depends on the mode of the input argument. */
2466 mode = TYPE_MODE (TREE_TYPE (arg));
2467
2468 if (builtin_optab)
2469 return optab_handler (builtin_optab, mode);
2470 return CODE_FOR_nothing;
2471 }
2472
2473 /* Expand a call to one of the builtin math functions that operate on a
2474 floating point argument and output an integer result (ilogb, isinf,
2475 isnan, etc.).
2476 Return 0 if a normal call should be emitted rather than expanding the
2477 function in-line. EXP is the expression that is a call to the builtin
2478 function; if convenient, the result should be placed in TARGET. */
2479
2480 static rtx
2481 expand_builtin_interclass_mathfn (tree exp, rtx target)
2482 {
2483 enum insn_code icode = CODE_FOR_nothing;
2484 rtx op0;
2485 tree fndecl = get_callee_fndecl (exp);
2486 machine_mode mode;
2487 tree arg;
2488
2489 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2490 return NULL_RTX;
2491
2492 arg = CALL_EXPR_ARG (exp, 0);
2493 icode = interclass_mathfn_icode (arg, fndecl);
2494 mode = TYPE_MODE (TREE_TYPE (arg));
2495
2496 if (icode != CODE_FOR_nothing)
2497 {
2498 struct expand_operand ops[1];
2499 rtx_insn *last = get_last_insn ();
2500 tree orig_arg = arg;
2501
2502 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2503 need to expand the argument again. This way, we will not perform
2504 side-effects more than once. */
2505 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2506
2507 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2508
2509 if (mode != GET_MODE (op0))
2510 op0 = convert_to_mode (mode, op0, 0);
2511
2512 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2513 if (maybe_legitimize_operands (icode, 0, 1, ops)
2514 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2515 return ops[0].value;
2516
2517 delete_insns_since (last);
2518 CALL_EXPR_ARG (exp, 0) = orig_arg;
2519 }
2520
2521 return NULL_RTX;
2522 }
2523
2524 /* Expand a call to the builtin sincos math function.
2525 Return NULL_RTX if a normal call should be emitted rather than expanding the
2526 function in-line. EXP is the expression that is a call to the builtin
2527 function. */
2528
2529 static rtx
2530 expand_builtin_sincos (tree exp)
2531 {
2532 rtx op0, op1, op2, target1, target2;
2533 machine_mode mode;
2534 tree arg, sinp, cosp;
2535 int result;
2536 location_t loc = EXPR_LOCATION (exp);
2537 tree alias_type, alias_off;
2538
2539 if (!validate_arglist (exp, REAL_TYPE,
2540 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2541 return NULL_RTX;
2542
2543 arg = CALL_EXPR_ARG (exp, 0);
2544 sinp = CALL_EXPR_ARG (exp, 1);
2545 cosp = CALL_EXPR_ARG (exp, 2);
2546
2547 /* Make a suitable register to place result in. */
2548 mode = TYPE_MODE (TREE_TYPE (arg));
2549
2550 /* Check if sincos insn is available, otherwise emit the call. */
2551 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2552 return NULL_RTX;
2553
2554 target1 = gen_reg_rtx (mode);
2555 target2 = gen_reg_rtx (mode);
2556
2557 op0 = expand_normal (arg);
2558 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2559 alias_off = build_int_cst (alias_type, 0);
2560 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2561 sinp, alias_off));
2562 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2563 cosp, alias_off));
2564
2565 /* Compute into target1 and target2.
2566 Set TARGET to wherever the result comes back. */
2567 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2568 gcc_assert (result);
2569
2570 /* Move target1 and target2 to the memory locations indicated
2571 by op1 and op2. */
2572 emit_move_insn (op1, target1);
2573 emit_move_insn (op2, target2);
2574
2575 return const0_rtx;
2576 }
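/* Editorial sketch: for

     sincos (x, &s, &c);

   the expansion above computes both values with one sincos insn and
   stores them through the two pointer arguments, returning const0_rtx
   because the library function returns void. */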
2577
2578 /* Expand a call to the internal cexpi builtin via the sincos math function.
2579 EXP is the expression that is a call to the builtin function; if convenient,
2580 the result should be placed in TARGET. */
2581
2582 static rtx
2583 expand_builtin_cexpi (tree exp, rtx target)
2584 {
2585 tree fndecl = get_callee_fndecl (exp);
2586 tree arg, type;
2587 machine_mode mode;
2588 rtx op0, op1, op2;
2589 location_t loc = EXPR_LOCATION (exp);
2590
2591 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2592 return NULL_RTX;
2593
2594 arg = CALL_EXPR_ARG (exp, 0);
2595 type = TREE_TYPE (arg);
2596 mode = TYPE_MODE (TREE_TYPE (arg));
2597
2598 /* Try expanding via a sincos optab; fall back to emitting a libcall
2599 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2600 is only generated from sincos or cexp, or if we have either of them. */
2601 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2602 {
2603 op1 = gen_reg_rtx (mode);
2604 op2 = gen_reg_rtx (mode);
2605
2606 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2607
2608 /* Compute into op1 and op2. */
2609 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2610 }
2611 else if (targetm.libc_has_function (function_sincos))
2612 {
2613 tree call, fn = NULL_TREE;
2614 tree top1, top2;
2615 rtx op1a, op2a;
2616
2617 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2618 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2619 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2620 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2621 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2622 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2623 else
2624 gcc_unreachable ();
2625
2626 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2627 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2628 op1a = copy_addr_to_reg (XEXP (op1, 0));
2629 op2a = copy_addr_to_reg (XEXP (op2, 0));
2630 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2631 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2632
2633 /* Make sure not to fold the sincos call again. */
2634 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2635 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2636 call, 3, arg, top1, top2));
2637 }
2638 else
2639 {
2640 tree call, fn = NULL_TREE, narg;
2641 tree ctype = build_complex_type (type);
2642
2643 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2644 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2645 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2646 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2647 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2648 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2649 else
2650 gcc_unreachable ();
2651
2652 /* If we don't have a decl for cexp create one. This is the
2653 friendliest fallback if the user calls __builtin_cexpi
2654 without full target C99 function support. */
2655 if (fn == NULL_TREE)
2656 {
2657 tree fntype;
2658 const char *name = NULL;
2659
2660 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2661 name = "cexpf";
2662 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2663 name = "cexp";
2664 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2665 name = "cexpl";
2666
2667 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2668 fn = build_fn_decl (name, fntype);
2669 }
2670
2671 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2672 build_real (type, dconst0), arg);
2673
2674 /* Make sure not to fold the cexp call again. */
2675 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2676 return expand_expr (build_call_nary (ctype, call, 1, narg),
2677 target, VOIDmode, EXPAND_NORMAL);
2678 }
2679
2680 /* Now build the proper return type. */
2681 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2682 make_tree (TREE_TYPE (arg), op2),
2683 make_tree (TREE_TYPE (arg), op1)),
2684 target, VOIDmode, EXPAND_NORMAL);
2685 }
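/* Editorial note: cexpi (x) == cos (x) + i*sin (x), so the three
   strategies above are equivalent: a sincos insn, a libcall to sincos
   through stack temporaries, or cexp applied to the complex value
   0 + x*i built by the COMPLEX_EXPR fallback. */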
2686
2687 /* Conveniently construct a function call expression. FNDECL names the
2688 function to be called, N is the number of arguments, and the "..."
2689 parameters are the argument expressions. Unlike build_call_expr,
2690 this doesn't fold the call; hence it will always return a CALL_EXPR. */
2691
2692 static tree
2693 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2694 {
2695 va_list ap;
2696 tree fntype = TREE_TYPE (fndecl);
2697 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2698
2699 va_start (ap, n);
2700 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2701 va_end (ap);
2702 SET_EXPR_LOCATION (fn, loc);
2703 return fn;
2704 }
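/* Editorial usage sketch: to build an unfolded call to a known decl FN
   taking two arguments, one would write

     tree call = build_call_nofold_loc (loc, fn, 2, arg0, arg1);

   and the result is guaranteed to be a CALL_EXPR. */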
2705
2706 /* Expand a call to one of the builtin rounding functions gcc defines
2707 as an extension (lfloor and lceil). As these are gcc extensions, we
2708 do not need to worry about setting errno to EDOM.
2709 If expanding via optab fails, lower the expression to (int)(floor(x)).
2710 EXP is the expression that is a call to the builtin function;
2711 if convenient, the result should be placed in TARGET. */
2712
2713 static rtx
2714 expand_builtin_int_roundingfn (tree exp, rtx target)
2715 {
2716 convert_optab builtin_optab;
2717 rtx op0, tmp;
2718 rtx_insn *insns;
2719 tree fndecl = get_callee_fndecl (exp);
2720 enum built_in_function fallback_fn;
2721 tree fallback_fndecl;
2722 machine_mode mode;
2723 tree arg;
2724
2725 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2726 gcc_unreachable ();
2727
2728 arg = CALL_EXPR_ARG (exp, 0);
2729
2730 switch (DECL_FUNCTION_CODE (fndecl))
2731 {
2732 CASE_FLT_FN (BUILT_IN_ICEIL):
2733 CASE_FLT_FN (BUILT_IN_LCEIL):
2734 CASE_FLT_FN (BUILT_IN_LLCEIL):
2735 builtin_optab = lceil_optab;
2736 fallback_fn = BUILT_IN_CEIL;
2737 break;
2738
2739 CASE_FLT_FN (BUILT_IN_IFLOOR):
2740 CASE_FLT_FN (BUILT_IN_LFLOOR):
2741 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2742 builtin_optab = lfloor_optab;
2743 fallback_fn = BUILT_IN_FLOOR;
2744 break;
2745
2746 default:
2747 gcc_unreachable ();
2748 }
2749
2750 /* Make a suitable register to place result in. */
2751 mode = TYPE_MODE (TREE_TYPE (exp));
2752
2753 target = gen_reg_rtx (mode);
2754
2755 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2756 need to expand the argument again. This way, we will not perform
2757 side-effects more than once. */
2758 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2759
2760 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2761
2762 start_sequence ();
2763
2764 /* Compute into TARGET. */
2765 if (expand_sfix_optab (target, op0, builtin_optab))
2766 {
2767 /* Output the entire sequence. */
2768 insns = get_insns ();
2769 end_sequence ();
2770 emit_insn (insns);
2771 return target;
2772 }
2773
2774 /* If we were unable to expand via the builtin, stop the sequence
2775 (without outputting the insns). */
2776 end_sequence ();
2777
2778 /* Fall back to floating point rounding optab. */
2779 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2780
2781 /* For non-C99 targets we may end up without a fallback fndecl here
2782 if the user called __builtin_lfloor directly. In this case emit
2783 a call to the floor/ceil variants nevertheless. This should give
2784 the best user experience on targets lacking full C99 support. */
2785 if (fallback_fndecl == NULL_TREE)
2786 {
2787 tree fntype;
2788 const char *name = NULL;
2789
2790 switch (DECL_FUNCTION_CODE (fndecl))
2791 {
2792 case BUILT_IN_ICEIL:
2793 case BUILT_IN_LCEIL:
2794 case BUILT_IN_LLCEIL:
2795 name = "ceil";
2796 break;
2797 case BUILT_IN_ICEILF:
2798 case BUILT_IN_LCEILF:
2799 case BUILT_IN_LLCEILF:
2800 name = "ceilf";
2801 break;
2802 case BUILT_IN_ICEILL:
2803 case BUILT_IN_LCEILL:
2804 case BUILT_IN_LLCEILL:
2805 name = "ceill";
2806 break;
2807 case BUILT_IN_IFLOOR:
2808 case BUILT_IN_LFLOOR:
2809 case BUILT_IN_LLFLOOR:
2810 name = "floor";
2811 break;
2812 case BUILT_IN_IFLOORF:
2813 case BUILT_IN_LFLOORF:
2814 case BUILT_IN_LLFLOORF:
2815 name = "floorf";
2816 break;
2817 case BUILT_IN_IFLOORL:
2818 case BUILT_IN_LFLOORL:
2819 case BUILT_IN_LLFLOORL:
2820 name = "floorl";
2821 break;
2822 default:
2823 gcc_unreachable ();
2824 }
2825
2826 fntype = build_function_type_list (TREE_TYPE (arg),
2827 TREE_TYPE (arg), NULL_TREE);
2828 fallback_fndecl = build_fn_decl (name, fntype);
2829 }
2830
2831 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2832
2833 tmp = expand_normal (exp);
2834 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2835
2836 /* Truncate the result of the floating point optab to integer
2837 via expand_fix (). */
2838 target = gen_reg_rtx (mode);
2839 expand_fix (target, tmp, 0);
2840
2841 return target;
2842 }
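/* Editorial sketch of the fallback path above: when no lceil/lfloor
   insn exists, a call such as

     long l = __builtin_lfloor (x);

   is lowered to roughly l = (long) floor (x), i.e. a call to the
   floor variant followed by an expand_fix truncation. */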
2843
2844 /* Expand a call to one of the builtin math functions doing integer
2845 conversion (lrint).
2846 Return 0 if a normal call should be emitted rather than expanding the
2847 function in-line. EXP is the expression that is a call to the builtin
2848 function; if convenient, the result should be placed in TARGET. */
2849
2850 static rtx
2851 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2852 {
2853 convert_optab builtin_optab;
2854 rtx op0;
2855 rtx_insn *insns;
2856 tree fndecl = get_callee_fndecl (exp);
2857 tree arg;
2858 machine_mode mode;
2859 enum built_in_function fallback_fn = BUILT_IN_NONE;
2860
2861 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2862 gcc_unreachable ();
2863
2864 arg = CALL_EXPR_ARG (exp, 0);
2865
2866 switch (DECL_FUNCTION_CODE (fndecl))
2867 {
2868 CASE_FLT_FN (BUILT_IN_IRINT):
2869 fallback_fn = BUILT_IN_LRINT;
2870 /* FALLTHRU */
2871 CASE_FLT_FN (BUILT_IN_LRINT):
2872 CASE_FLT_FN (BUILT_IN_LLRINT):
2873 builtin_optab = lrint_optab;
2874 break;
2875
2876 CASE_FLT_FN (BUILT_IN_IROUND):
2877 fallback_fn = BUILT_IN_LROUND;
2878 /* FALLTHRU */
2879 CASE_FLT_FN (BUILT_IN_LROUND):
2880 CASE_FLT_FN (BUILT_IN_LLROUND):
2881 builtin_optab = lround_optab;
2882 break;
2883
2884 default:
2885 gcc_unreachable ();
2886 }
2887
2888 /* There's no easy way to detect the case we need to set EDOM. */
2889 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2890 return NULL_RTX;
2891
2892 /* Make a suitable register to place result in. */
2893 mode = TYPE_MODE (TREE_TYPE (exp));
2894
2895 /* Expand inline only when errno handling is not required. */
2896 if (!flag_errno_math)
2897 {
2898 rtx result = gen_reg_rtx (mode);
2899
2900 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2901 need to expand the argument again. This way, we will not perform
2902 side-effects more than once. */
2903 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2904
2905 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2906
2907 start_sequence ();
2908
2909 if (expand_sfix_optab (result, op0, builtin_optab))
2910 {
2911 /* Output the entire sequence. */
2912 insns = get_insns ();
2913 end_sequence ();
2914 emit_insn (insns);
2915 return result;
2916 }
2917
2918 /* If we were unable to expand via the builtin, stop the sequence
2919 (without outputting the insns) and call the library function
2920 with the stabilized argument list. */
2921 end_sequence ();
2922 }
2923
2924 if (fallback_fn != BUILT_IN_NONE)
2925 {
2926 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2927 targets, (int) round (x) should never be transformed into
2928 BUILT_IN_IROUND; and if __builtin_iround is called directly, emit
2929 a call to lround in the hope that the target provides at least some
2930 C99 functions. This should give the best user experience on
2931 targets lacking full C99 support. */
2932 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2933 fallback_fn, 0);
2934
2935 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2936 fallback_fndecl, 1, arg);
2937
2938 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2939 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2940 return convert_to_mode (mode, target, 0);
2941 }
2942
2943 return expand_call (exp, target, target == const0_rtx);
2944 }
2945
2946 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2947 a normal call should be emitted rather than expanding the function
2948 in-line. EXP is the expression that is a call to the builtin
2949 function; if convenient, the result should be placed in TARGET. */
2950
2951 static rtx
2952 expand_builtin_powi (tree exp, rtx target)
2953 {
2954 tree arg0, arg1;
2955 rtx op0, op1;
2956 machine_mode mode;
2957 machine_mode mode2;
2958
2959 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2960 return NULL_RTX;
2961
2962 arg0 = CALL_EXPR_ARG (exp, 0);
2963 arg1 = CALL_EXPR_ARG (exp, 1);
2964 mode = TYPE_MODE (TREE_TYPE (exp));
2965
2966 /* Emit a libcall to libgcc. */
2967
2968 /* Mode of the 2nd argument must match that of an int. */
2969 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2970
2971 if (target == NULL_RTX)
2972 target = gen_reg_rtx (mode);
2973
2974 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2975 if (GET_MODE (op0) != mode)
2976 op0 = convert_to_mode (mode, op0, 0);
2977 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2978 if (GET_MODE (op1) != mode2)
2979 op1 = convert_to_mode (mode2, op1, 0);
2980
2981 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2982 target, LCT_CONST, mode, 2,
2983 op0, mode, op1, mode2);
2984
2985 return target;
2986 }
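/* Editorial note: there is no insn expansion here; by the time this
   expander runs, a call such as

     double d = __builtin_powi (x, n);

   is always emitted as a libcall via optab_libfunc (powi_optab, mode),
   here DFmode, with N first converted to the mode of an int. */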
2987
2988 /* Expand expression EXP which is a call to the strlen builtin. Return
2989 NULL_RTX if we failed; the caller should then emit a normal call.
2990 Otherwise try to get the result in TARGET, if convenient. */
2991
2992 static rtx
2993 expand_builtin_strlen (tree exp, rtx target,
2994 machine_mode target_mode)
2995 {
2996 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2997 return NULL_RTX;
2998 else
2999 {
3000 struct expand_operand ops[4];
3001 rtx pat;
3002 tree len;
3003 tree src = CALL_EXPR_ARG (exp, 0);
3004 rtx src_reg;
3005 rtx_insn *before_strlen;
3006 machine_mode insn_mode = target_mode;
3007 enum insn_code icode = CODE_FOR_nothing;
3008 unsigned int align;
3009
3010 /* If the length can be computed at compile-time, return it. */
3011 len = c_strlen (src, 0);
3012 if (len)
3013 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3014
3015 /* If the length can be computed at compile-time and is a constant
3016 integer, but there are side-effects in src, evaluate
3017 src for side-effects, then return len.
3018 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3019 can be optimized into: i++; x = 3; */
3020 len = c_strlen (src, 1);
3021 if (len && TREE_CODE (len) == INTEGER_CST)
3022 {
3023 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3024 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3025 }
3026
3027 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3028
3029 /* If SRC is not a pointer type, don't do this operation inline. */
3030 if (align == 0)
3031 return NULL_RTX;
3032
3033 /* Bail out if we can't compute strlen in the right mode. */
3034 while (insn_mode != VOIDmode)
3035 {
3036 icode = optab_handler (strlen_optab, insn_mode);
3037 if (icode != CODE_FOR_nothing)
3038 break;
3039
3040 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3041 }
3042 if (insn_mode == VOIDmode)
3043 return NULL_RTX;
3044
3045 /* Make a place to hold the source address. We will not expand
3046 the actual source until we are sure that the expansion will
3047 not fail -- there are trees that cannot be expanded twice. */
3048 src_reg = gen_reg_rtx (Pmode);
3049
3050 /* Mark the beginning of the strlen sequence so we can emit the
3051 source operand later. */
3052 before_strlen = get_last_insn ();
3053
3054 create_output_operand (&ops[0], target, insn_mode);
3055 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3056 create_integer_operand (&ops[2], 0);
3057 create_integer_operand (&ops[3], align);
3058 if (!maybe_expand_insn (icode, 4, ops))
3059 return NULL_RTX;
3060
3061 /* Now that we are assured of success, expand the source. */
3062 start_sequence ();
3063 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3064 if (pat != src_reg)
3065 {
3066 #ifdef POINTERS_EXTEND_UNSIGNED
3067 if (GET_MODE (pat) != Pmode)
3068 pat = convert_to_mode (Pmode, pat,
3069 POINTERS_EXTEND_UNSIGNED);
3070 #endif
3071 emit_move_insn (src_reg, pat);
3072 }
3073 pat = get_insns ();
3074 end_sequence ();
3075
3076 if (before_strlen)
3077 emit_insn_after (pat, before_strlen);
3078 else
3079 emit_insn_before (pat, get_insns ());
3080
3081 /* Return the value in the proper mode for this function. */
3082 if (GET_MODE (ops[0].value) == target_mode)
3083 target = ops[0].value;
3084 else if (target != 0)
3085 convert_move (target, ops[0].value, 0);
3086 else
3087 target = convert_to_mode (target_mode, ops[0].value, 0);
3088
3089 return target;
3090 }
3091 }
3092
3093 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3094 bytes from constant string DATA + OFFSET and return it as target
3095 constant. */
3096
3097 static rtx
3098 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3099 machine_mode mode)
3100 {
3101 const char *str = (const char *) data;
3102
3103 gcc_assert (offset >= 0
3104 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3105 <= strlen (str) + 1));
3106
3107 return c_readstr (str + offset, mode);
3108 }
3109
3110 /* LEN specifies the length of the block for the memcpy/memset operation.
3111 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3112 In some cases we can make a very likely guess about the maximal size,
3113 which we then store into PROBABLE_MAX_SIZE. */
3114
3115 static void
3116 determine_block_size (tree len, rtx len_rtx,
3117 unsigned HOST_WIDE_INT *min_size,
3118 unsigned HOST_WIDE_INT *max_size,
3119 unsigned HOST_WIDE_INT *probable_max_size)
3120 {
3121 if (CONST_INT_P (len_rtx))
3122 {
3123 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3124 return;
3125 }
3126 else
3127 {
3128 wide_int min, max;
3129 enum value_range_type range_type = VR_UNDEFINED;
3130
3131 /* Determine bounds from the type. */
3132 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3133 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3134 else
3135 *min_size = 0;
3136 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3137 *probable_max_size = *max_size
3138 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3139 else
3140 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3141
3142 if (TREE_CODE (len) == SSA_NAME)
3143 range_type = get_range_info (len, &min, &max);
3144 if (range_type == VR_RANGE)
3145 {
3146 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3147 *min_size = min.to_uhwi ();
3148 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3149 *probable_max_size = *max_size = max.to_uhwi ();
3150 }
3151 else if (range_type == VR_ANTI_RANGE)
3152 {
3153 /* An anti-range 0...N lets us determine the minimal size as N+1. */
3154 if (min == 0)
3155 {
3156 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3157 *min_size = max.to_uhwi () + 1;
3158 }
3159 /* Code like
3160
3161 int n;
3162 if (n < 100)
3163 memcpy (a, b, n)
3164
3165 produces an anti-range allowing negative values of N. We can
3166 still use this information to guess that N is not negative.
3167 */
3168 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3169 *probable_max_size = min.to_uhwi () - 1;
3170 }
3171 }
3172 gcc_checking_assert (*max_size <=
3173 (unsigned HOST_WIDE_INT)
3174 GET_MODE_MASK (GET_MODE (len_rtx)));
3175 }
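/* Editorial example: given

     size_t n = ...;
     if (n >= 8 && n <= 64)
       memcpy (a, b, n);

   range information on N lets the function above set *MIN_SIZE to 8
   and both *MAX_SIZE and *PROBABLE_MAX_SIZE to 64, enabling a better
   block-move strategy in the caller. */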
3176
3177 /* Helper function to do the actual work for expand_builtin_memcpy. */
3178
3179 static rtx
3180 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3181 {
3182 const char *src_str;
3183 unsigned int src_align = get_pointer_alignment (src);
3184 unsigned int dest_align = get_pointer_alignment (dest);
3185 rtx dest_mem, src_mem, dest_addr, len_rtx;
3186 HOST_WIDE_INT expected_size = -1;
3187 unsigned int expected_align = 0;
3188 unsigned HOST_WIDE_INT min_size;
3189 unsigned HOST_WIDE_INT max_size;
3190 unsigned HOST_WIDE_INT probable_max_size;
3191
3192 /* If DEST is not a pointer type, call the normal function. */
3193 if (dest_align == 0)
3194 return NULL_RTX;
3195
3196 /* If SRC is not a pointer type, don't do this
3197 operation in-line. */
3198 if (src_align == 0)
3199 return NULL_RTX;
3200
3201 if (currently_expanding_gimple_stmt)
3202 stringop_block_profile (currently_expanding_gimple_stmt,
3203 &expected_align, &expected_size);
3204
3205 if (expected_align < dest_align)
3206 expected_align = dest_align;
3207 dest_mem = get_memory_rtx (dest, len);
3208 set_mem_align (dest_mem, dest_align);
3209 len_rtx = expand_normal (len);
3210 determine_block_size (len, len_rtx, &min_size, &max_size,
3211 &probable_max_size);
3212 src_str = c_getstr (src);
3213
3214 /* If SRC is a string constant and the block move would be done
3215 by pieces, we can avoid loading the string from memory
3216 and only store the computed constants. */
3217 if (src_str
3218 && CONST_INT_P (len_rtx)
3219 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3220 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3221 CONST_CAST (char *, src_str),
3222 dest_align, false))
3223 {
3224 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3225 builtin_memcpy_read_str,
3226 CONST_CAST (char *, src_str),
3227 dest_align, false, 0);
3228 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3229 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3230 return dest_mem;
3231 }
3232
3233 src_mem = get_memory_rtx (src, len);
3234 set_mem_align (src_mem, src_align);
3235
3236 /* Copy word part most expediently. */
3237 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3238 CALL_EXPR_TAILCALL (exp)
3239 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3240 expected_align, expected_size,
3241 min_size, max_size, probable_max_size);
3242
3243 if (dest_addr == 0)
3244 {
3245 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3246 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3247 }
3248
3249 return dest_addr;
3250 }
3251
3252 /* Expand a call EXP to the memcpy builtin.
3253 Return NULL_RTX if we failed; the caller should emit a normal call,
3254 otherwise try to get the result in TARGET, if convenient (and in
3255 mode MODE if that's convenient). */
3256
3257 static rtx
3258 expand_builtin_memcpy (tree exp, rtx target)
3259 {
3260 if (!validate_arglist (exp,
3261 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3262 return NULL_RTX;
3263 else
3264 {
3265 tree dest = CALL_EXPR_ARG (exp, 0);
3266 tree src = CALL_EXPR_ARG (exp, 1);
3267 tree len = CALL_EXPR_ARG (exp, 2);
3268 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3269 }
3270 }
3271
3272 /* Expand an instrumented call EXP to the memcpy builtin.
3273 Return NULL_RTX if we failed; the caller should emit a normal call,
3274 otherwise try to get the result in TARGET, if convenient (and in
3275 mode MODE if that's convenient). */
3276
3277 static rtx
3278 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3279 {
3280 if (!validate_arglist (exp,
3281 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3282 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3283 INTEGER_TYPE, VOID_TYPE))
3284 return NULL_RTX;
3285 else
3286 {
3287 tree dest = CALL_EXPR_ARG (exp, 0);
3288 tree src = CALL_EXPR_ARG (exp, 2);
3289 tree len = CALL_EXPR_ARG (exp, 4);
3290 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3291
3292 /* Return src bounds with the result. */
3293 if (res)
3294 {
3295 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3296 expand_normal (CALL_EXPR_ARG (exp, 1)));
3297 res = chkp_join_splitted_slot (res, bnd);
3298 }
3299 return res;
3300 }
3301 }
3302
3303 /* Expand a call EXP to the mempcpy builtin.
3304 Return NULL_RTX if we failed; the caller should emit a normal call,
3305 otherwise try to get the result in TARGET, if convenient (and in
3306 mode MODE if that's convenient). If ENDP is 0 return the
3307 destination pointer, if ENDP is 1 return the end pointer ala
3308 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3309 stpcpy. */
3310
3311 static rtx
3312 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3313 {
3314 if (!validate_arglist (exp,
3315 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3316 return NULL_RTX;
3317 else
3318 {
3319 tree dest = CALL_EXPR_ARG (exp, 0);
3320 tree src = CALL_EXPR_ARG (exp, 1);
3321 tree len = CALL_EXPR_ARG (exp, 2);
3322 return expand_builtin_mempcpy_args (dest, src, len,
3323 target, mode, /*endp=*/ 1,
3324 exp);
3325 }
3326 }
3327
3328 /* Expand an instrumented call EXP to the mempcpy builtin.
3329 Return NULL_RTX if we failed; the caller should emit a normal call,
3330 otherwise try to get the result in TARGET, if convenient (and in
3331 mode MODE if that's convenient). */
3332
3333 static rtx
3334 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3335 {
3336 if (!validate_arglist (exp,
3337 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3338 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3339 INTEGER_TYPE, VOID_TYPE))
3340 return NULL_RTX;
3341 else
3342 {
3343 tree dest = CALL_EXPR_ARG (exp, 0);
3344 tree src = CALL_EXPR_ARG (exp, 2);
3345 tree len = CALL_EXPR_ARG (exp, 4);
3346 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3347 mode, 1, exp);
3348
3349       /* Return dest bounds with the result.  */
3350 if (res)
3351 {
3352 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3353 expand_normal (CALL_EXPR_ARG (exp, 1)));
3354 res = chkp_join_splitted_slot (res, bnd);
3355 }
3356 return res;
3357 }
3358 }
3359
3360 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3361 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3362 so that this can also be called without constructing an actual CALL_EXPR.
3363 The other arguments and return value are the same as for
3364 expand_builtin_mempcpy. */
3365
3366 static rtx
3367 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3368 rtx target, machine_mode mode, int endp,
3369 tree orig_exp)
3370 {
3371 tree fndecl = get_callee_fndecl (orig_exp);
3372
3373 /* If return value is ignored, transform mempcpy into memcpy. */
3374 if (target == const0_rtx
3375 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3376 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3377 {
3378 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3379 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3380 dest, src, len);
3381 return expand_expr (result, target, mode, EXPAND_NORMAL);
3382 }
3383 else if (target == const0_rtx
3384 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3385 {
3386 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3387 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3388 dest, src, len);
3389 return expand_expr (result, target, mode, EXPAND_NORMAL);
3390 }
3391 else
3392 {
3393 const char *src_str;
3394 unsigned int src_align = get_pointer_alignment (src);
3395 unsigned int dest_align = get_pointer_alignment (dest);
3396 rtx dest_mem, src_mem, len_rtx;
3397
3398 /* If either SRC or DEST is not a pointer type, don't do this
3399 operation in-line. */
3400 if (dest_align == 0 || src_align == 0)
3401 return NULL_RTX;
3402
3403 /* If LEN is not constant, call the normal function. */
3404 if (! tree_fits_uhwi_p (len))
3405 return NULL_RTX;
3406
3407 len_rtx = expand_normal (len);
3408 src_str = c_getstr (src);
3409
3410 /* If SRC is a string constant and block move would be done
3411 by pieces, we can avoid loading the string from memory
3412 	 and only store the computed constants.  */
3413 if (src_str
3414 && CONST_INT_P (len_rtx)
3415 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3416 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3417 CONST_CAST (char *, src_str),
3418 dest_align, false))
3419 {
3420 dest_mem = get_memory_rtx (dest, len);
3421 set_mem_align (dest_mem, dest_align);
3422 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3423 builtin_memcpy_read_str,
3424 CONST_CAST (char *, src_str),
3425 dest_align, false, endp);
3426 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3427 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3428 return dest_mem;
3429 }
3430
3431 if (CONST_INT_P (len_rtx)
3432 && can_move_by_pieces (INTVAL (len_rtx),
3433 MIN (dest_align, src_align)))
3434 {
3435 dest_mem = get_memory_rtx (dest, len);
3436 set_mem_align (dest_mem, dest_align);
3437 src_mem = get_memory_rtx (src, len);
3438 set_mem_align (src_mem, src_align);
3439 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3440 MIN (dest_align, src_align), endp);
3441 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3442 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3443 return dest_mem;
3444 }
3445
3446 return NULL_RTX;
3447 }
3448 }
3449
3450 #ifndef HAVE_movstr
3451 # define HAVE_movstr 0
3452 # define CODE_FOR_movstr CODE_FOR_nothing
3453 #endif
3454
3455 /* Expand into a movstr instruction, if one is available.  Return NULL_RTX
3456    if we failed; the caller should emit a normal call, otherwise try to
3457 get the result in TARGET, if convenient. If ENDP is 0 return the
3458 destination pointer, if ENDP is 1 return the end pointer ala
3459 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3460 stpcpy. */
3461
3462 static rtx
3463 expand_movstr (tree dest, tree src, rtx target, int endp)
3464 {
3465 struct expand_operand ops[3];
3466 rtx dest_mem;
3467 rtx src_mem;
3468
3469 if (!HAVE_movstr)
3470 return NULL_RTX;
3471
3472 dest_mem = get_memory_rtx (dest, NULL);
3473 src_mem = get_memory_rtx (src, NULL);
3474 if (!endp)
3475 {
3476 target = force_reg (Pmode, XEXP (dest_mem, 0));
3477 dest_mem = replace_equiv_address (dest_mem, target);
3478 }
3479
3480 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3481 create_fixed_operand (&ops[1], dest_mem);
3482 create_fixed_operand (&ops[2], src_mem);
3483 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3484 return NULL_RTX;
3485
3486 if (endp && target != const0_rtx)
3487 {
3488 target = ops[0].value;
3489 /* movstr is supposed to set end to the address of the NUL
3490 terminator. If the caller requested a mempcpy-like return value,
3491 adjust it. */
3492 if (endp == 1)
3493 {
3494 rtx tem = plus_constant (GET_MODE (target),
3495 gen_lowpart (GET_MODE (target), target), 1);
3496 emit_move_insn (target, force_operand (tem, NULL_RTX));
3497 }
3498 }
3499 return target;
3500 }
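
/* Worked example (illustrative): if movstr copies "abc" into BUF, its
   output operand is set to buf + 3, the address of the NUL; for a
   mempcpy-style caller (ENDP == 1) the plus_constant adjustment above
   turns that into buf + 4.  */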
3501
3502 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3503    NULL_RTX if we failed; the caller should emit a normal call, otherwise
3504    try to get the result in TARGET, if convenient.  */
3506
3507 static rtx
3508 expand_builtin_strcpy (tree exp, rtx target)
3509 {
3510 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3511 {
3512 tree dest = CALL_EXPR_ARG (exp, 0);
3513 tree src = CALL_EXPR_ARG (exp, 1);
3514 return expand_builtin_strcpy_args (dest, src, target);
3515 }
3516 return NULL_RTX;
3517 }
3518
3519 /* Helper function to do the actual work for expand_builtin_strcpy. The
3520 arguments to the builtin_strcpy call DEST and SRC are broken out
3521 so that this can also be called without constructing an actual CALL_EXPR.
3522 The other arguments and return value are the same as for
3523 expand_builtin_strcpy. */
3524
3525 static rtx
3526 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3527 {
3528 return expand_movstr (dest, src, target, /*endp=*/0);
3529 }
3530
3531 /* Expand a call EXP to the stpcpy builtin.
3532    Return NULL_RTX if we failed; the caller should emit a normal call,
3533 otherwise try to get the result in TARGET, if convenient (and in
3534 mode MODE if that's convenient). */
3535
3536 static rtx
3537 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3538 {
3539 tree dst, src;
3540 location_t loc = EXPR_LOCATION (exp);
3541
3542 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3543 return NULL_RTX;
3544
3545 dst = CALL_EXPR_ARG (exp, 0);
3546 src = CALL_EXPR_ARG (exp, 1);
3547
3548 /* If return value is ignored, transform stpcpy into strcpy. */
3549 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3550 {
3551 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3552 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3553 return expand_expr (result, target, mode, EXPAND_NORMAL);
3554 }
3555 else
3556 {
3557 tree len, lenp1;
3558 rtx ret;
3559
3560 /* Ensure we get an actual string whose length can be evaluated at
3561 compile-time, not an expression containing a string. This is
3562 because the latter will potentially produce pessimized code
3563 when used to produce the return value. */
3564 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3565 return expand_movstr (dst, src, target, /*endp=*/2);
3566
3567 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3568 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3569 target, mode, /*endp=*/2,
3570 exp);
3571
3572 if (ret)
3573 return ret;
3574
3575 if (TREE_CODE (len) == INTEGER_CST)
3576 {
3577 rtx len_rtx = expand_normal (len);
3578
3579 if (CONST_INT_P (len_rtx))
3580 {
3581 ret = expand_builtin_strcpy_args (dst, src, target);
3582
3583 if (ret)
3584 {
3585 if (! target)
3586 {
3587 if (mode != VOIDmode)
3588 target = gen_reg_rtx (mode);
3589 else
3590 target = gen_reg_rtx (GET_MODE (ret));
3591 }
3592 if (GET_MODE (target) != GET_MODE (ret))
3593 ret = gen_lowpart (GET_MODE (target), ret);
3594
3595 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3596 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3597 gcc_assert (ret);
3598
3599 return target;
3600 }
3601 }
3602 }
3603
3604 return expand_movstr (dst, src, target, /*endp=*/2);
3605 }
3606 }
3607
3608 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3609 bytes from constant string DATA + OFFSET and return it as target
3610 constant. */
3611
3612 rtx
3613 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3614 machine_mode mode)
3615 {
3616 const char *str = (const char *) data;
3617
3618 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3619 return const0_rtx;
3620
3621 return c_readstr (str + offset, mode);
3622 }
3623
3624 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3625    NULL_RTX if we failed; the caller should emit a normal call.  */
3626
3627 static rtx
3628 expand_builtin_strncpy (tree exp, rtx target)
3629 {
3630 location_t loc = EXPR_LOCATION (exp);
3631
3632 if (validate_arglist (exp,
3633 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3634 {
3635 tree dest = CALL_EXPR_ARG (exp, 0);
3636 tree src = CALL_EXPR_ARG (exp, 1);
3637 tree len = CALL_EXPR_ARG (exp, 2);
3638 tree slen = c_strlen (src, 1);
3639
3640       /* We must be passed constant LEN and SRC parameters.  */
3641 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3642 return NULL_RTX;
3643
3644 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3645
3646 /* We're required to pad with trailing zeros if the requested
3647 len is greater than strlen(s2)+1. In that case try to
3648 	 use store_by_pieces; if that fails, punt.  */
3649 if (tree_int_cst_lt (slen, len))
3650 {
3651 unsigned int dest_align = get_pointer_alignment (dest);
3652 const char *p = c_getstr (src);
3653 rtx dest_mem;
3654
3655 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3656 || !can_store_by_pieces (tree_to_uhwi (len),
3657 builtin_strncpy_read_str,
3658 CONST_CAST (char *, p),
3659 dest_align, false))
3660 return NULL_RTX;
3661
3662 dest_mem = get_memory_rtx (dest, len);
3663 store_by_pieces (dest_mem, tree_to_uhwi (len),
3664 builtin_strncpy_read_str,
3665 CONST_CAST (char *, p), dest_align, false, 0);
3666 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3667 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3668 return dest_mem;
3669 }
3670 }
3671 return NULL_RTX;
3672 }
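
/* Illustrative example: with a constant source and length,

     char buf[8];
     __builtin_strncpy (buf, "hi", 8);

   SLEN is 3 (strlen + 1) and LEN is 8, so the store-by-pieces path
   above writes 'h', 'i' and six NUL bytes, providing the zero padding
   strncpy requires.  */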
3673
3674 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3675 bytes from constant string DATA + OFFSET and return it as target
3676 constant. */
3677
3678 rtx
3679 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3680 machine_mode mode)
3681 {
3682 const char *c = (const char *) data;
3683 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3684
3685 memset (p, *c, GET_MODE_SIZE (mode));
3686
3687 return c_readstr (p, mode);
3688 }
3689
3690 /* Callback routine for store_by_pieces. Return the RTL of a register
3691 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3692    char value given in the RTL register DATA.  For example, if MODE is
3693 4 bytes wide, return the RTL for 0x01010101*data. */
3694
3695 static rtx
3696 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3697 machine_mode mode)
3698 {
3699 rtx target, coeff;
3700 size_t size;
3701 char *p;
3702
3703 size = GET_MODE_SIZE (mode);
3704 if (size == 1)
3705 return (rtx) data;
3706
3707 p = XALLOCAVEC (char, size);
3708 memset (p, 1, size);
3709 coeff = c_readstr (p, mode);
3710
3711 target = convert_to_mode (mode, (rtx) data, 1);
3712 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3713 return force_reg (mode, target);
3714 }
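
/* Worked example (illustrative): for a 4-byte MODE the coefficient is
   read from the bytes { 1, 1, 1, 1 }, i.e. 0x01010101, so a byte value
   such as 0xab is replicated by the multiplication above to
   0xab * 0x01010101 == 0xabababab, one copy per byte of the mode.  */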
3715
3716 /* Expand expression EXP, which is a call to the memset builtin. Return
3717    NULL_RTX if we failed; the caller should emit a normal call, otherwise
3718 try to get the result in TARGET, if convenient (and in mode MODE if that's
3719 convenient). */
3720
3721 static rtx
3722 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3723 {
3724 if (!validate_arglist (exp,
3725 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3726 return NULL_RTX;
3727 else
3728 {
3729 tree dest = CALL_EXPR_ARG (exp, 0);
3730 tree val = CALL_EXPR_ARG (exp, 1);
3731 tree len = CALL_EXPR_ARG (exp, 2);
3732 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3733 }
3734 }
3735
3736 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3737    Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3738 try to get the result in TARGET, if convenient (and in mode MODE if that's
3739 convenient). */
3740
3741 static rtx
3742 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3743 {
3744 if (!validate_arglist (exp,
3745 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3746 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3747 return NULL_RTX;
3748 else
3749 {
3750 tree dest = CALL_EXPR_ARG (exp, 0);
3751 tree val = CALL_EXPR_ARG (exp, 2);
3752 tree len = CALL_EXPR_ARG (exp, 3);
3753 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3754
3755       /* Return dest bounds with the result.  */
3756 if (res)
3757 {
3758 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3759 expand_normal (CALL_EXPR_ARG (exp, 1)));
3760 res = chkp_join_splitted_slot (res, bnd);
3761 }
3762 return res;
3763 }
3764 }
3765
3766 /* Helper function to do the actual work for expand_builtin_memset. The
3767 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3768 so that this can also be called without constructing an actual CALL_EXPR.
3769 The other arguments and return value are the same as for
3770 expand_builtin_memset. */
3771
3772 static rtx
3773 expand_builtin_memset_args (tree dest, tree val, tree len,
3774 rtx target, machine_mode mode, tree orig_exp)
3775 {
3776 tree fndecl, fn;
3777 enum built_in_function fcode;
3778 machine_mode val_mode;
3779 char c;
3780 unsigned int dest_align;
3781 rtx dest_mem, dest_addr, len_rtx;
3782 HOST_WIDE_INT expected_size = -1;
3783 unsigned int expected_align = 0;
3784 unsigned HOST_WIDE_INT min_size;
3785 unsigned HOST_WIDE_INT max_size;
3786 unsigned HOST_WIDE_INT probable_max_size;
3787
3788 dest_align = get_pointer_alignment (dest);
3789
3790 /* If DEST is not a pointer type, don't do this operation in-line. */
3791 if (dest_align == 0)
3792 return NULL_RTX;
3793
3794 if (currently_expanding_gimple_stmt)
3795 stringop_block_profile (currently_expanding_gimple_stmt,
3796 &expected_align, &expected_size);
3797
3798 if (expected_align < dest_align)
3799 expected_align = dest_align;
3800
3801 /* If the LEN parameter is zero, return DEST. */
3802 if (integer_zerop (len))
3803 {
3804 /* Evaluate and ignore VAL in case it has side-effects. */
3805 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3806 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3807 }
3808
3809 /* Stabilize the arguments in case we fail. */
3810 dest = builtin_save_expr (dest);
3811 val = builtin_save_expr (val);
3812 len = builtin_save_expr (len);
3813
3814 len_rtx = expand_normal (len);
3815 determine_block_size (len, len_rtx, &min_size, &max_size,
3816 &probable_max_size);
3817 dest_mem = get_memory_rtx (dest, len);
3818 val_mode = TYPE_MODE (unsigned_char_type_node);
3819
3820 if (TREE_CODE (val) != INTEGER_CST)
3821 {
3822 rtx val_rtx;
3823
3824 val_rtx = expand_normal (val);
3825 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3826
3827       /* Assume that we can memset by pieces if we can store
3828 	 the coefficients by pieces (in the required modes).
3829 	 We can't pass builtin_memset_gen_str as that emits RTL.  */
3830 c = 1;
3831 if (tree_fits_uhwi_p (len)
3832 && can_store_by_pieces (tree_to_uhwi (len),
3833 builtin_memset_read_str, &c, dest_align,
3834 true))
3835 {
3836 val_rtx = force_reg (val_mode, val_rtx);
3837 store_by_pieces (dest_mem, tree_to_uhwi (len),
3838 builtin_memset_gen_str, val_rtx, dest_align,
3839 true, 0);
3840 }
3841 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3842 dest_align, expected_align,
3843 expected_size, min_size, max_size,
3844 probable_max_size))
3845 goto do_libcall;
3846
3847 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3848 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3849 return dest_mem;
3850 }
3851
3852 if (target_char_cast (val, &c))
3853 goto do_libcall;
3854
3855 if (c)
3856 {
3857 if (tree_fits_uhwi_p (len)
3858 && can_store_by_pieces (tree_to_uhwi (len),
3859 builtin_memset_read_str, &c, dest_align,
3860 true))
3861 store_by_pieces (dest_mem, tree_to_uhwi (len),
3862 builtin_memset_read_str, &c, dest_align, true, 0);
3863 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3864 gen_int_mode (c, val_mode),
3865 dest_align, expected_align,
3866 expected_size, min_size, max_size,
3867 probable_max_size))
3868 goto do_libcall;
3869
3870 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3871 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3872 return dest_mem;
3873 }
3874
3875 set_mem_align (dest_mem, dest_align);
3876 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3877 CALL_EXPR_TAILCALL (orig_exp)
3878 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3879 expected_align, expected_size,
3880 min_size, max_size,
3881 probable_max_size);
3882
3883 if (dest_addr == 0)
3884 {
3885 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3886 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3887 }
3888
3889 return dest_addr;
3890
3891 do_libcall:
3892 fndecl = get_callee_fndecl (orig_exp);
3893 fcode = DECL_FUNCTION_CODE (fndecl);
3894 if (fcode == BUILT_IN_MEMSET
3895 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3896 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3897 dest, val, len);
3898 else if (fcode == BUILT_IN_BZERO)
3899 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3900 dest, len);
3901 else
3902 gcc_unreachable ();
3903 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3904 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3905 return expand_call (fn, target, target == const0_rtx);
3906 }
3907
3908 /* Expand expression EXP, which is a call to the bzero builtin. Return
3909    NULL_RTX if we failed; the caller should emit a normal call.  */
3910
3911 static rtx
3912 expand_builtin_bzero (tree exp)
3913 {
3914 tree dest, size;
3915 location_t loc = EXPR_LOCATION (exp);
3916
3917 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3918 return NULL_RTX;
3919
3920 dest = CALL_EXPR_ARG (exp, 0);
3921 size = CALL_EXPR_ARG (exp, 1);
3922
3923 /* New argument list transforming bzero(ptr x, int y) to
3924 memset(ptr x, int 0, size_t y). This is done this way
3925      so that if it isn't expanded inline, we fall back to
3926 calling bzero instead of memset. */
3927
3928 return expand_builtin_memset_args (dest, integer_zero_node,
3929 fold_convert_loc (loc,
3930 size_type_node, size),
3931 const0_rtx, VOIDmode, exp);
3932 }
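
/* Sketch of the transform (illustrative): bzero (p, n) is expanded
   here exactly as memset (p, 0, (size_t) n) would be, with const0_rtx
   as the target because bzero's return value is void; only if inline
   expansion fails does the emitted libcall remain bzero.  */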
3933
3934 /* Expand expression EXP, which is a call to the memcmp built-in function.
3935 Return NULL_RTX if we failed and the caller should emit a normal call,
3936 otherwise try to get the result in TARGET, if convenient (and in mode
3937 MODE, if that's convenient). */
3938
3939 static rtx
3940 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3941 ATTRIBUTE_UNUSED machine_mode mode)
3942 {
3943 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3944
3945 if (!validate_arglist (exp,
3946 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3947 return NULL_RTX;
3948
3949 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3950 implementing memcmp because it will stop if it encounters two
3951 zero bytes. */
3952 #if defined HAVE_cmpmemsi
3953 {
3954 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3955 rtx result;
3956 rtx insn;
3957 tree arg1 = CALL_EXPR_ARG (exp, 0);
3958 tree arg2 = CALL_EXPR_ARG (exp, 1);
3959 tree len = CALL_EXPR_ARG (exp, 2);
3960
3961 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3962 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3963 machine_mode insn_mode;
3964
3965 if (HAVE_cmpmemsi)
3966 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3967 else
3968 return NULL_RTX;
3969
3970     /* If either argument is not a pointer, let the caller emit a normal call.  */
3971 if (arg1_align == 0 || arg2_align == 0)
3972 return NULL_RTX;
3973
3974 /* Make a place to write the result of the instruction. */
3975 result = target;
3976 if (! (result != 0
3977 && REG_P (result) && GET_MODE (result) == insn_mode
3978 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3979 result = gen_reg_rtx (insn_mode);
3980
3981 arg1_rtx = get_memory_rtx (arg1, len);
3982 arg2_rtx = get_memory_rtx (arg2, len);
3983 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3984
3985 /* Set MEM_SIZE as appropriate. */
3986 if (CONST_INT_P (arg3_rtx))
3987 {
3988 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3989 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3990 }
3991
3992 if (HAVE_cmpmemsi)
3993 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3994 GEN_INT (MIN (arg1_align, arg2_align)));
3995 else
3996 gcc_unreachable ();
3997
3998 if (insn)
3999 emit_insn (insn);
4000 else
4001 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4002 TYPE_MODE (integer_type_node), 3,
4003 XEXP (arg1_rtx, 0), Pmode,
4004 XEXP (arg2_rtx, 0), Pmode,
4005 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4006 TYPE_UNSIGNED (sizetype)),
4007 TYPE_MODE (sizetype));
4008
4009 /* Return the value in the proper mode for this function. */
4010 mode = TYPE_MODE (TREE_TYPE (exp));
4011 if (GET_MODE (result) == mode)
4012 return result;
4013 else if (target != 0)
4014 {
4015 convert_move (target, result, 0);
4016 return target;
4017 }
4018 else
4019 return convert_to_mode (mode, result, 0);
4020 }
4021 #endif /* HAVE_cmpmemsi. */
4022
4023 return NULL_RTX;
4024 }
4025
4026 /* Expand expression EXP, which is a call to the strcmp builtin.  Return
4027    NULL_RTX if we failed; the caller should emit a normal call, otherwise
4028    try to get the result in TARGET, if convenient.  */
4029
4030 static rtx
4031 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4032 {
4033 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4034 return NULL_RTX;
4035
4036 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4037 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4038 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4039 {
4040 rtx arg1_rtx, arg2_rtx;
4041 rtx result, insn = NULL_RTX;
4042 tree fndecl, fn;
4043 tree arg1 = CALL_EXPR_ARG (exp, 0);
4044 tree arg2 = CALL_EXPR_ARG (exp, 1);
4045
4046 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4047 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4048
4049       /* If either argument is not a pointer, let the caller emit a normal call.  */
4050 if (arg1_align == 0 || arg2_align == 0)
4051 return NULL_RTX;
4052
4053       /* Stabilize the arguments in case gen_cmpstr(n)si fails.  */
4054 arg1 = builtin_save_expr (arg1);
4055 arg2 = builtin_save_expr (arg2);
4056
4057 arg1_rtx = get_memory_rtx (arg1, NULL);
4058 arg2_rtx = get_memory_rtx (arg2, NULL);
4059
4060 #ifdef HAVE_cmpstrsi
4061 /* Try to call cmpstrsi. */
4062 if (HAVE_cmpstrsi)
4063 {
4064 machine_mode insn_mode
4065 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4066
4067 /* Make a place to write the result of the instruction. */
4068 result = target;
4069 if (! (result != 0
4070 && REG_P (result) && GET_MODE (result) == insn_mode
4071 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4072 result = gen_reg_rtx (insn_mode);
4073
4074 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4075 GEN_INT (MIN (arg1_align, arg2_align)));
4076 }
4077 #endif
4078 #ifdef HAVE_cmpstrnsi
4079 /* Try to determine at least one length and call cmpstrnsi. */
4080 if (!insn && HAVE_cmpstrnsi)
4081 {
4082 tree len;
4083 rtx arg3_rtx;
4084
4085 machine_mode insn_mode
4086 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4087 tree len1 = c_strlen (arg1, 1);
4088 tree len2 = c_strlen (arg2, 1);
4089
4090 if (len1)
4091 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4092 if (len2)
4093 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4094
4095 /* If we don't have a constant length for the first, use the length
4096 of the second, if we know it. We don't require a constant for
4097 this case; some cost analysis could be done if both are available
4098 but neither is constant. For now, assume they're equally cheap,
4099 unless one has side effects. If both strings have constant lengths,
4100 use the smaller. */
4101
4102 if (!len1)
4103 len = len2;
4104 else if (!len2)
4105 len = len1;
4106 else if (TREE_SIDE_EFFECTS (len1))
4107 len = len2;
4108 else if (TREE_SIDE_EFFECTS (len2))
4109 len = len1;
4110 else if (TREE_CODE (len1) != INTEGER_CST)
4111 len = len2;
4112 else if (TREE_CODE (len2) != INTEGER_CST)
4113 len = len1;
4114 else if (tree_int_cst_lt (len1, len2))
4115 len = len1;
4116 else
4117 len = len2;
4118
4119 /* If both arguments have side effects, we cannot optimize. */
4120 if (!len || TREE_SIDE_EFFECTS (len))
4121 goto do_libcall;
4122
4123 arg3_rtx = expand_normal (len);
4124
4125 /* Make a place to write the result of the instruction. */
4126 result = target;
4127 if (! (result != 0
4128 && REG_P (result) && GET_MODE (result) == insn_mode
4129 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4130 result = gen_reg_rtx (insn_mode);
4131
4132 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4133 GEN_INT (MIN (arg1_align, arg2_align)));
4134 }
4135 #endif
4136
4137 if (insn)
4138 {
4139 machine_mode mode;
4140 emit_insn (insn);
4141
4142 /* Return the value in the proper mode for this function. */
4143 mode = TYPE_MODE (TREE_TYPE (exp));
4144 if (GET_MODE (result) == mode)
4145 return result;
4146 if (target == 0)
4147 return convert_to_mode (mode, result, 0);
4148 convert_move (target, result, 0);
4149 return target;
4150 }
4151
4152 /* Expand the library call ourselves using a stabilized argument
4153 list to avoid re-evaluating the function's arguments twice. */
4154 #ifdef HAVE_cmpstrnsi
4155 do_libcall:
4156 #endif
4157 fndecl = get_callee_fndecl (exp);
4158 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4159 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4160 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4161 return expand_call (fn, target, target == const0_rtx);
4162 }
4163 #endif
4164 return NULL_RTX;
4165 }
4166
4167 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4168    NULL_RTX if we failed; the caller should emit a normal call, otherwise
4169    try to get the result in TARGET, if convenient.  */
4170
4171 static rtx
4172 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4173 ATTRIBUTE_UNUSED machine_mode mode)
4174 {
4175 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4176
4177 if (!validate_arglist (exp,
4178 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4179 return NULL_RTX;
4180
4181 /* If c_strlen can determine an expression for one of the string
4182 lengths, and it doesn't have side effects, then emit cmpstrnsi
4183 using length MIN(strlen(string)+1, arg3). */
4184 #ifdef HAVE_cmpstrnsi
4185 if (HAVE_cmpstrnsi)
4186 {
4187 tree len, len1, len2;
4188 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4189 rtx result, insn;
4190 tree fndecl, fn;
4191 tree arg1 = CALL_EXPR_ARG (exp, 0);
4192 tree arg2 = CALL_EXPR_ARG (exp, 1);
4193 tree arg3 = CALL_EXPR_ARG (exp, 2);
4194
4195 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4196 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4197 machine_mode insn_mode
4198 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4199
4200 len1 = c_strlen (arg1, 1);
4201 len2 = c_strlen (arg2, 1);
4202
4203 if (len1)
4204 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4205 if (len2)
4206 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4207
4208 /* If we don't have a constant length for the first, use the length
4209 of the second, if we know it. We don't require a constant for
4210 this case; some cost analysis could be done if both are available
4211 but neither is constant. For now, assume they're equally cheap,
4212 unless one has side effects. If both strings have constant lengths,
4213 use the smaller. */
4214
4215 if (!len1)
4216 len = len2;
4217 else if (!len2)
4218 len = len1;
4219 else if (TREE_SIDE_EFFECTS (len1))
4220 len = len2;
4221 else if (TREE_SIDE_EFFECTS (len2))
4222 len = len1;
4223 else if (TREE_CODE (len1) != INTEGER_CST)
4224 len = len2;
4225 else if (TREE_CODE (len2) != INTEGER_CST)
4226 len = len1;
4227 else if (tree_int_cst_lt (len1, len2))
4228 len = len1;
4229 else
4230 len = len2;
4231
4232 /* If both arguments have side effects, we cannot optimize. */
4233 if (!len || TREE_SIDE_EFFECTS (len))
4234 return NULL_RTX;
4235
4236 /* The actual new length parameter is MIN(len,arg3). */
4237 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4238 fold_convert_loc (loc, TREE_TYPE (len), arg3));
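
      /* For example (illustrative): strncmp ("abc", s, 100) with unknown S
	 gives LEN1 == 4, so LEN becomes MIN (4, 100) == 4 and the
	 comparison never reads past the constant string's NUL.  */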
4239
4240     /* If either argument is not a pointer, let the caller emit a normal call.  */
4241 if (arg1_align == 0 || arg2_align == 0)
4242 return NULL_RTX;
4243
4244 /* Make a place to write the result of the instruction. */
4245 result = target;
4246 if (! (result != 0
4247 && REG_P (result) && GET_MODE (result) == insn_mode
4248 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4249 result = gen_reg_rtx (insn_mode);
4250
4251 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4252 arg1 = builtin_save_expr (arg1);
4253 arg2 = builtin_save_expr (arg2);
4254 len = builtin_save_expr (len);
4255
4256 arg1_rtx = get_memory_rtx (arg1, len);
4257 arg2_rtx = get_memory_rtx (arg2, len);
4258 arg3_rtx = expand_normal (len);
4259 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4260 GEN_INT (MIN (arg1_align, arg2_align)));
4261 if (insn)
4262 {
4263 emit_insn (insn);
4264
4265 /* Return the value in the proper mode for this function. */
4266 mode = TYPE_MODE (TREE_TYPE (exp));
4267 if (GET_MODE (result) == mode)
4268 return result;
4269 if (target == 0)
4270 return convert_to_mode (mode, result, 0);
4271 convert_move (target, result, 0);
4272 return target;
4273 }
4274
4275 /* Expand the library call ourselves using a stabilized argument
4276 list to avoid re-evaluating the function's arguments twice. */
4277 fndecl = get_callee_fndecl (exp);
4278 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4279 arg1, arg2, len);
4280 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4281 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4282 return expand_call (fn, target, target == const0_rtx);
4283 }
4284 #endif
4285 return NULL_RTX;
4286 }
4287
4288 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4289 if that's convenient. */
4290
4291 rtx
4292 expand_builtin_saveregs (void)
4293 {
4294 rtx val;
4295 rtx_insn *seq;
4296
4297 /* Don't do __builtin_saveregs more than once in a function.
4298 Save the result of the first call and reuse it. */
4299 if (saveregs_value != 0)
4300 return saveregs_value;
4301
4302 /* When this function is called, it means that registers must be
4303 saved on entry to this function. So we migrate the call to the
4304 first insn of this function. */
4305
4306 start_sequence ();
4307
4308 /* Do whatever the machine needs done in this case. */
4309 val = targetm.calls.expand_builtin_saveregs ();
4310
4311 seq = get_insns ();
4312 end_sequence ();
4313
4314 saveregs_value = val;
4315
4316 /* Put the insns after the NOTE that starts the function. If this
4317 is inside a start_sequence, make the outer-level insn chain current, so
4318 the code is placed at the start of the function. */
4319 push_topmost_sequence ();
4320 emit_insn_after (seq, entry_of_function ());
4321 pop_topmost_sequence ();
4322
4323 return val;
4324 }
4325
4326 /* Expand a call to __builtin_next_arg. */
4327
4328 static rtx
4329 expand_builtin_next_arg (void)
4330 {
4331 /* Checking arguments is already done in fold_builtin_next_arg
4332 that must be called before this function. */
4333 return expand_binop (ptr_mode, add_optab,
4334 crtl->args.internal_arg_pointer,
4335 crtl->args.arg_offset_rtx,
4336 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4337 }
4338
4339 /* Make it easier for the backends by protecting the valist argument
4340 from multiple evaluations. */
4341
4342 static tree
4343 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4344 {
4345 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4346
4347 /* The current way of determining the type of valist is completely
4348 bogus. We should have the information on the va builtin instead. */
4349 if (!vatype)
4350 vatype = targetm.fn_abi_va_list (cfun->decl);
4351
4352 if (TREE_CODE (vatype) == ARRAY_TYPE)
4353 {
4354 if (TREE_SIDE_EFFECTS (valist))
4355 valist = save_expr (valist);
4356
4357 /* For this case, the backends will be expecting a pointer to
4358 vatype, but it's possible we've actually been given an array
4359 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4360 So fix it. */
4361 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4362 {
4363 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4364 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4365 }
4366 }
4367 else
4368 {
4369 tree pt = build_pointer_type (vatype);
4370
4371 if (! needs_lvalue)
4372 {
4373 if (! TREE_SIDE_EFFECTS (valist))
4374 return valist;
4375
4376 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4377 TREE_SIDE_EFFECTS (valist) = 1;
4378 }
4379
4380 if (TREE_SIDE_EFFECTS (valist))
4381 valist = save_expr (valist);
4382 valist = fold_build2_loc (loc, MEM_REF,
4383 vatype, valist, build_int_cst (pt, 0));
4384 }
4385
4386 return valist;
4387 }
4388
4389 /* The "standard" definition of va_list is void*. */
4390
4391 tree
4392 std_build_builtin_va_list (void)
4393 {
4394 return ptr_type_node;
4395 }
4396
4397 /* The "standard" abi va_list is va_list_type_node. */
4398
4399 tree
4400 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4401 {
4402 return va_list_type_node;
4403 }
4404
4405 /* The "standard" type of va_list is va_list_type_node. */
4406
4407 tree
4408 std_canonical_va_list_type (tree type)
4409 {
4410 tree wtype, htype;
4411
4412 if (INDIRECT_REF_P (type))
4413 type = TREE_TYPE (type);
4414 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4415 type = TREE_TYPE (type);
4416 wtype = va_list_type_node;
4417 htype = type;
4418 /* Treat structure va_list types. */
4419 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4420 htype = TREE_TYPE (htype);
4421 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4422 {
4423 /* If va_list is an array type, the argument may have decayed
4424 to a pointer type, e.g. by being passed to another function.
4425 In that case, unwrap both types so that we can compare the
4426 underlying records. */
4427 if (TREE_CODE (htype) == ARRAY_TYPE
4428 || POINTER_TYPE_P (htype))
4429 {
4430 wtype = TREE_TYPE (wtype);
4431 htype = TREE_TYPE (htype);
4432 }
4433 }
4434 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4435 return va_list_type_node;
4436
4437 return NULL_TREE;
4438 }
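
/* Hedged example: on targets whose va_list is a one-element array of
   records (e.g. the x86_64 ABI's __builtin_va_list), a valist passed
   to another function decays to a pointer to the record, so both
   WTYPE and HTYPE are unwrapped above before comparing main
   variants.  */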
4439
4440 /* The "standard" implementation of va_start: just assign `nextarg' to
4441 the variable. */
4442
4443 void
4444 std_expand_builtin_va_start (tree valist, rtx nextarg)
4445 {
4446 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4447 convert_move (va_r, nextarg, 0);
4448
4449 /* We do not have any valid bounds for the pointer, so
4450 just store zero bounds for it. */
4451 if (chkp_function_instrumented_p (current_function_decl))
4452 chkp_expand_bounds_reset_for_mem (valist,
4453 make_tree (TREE_TYPE (valist),
4454 nextarg));
4455 }
4456
4457 /* Expand EXP, a call to __builtin_va_start. */
4458
4459 static rtx
4460 expand_builtin_va_start (tree exp)
4461 {
4462 rtx nextarg;
4463 tree valist;
4464 location_t loc = EXPR_LOCATION (exp);
4465
4466 if (call_expr_nargs (exp) < 2)
4467 {
4468 error_at (loc, "too few arguments to function %<va_start%>");
4469 return const0_rtx;
4470 }
4471
4472 if (fold_builtin_next_arg (exp, true))
4473 return const0_rtx;
4474
4475 nextarg = expand_builtin_next_arg ();
4476 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4477
4478 if (targetm.expand_builtin_va_start)
4479 targetm.expand_builtin_va_start (valist, nextarg);
4480 else
4481 std_expand_builtin_va_start (valist, nextarg);
4482
4483 return const0_rtx;
4484 }
4485
4486 /* Expand EXP, a call to __builtin_va_end. */
4487
4488 static rtx
4489 expand_builtin_va_end (tree exp)
4490 {
4491 tree valist = CALL_EXPR_ARG (exp, 0);
4492
4493 /* Evaluate for side effects, if needed. I hate macros that don't
4494 do that. */
4495 if (TREE_SIDE_EFFECTS (valist))
4496 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4497
4498 return const0_rtx;
4499 }
4500
4501 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4502 builtin rather than just as an assignment in stdarg.h because of the
4503 nastiness of array-type va_list types. */
4504
4505 static rtx
4506 expand_builtin_va_copy (tree exp)
4507 {
4508 tree dst, src, t;
4509 location_t loc = EXPR_LOCATION (exp);
4510
4511 dst = CALL_EXPR_ARG (exp, 0);
4512 src = CALL_EXPR_ARG (exp, 1);
4513
4514 dst = stabilize_va_list_loc (loc, dst, 1);
4515 src = stabilize_va_list_loc (loc, src, 0);
4516
4517 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4518
4519 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4520 {
4521 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4522 TREE_SIDE_EFFECTS (t) = 1;
4523 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4524 }
4525 else
4526 {
4527 rtx dstb, srcb, size;
4528
4529 /* Evaluate to pointers. */
4530 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4531 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4532 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4533 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4534
4535 dstb = convert_memory_address (Pmode, dstb);
4536 srcb = convert_memory_address (Pmode, srcb);
4537
4538 /* "Dereference" to BLKmode memories. */
4539 dstb = gen_rtx_MEM (BLKmode, dstb);
4540 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4541 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4542 srcb = gen_rtx_MEM (BLKmode, srcb);
4543 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4544 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4545
4546 /* Copy. */
4547 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4548 }
4549
4550 return const0_rtx;
4551 }
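
/* Illustration (a sketch, not authoritative): for an array-type
   va_list the branch above behaves like

     memcpy (&dst[0], &src[0], sizeof (va_list));

   i.e. a block move of TYPE_SIZE_UNIT bytes, whereas the void*-style
   va_list is copied with a plain MODIFY_EXPR assignment.  */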
4552
4553 /* Expand a call to one of the builtin functions __builtin_frame_address or
4554 __builtin_return_address. */
4555
4556 static rtx
4557 expand_builtin_frame_address (tree fndecl, tree exp)
4558 {
4559 /* The argument must be a nonnegative integer constant.
4560 It counts the number of frames to scan up the stack.
4561 The value is the return address saved in that frame. */
4562 if (call_expr_nargs (exp) == 0)
4563 /* Warning about missing arg was already issued. */
4564 return const0_rtx;
4565 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4566 {
4567 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4568 error ("invalid argument to %<__builtin_frame_address%>");
4569 else
4570 error ("invalid argument to %<__builtin_return_address%>");
4571 return const0_rtx;
4572 }
4573 else
4574 {
4575 rtx tem
4576 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4577 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4578
4579 /* Some ports cannot access arbitrary stack frames. */
4580 if (tem == NULL)
4581 {
4582 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4583 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4584 else
4585 warning (0, "unsupported argument to %<__builtin_return_address%>");
4586 return const0_rtx;
4587 }
4588
4589 /* For __builtin_frame_address, return what we've got. */
4590 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4591 return tem;
4592
4593 if (!REG_P (tem)
4594 && ! CONSTANT_P (tem))
4595 tem = copy_addr_to_reg (tem);
4596 return tem;
4597 }
4598 }
4599
4600 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4601 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4602 is the same as for allocate_dynamic_stack_space. */
4603
4604 static rtx
4605 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4606 {
4607 rtx op0;
4608 rtx result;
4609 bool valid_arglist;
4610 unsigned int align;
4611 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4612 == BUILT_IN_ALLOCA_WITH_ALIGN);
4613
4614 valid_arglist
4615 = (alloca_with_align
4616 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4617 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4618
4619 if (!valid_arglist)
4620 return NULL_RTX;
4621
4622 /* Compute the argument. */
4623 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4624
4625 /* Compute the alignment. */
4626 align = (alloca_with_align
4627 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4628 : BIGGEST_ALIGNMENT);
4629
4630 /* Allocate the desired space. */
4631 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4632 result = convert_memory_address (ptr_mode, result);
4633
4634 return result;
4635 }
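
/* Usage sketch (hedged): __builtin_alloca (n) requests
   BIGGEST_ALIGNMENT, while __builtin_alloca_with_align (n, a) passes
   its second argument (an alignment in bits, as read from the
   CALL_EXPR above) through to allocate_dynamic_stack_space.  */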
4636
4637 /* Expand a call to bswap builtin in EXP.
4638 Return NULL_RTX if a normal call should be emitted rather than expanding the
4639 function in-line. If convenient, the result should be placed in TARGET.
4640 SUBTARGET may be used as the target for computing one of EXP's operands. */
4641
4642 static rtx
4643 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4644 rtx subtarget)
4645 {
4646 tree arg;
4647 rtx op0;
4648
4649 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4650 return NULL_RTX;
4651
4652 arg = CALL_EXPR_ARG (exp, 0);
4653 op0 = expand_expr (arg,
4654 subtarget && GET_MODE (subtarget) == target_mode
4655 ? subtarget : NULL_RTX,
4656 target_mode, EXPAND_NORMAL);
4657 if (GET_MODE (op0) != target_mode)
4658 op0 = convert_to_mode (target_mode, op0, 1);
4659
4660 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4661
4662 gcc_assert (target);
4663
4664 return convert_to_mode (target_mode, target, 1);
4665 }
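
/* Worked example: __builtin_bswap32 (0x12345678) yields 0x78563412;
   when the target provides a bswap pattern, the expansion above is a
   single bswap_optab unop.  */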
4666
4667 /* Expand a call to a unary builtin in EXP.
4668 Return NULL_RTX if a normal call should be emitted rather than expanding the
4669 function in-line. If convenient, the result should be placed in TARGET.
4670 SUBTARGET may be used as the target for computing one of EXP's operands. */
4671
4672 static rtx
4673 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4674 rtx subtarget, optab op_optab)
4675 {
4676 rtx op0;
4677
4678 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4679 return NULL_RTX;
4680
4681 /* Compute the argument. */
4682 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4683 (subtarget
4684 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4685 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4686 VOIDmode, EXPAND_NORMAL);
4687 /* Compute op, into TARGET if possible.
4688 Set TARGET to wherever the result comes back. */
4689 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4690 op_optab, op0, target, op_optab != clrsb_optab);
4691 gcc_assert (target);
4692
4693 return convert_to_mode (target_mode, target, 0);
4694 }
4695
4696 /* Expand a call to __builtin_expect.  We just return our argument,
4697    as the builtin_expect semantics should already have been applied by
4698    the tree branch prediction pass.  */
4699
4700 static rtx
4701 expand_builtin_expect (tree exp, rtx target)
4702 {
4703 tree arg;
4704
4705 if (call_expr_nargs (exp) < 2)
4706 return const0_rtx;
4707 arg = CALL_EXPR_ARG (exp, 0);
4708
4709 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4710 /* When guessing was done, the hints should be already stripped away. */
4711 gcc_assert (!flag_guess_branch_prob
4712 || optimize == 0 || seen_error ());
4713 return target;
4714 }
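
/* Illustration: by expansion time

     if (__builtin_expect (x, 0)) ...

   reduces to a plain test of X; the probability hint was consumed
   earlier by the tree branch prediction pass.  */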
4715
4716 /* Expand a call to __builtin_assume_aligned.  We just return our first
4717    argument, as the builtin_assume_aligned semantics should already have
4718    been applied by CCP.  */
4719
4720 static rtx
4721 expand_builtin_assume_aligned (tree exp, rtx target)
4722 {
4723 if (call_expr_nargs (exp) < 2)
4724 return const0_rtx;
4725 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4726 EXPAND_NORMAL);
4727 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4728 && (call_expr_nargs (exp) < 3
4729 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4730 return target;
4731 }
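
/* Usage sketch (illustrative):

     void *q = __builtin_assume_aligned (p, 64);

   expands to the value of P alone; the alignment fact was already
   exploited by CCP, so the remaining arguments are only checked for
   side effects above.  */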
4732
4733 void
4734 expand_builtin_trap (void)
4735 {
4736 #ifdef HAVE_trap
4737 if (HAVE_trap)
4738 {
4739 rtx insn = emit_insn (gen_trap ());
4740 /* For trap insns when not accumulating outgoing args force
4741 REG_ARGS_SIZE note to prevent crossjumping of calls with
4742 different args sizes. */
4743 if (!ACCUMULATE_OUTGOING_ARGS)
4744 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4745 }
4746 else
4747 #endif
4748 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4749 emit_barrier ();
4750 }
4751
4752 /* Expand a call to __builtin_unreachable. We do nothing except emit
4753 a barrier saying that control flow will not pass here.
4754
4755 It is the responsibility of the program being compiled to ensure
4756    that control flow never reaches __builtin_unreachable.  */
4757 static void
4758 expand_builtin_unreachable (void)
4759 {
4760 emit_barrier ();
4761 }
4762
4763 /* Expand EXP, a call to fabs, fabsf or fabsl.
4764 Return NULL_RTX if a normal call should be emitted rather than expanding
4765 the function inline. If convenient, the result should be placed
4766 in TARGET. SUBTARGET may be used as the target for computing
4767 the operand. */
4768
4769 static rtx
4770 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4771 {
4772 machine_mode mode;
4773 tree arg;
4774 rtx op0;
4775
4776 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4777 return NULL_RTX;
4778
4779 arg = CALL_EXPR_ARG (exp, 0);
4780 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4781 mode = TYPE_MODE (TREE_TYPE (arg));
4782 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4783 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4784 }
4785
4786 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4787    Return NULL_RTX if a normal call should be emitted rather than expanding the
4788 function inline. If convenient, the result should be placed in TARGET.
4789 SUBTARGET may be used as the target for computing the operand. */
4790
4791 static rtx
4792 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4793 {
4794 rtx op0, op1;
4795 tree arg;
4796
4797 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4798 return NULL_RTX;
4799
4800 arg = CALL_EXPR_ARG (exp, 0);
4801 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4802
4803 arg = CALL_EXPR_ARG (exp, 1);
4804 op1 = expand_normal (arg);
4805
4806 return expand_copysign (op0, op1, target);
4807 }
4808
4809 /* Expand a call to __builtin___clear_cache. */
4810
4811 static rtx
4812 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4813 {
4814 #ifndef HAVE_clear_cache
4815 #ifdef CLEAR_INSN_CACHE
4816 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4817 does something. Just do the default expansion to a call to
4818 __clear_cache(). */
4819 return NULL_RTX;
4820 #else
4821 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4822 does nothing. There is no need to call it. Do nothing. */
4823 return const0_rtx;
4824 #endif /* CLEAR_INSN_CACHE */
4825 #else
4826 /* We have a "clear_cache" insn, and it will handle everything. */
4827 tree begin, end;
4828 rtx begin_rtx, end_rtx;
4829
4830 /* We must not expand to a library call. If we did, any
4831 fallback library function in libgcc that might contain a call to
4832 __builtin___clear_cache() would recurse infinitely. */
4833 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4834 {
4835 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4836 return const0_rtx;
4837 }
4838
4839 if (HAVE_clear_cache)
4840 {
4841 struct expand_operand ops[2];
4842
4843 begin = CALL_EXPR_ARG (exp, 0);
4844 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4845
4846 end = CALL_EXPR_ARG (exp, 1);
4847 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4848
4849 create_address_operand (&ops[0], begin_rtx);
4850 create_address_operand (&ops[1], end_rtx);
4851 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4852 return const0_rtx;
4853 }
4854 return const0_rtx;
4855 #endif /* HAVE_clear_cache */
4856 }
4857
4858 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4859
4860 static rtx
4861 round_trampoline_addr (rtx tramp)
4862 {
4863 rtx temp, addend, mask;
4864
4865 /* If we don't need too much alignment, we'll have been guaranteed
4866 proper alignment by get_trampoline_type. */
4867 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4868 return tramp;
4869
4870 /* Round address up to desired boundary. */
4871 temp = gen_reg_rtx (Pmode);
4872 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4873 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4874
4875 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4876 temp, 0, OPTAB_LIB_WIDEN);
4877 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4878 temp, 0, OPTAB_LIB_WIDEN);
4879
4880 return tramp;
4881 }
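
/* Worked example (illustrative): with a TRAMPOLINE_ALIGNMENT of 128
   bits (16 bytes), ADDEND is 15 and MASK is -16, so an address such as
   0x1003 rounds up as (0x1003 + 15) & -16 == 0x1010.  */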
4882
4883 static rtx
4884 expand_builtin_init_trampoline (tree exp, bool onstack)
4885 {
4886 tree t_tramp, t_func, t_chain;
4887 rtx m_tramp, r_tramp, r_chain, tmp;
4888
4889 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4890 POINTER_TYPE, VOID_TYPE))
4891 return NULL_RTX;
4892
4893 t_tramp = CALL_EXPR_ARG (exp, 0);
4894 t_func = CALL_EXPR_ARG (exp, 1);
4895 t_chain = CALL_EXPR_ARG (exp, 2);
4896
4897 r_tramp = expand_normal (t_tramp);
4898 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4899 MEM_NOTRAP_P (m_tramp) = 1;
4900
4901 /* If ONSTACK, the TRAMP argument should be the address of a field
4902 within the local function's FRAME decl. Either way, let's see if
4903 we can fill in the MEM_ATTRs for this memory. */
4904 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4905 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4906
4907 /* Creator of a heap trampoline is responsible for making sure the
4908 address is aligned to at least STACK_BOUNDARY. Normally malloc
4909 will ensure this anyhow. */
4910 tmp = round_trampoline_addr (r_tramp);
4911 if (tmp != r_tramp)
4912 {
4913 m_tramp = change_address (m_tramp, BLKmode, tmp);
4914 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4915 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4916 }
4917
4918 /* The FUNC argument should be the address of the nested function.
4919 Extract the actual function decl to pass to the hook. */
4920 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4921 t_func = TREE_OPERAND (t_func, 0);
4922 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4923
4924 r_chain = expand_normal (t_chain);
4925
4926 /* Generate insns to initialize the trampoline. */
4927 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4928
4929 if (onstack)
4930 {
4931 trampolines_created = 1;
4932
4933 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4934 "trampoline generated for nested function %qD", t_func);
4935 }
4936
4937 return const0_rtx;
4938 }
4939
4940 static rtx
4941 expand_builtin_adjust_trampoline (tree exp)
4942 {
4943 rtx tramp;
4944
4945 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4946 return NULL_RTX;
4947
4948 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4949 tramp = round_trampoline_addr (tramp);
4950 if (targetm.calls.trampoline_adjust_address)
4951 tramp = targetm.calls.trampoline_adjust_address (tramp);
4952
4953 return tramp;
4954 }
4955
4956 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4957 function. The function first checks whether the back end provides
4958 an insn to implement signbit for the respective mode. If not, it
4959 checks whether the floating point format of the value is such that
4960 the sign bit can be extracted. If that is not the case, the
4961 function returns NULL_RTX to indicate that a normal call should be
4962 emitted rather than expanding the function in-line. EXP is the
4963 expression that is a call to the builtin function; if convenient,
4964 the result should be placed in TARGET. */
4965 static rtx
4966 expand_builtin_signbit (tree exp, rtx target)
4967 {
4968 const struct real_format *fmt;
4969 machine_mode fmode, imode, rmode;
4970 tree arg;
4971 int word, bitpos;
4972 enum insn_code icode;
4973 rtx temp;
4974 location_t loc = EXPR_LOCATION (exp);
4975
4976 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4977 return NULL_RTX;
4978
4979 arg = CALL_EXPR_ARG (exp, 0);
4980 fmode = TYPE_MODE (TREE_TYPE (arg));
4981 rmode = TYPE_MODE (TREE_TYPE (exp));
4982 fmt = REAL_MODE_FORMAT (fmode);
4983
4984 arg = builtin_save_expr (arg);
4985
4986 /* Expand the argument yielding a RTX expression. */
4987 temp = expand_normal (arg);
4988
4989 /* Check if the back end provides an insn that handles signbit for the
4990 argument's mode. */
4991 icode = optab_handler (signbit_optab, fmode);
4992 if (icode != CODE_FOR_nothing)
4993 {
4994 rtx_insn *last = get_last_insn ();
4995 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4996 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4997 return target;
4998 delete_insns_since (last);
4999 }
5000
5001 /* For floating point formats without a sign bit, implement signbit
5002 as "ARG < 0.0". */
5003 bitpos = fmt->signbit_ro;
5004 if (bitpos < 0)
5005 {
5006 /* But we can't do this if the format supports signed zero. */
5007 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5008 return NULL_RTX;
5009
5010 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5011 build_real (TREE_TYPE (arg), dconst0));
5012 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5013 }
5014
5015 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5016 {
5017 imode = int_mode_for_mode (fmode);
5018 if (imode == BLKmode)
5019 return NULL_RTX;
5020 temp = gen_lowpart (imode, temp);
5021 }
5022 else
5023 {
5024 imode = word_mode;
5025 /* Handle targets with different FP word orders. */
5026 if (FLOAT_WORDS_BIG_ENDIAN)
5027 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5028 else
5029 word = bitpos / BITS_PER_WORD;
5030 temp = operand_subword_force (temp, word, fmode);
5031 bitpos = bitpos % BITS_PER_WORD;
5032 }
5033
5034 /* Force the intermediate word_mode (or narrower) result into a
5035 register. This avoids attempting to create paradoxical SUBREGs
5036 of floating point modes below. */
5037 temp = force_reg (imode, temp);
5038
5039 /* If the bitpos is within the "result mode" lowpart, the operation
5040    can be implemented with a single bitwise AND.  Otherwise, we need
5041 a right shift and an AND. */
5042
5043 if (bitpos < GET_MODE_BITSIZE (rmode))
5044 {
5045 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5046
5047 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5048 temp = gen_lowpart (rmode, temp);
5049 temp = expand_binop (rmode, and_optab, temp,
5050 immed_wide_int_const (mask, rmode),
5051 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5052 }
5053 else
5054 {
5055 /* Perform a logical right shift to place the signbit in the least
5056 significant bit, then truncate the result to the desired mode
5057 and mask just this bit. */
5058 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5059 temp = gen_lowpart (rmode, temp);
5060 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5061 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5062 }
5063
5064 return temp;
5065 }
5066
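/* Editorial sketch (not part of GCC): for a format whose sign bit is
   recoverable, the expansion above is roughly equivalent to this
   open-coded signbit for IEEE single precision, assuming a 32-bit
   unsigned int with the sign in bit 31:

     static int
     signbit_sketch (float x)
     {
       unsigned int bits;
       __builtin_memcpy (&bits, &x, sizeof bits);  // the gen_lowpart step
       return (bits >> 31) & 1;                    // shift-and-mask path
     }

   The rmode-vs-imode handling in the code covers the cases where the
   integer view is wider or narrower than the result mode.  */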
5067 /* Expand fork or exec calls. TARGET is the desired target of the
5068 call. EXP is the call. FN is the
5069    identifier of the actual function.  IGNORE is nonzero if the
5070 value is to be ignored. */
5071
5072 static rtx
5073 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5074 {
5075 tree id, decl;
5076 tree call;
5077
5078 /* If we are not profiling, just call the function. */
5079 if (!profile_arc_flag)
5080 return NULL_RTX;
5081
5082 /* Otherwise call the wrapper. This should be equivalent for the rest of
5083      the compiler, so the code does not diverge, and the wrapper may run the
5084 code necessary for keeping the profiling sane. */
5085
5086 switch (DECL_FUNCTION_CODE (fn))
5087 {
5088 case BUILT_IN_FORK:
5089 id = get_identifier ("__gcov_fork");
5090 break;
5091
5092 case BUILT_IN_EXECL:
5093 id = get_identifier ("__gcov_execl");
5094 break;
5095
5096 case BUILT_IN_EXECV:
5097 id = get_identifier ("__gcov_execv");
5098 break;
5099
5100 case BUILT_IN_EXECLP:
5101 id = get_identifier ("__gcov_execlp");
5102 break;
5103
5104 case BUILT_IN_EXECLE:
5105 id = get_identifier ("__gcov_execle");
5106 break;
5107
5108 case BUILT_IN_EXECVP:
5109 id = get_identifier ("__gcov_execvp");
5110 break;
5111
5112 case BUILT_IN_EXECVE:
5113 id = get_identifier ("__gcov_execve");
5114 break;
5115
5116 default:
5117 gcc_unreachable ();
5118 }
5119
5120 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5121 FUNCTION_DECL, id, TREE_TYPE (fn));
5122 DECL_EXTERNAL (decl) = 1;
5123 TREE_PUBLIC (decl) = 1;
5124 DECL_ARTIFICIAL (decl) = 1;
5125 TREE_NOTHROW (decl) = 1;
5126 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5127 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5128 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5129 return expand_call (call, target, ignore);
5130 }
5131
5132
5133 \f
5134 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5135 the pointer in these functions is void*, the tree optimizers may remove
5136 casts. The mode computed in expand_builtin isn't reliable either, due
5137 to __sync_bool_compare_and_swap.
5138
5139 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5140 group of builtins. This gives us log2 of the mode size. */
5141
5142 static inline machine_mode
5143 get_builtin_sync_mode (int fcode_diff)
5144 {
5145 /* The size is not negotiable, so ask not to get BLKmode in return
5146 if the target indicates that a smaller size would be better. */
5147 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5148 }
5149
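/* Editorial illustration (not part of GCC): FCODE_DIFF is log2 of the
   access size in bytes, because the _1/_2/_4/_8/_16 variants are
   consecutive enumerators.  E.g. for the _4 variant:

     fcode_diff = BUILT_IN_SYNC_FETCH_AND_ADD_4
                  - BUILT_IN_SYNC_FETCH_AND_ADD_1;   // == 2
     // BITS_PER_UNIT << 2 == 32 bits, so mode_for_size yields SImode
     // on typical targets.  */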
5150 /* Expand the memory expression LOC and return the appropriate memory operand
5151 for the builtin_sync operations. */
5152
5153 static rtx
5154 get_builtin_sync_mem (tree loc, machine_mode mode)
5155 {
5156 rtx addr, mem;
5157
5158 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5159 addr = convert_memory_address (Pmode, addr);
5160
5161 /* Note that we explicitly do not want any alias information for this
5162 memory, so that we kill all other live memories. Otherwise we don't
5163 satisfy the full barrier semantics of the intrinsic. */
5164 mem = validize_mem (gen_rtx_MEM (mode, addr));
5165
5166   /* The alignment needs to be at least that of the mode.  */
5167 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5168 get_pointer_alignment (loc)));
5169 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5170 MEM_VOLATILE_P (mem) = 1;
5171
5172 return mem;
5173 }
5174
5175 /* Make sure an argument is in the right mode.
5176 EXP is the tree argument.
5177 MODE is the mode it should be in. */
5178
5179 static rtx
5180 expand_expr_force_mode (tree exp, machine_mode mode)
5181 {
5182 rtx val;
5183 machine_mode old_mode;
5184
5185 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5186 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5187 of CONST_INTs, where we know the old_mode only from the call argument. */
5188
5189 old_mode = GET_MODE (val);
5190 if (old_mode == VOIDmode)
5191 old_mode = TYPE_MODE (TREE_TYPE (exp));
5192 val = convert_modes (mode, old_mode, val, 1);
5193 return val;
5194 }
5195
5196
5197 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5198 EXP is the CALL_EXPR. CODE is the rtx code
5199 that corresponds to the arithmetic or logical operation from the name;
5200 an exception here is that NOT actually means NAND. TARGET is an optional
5201 place for us to store the results; AFTER is true if this is the
5202 fetch_and_xxx form. */
5203
5204 static rtx
5205 expand_builtin_sync_operation (machine_mode mode, tree exp,
5206 enum rtx_code code, bool after,
5207 rtx target)
5208 {
5209 rtx val, mem;
5210 location_t loc = EXPR_LOCATION (exp);
5211
5212 if (code == NOT && warn_sync_nand)
5213 {
5214 tree fndecl = get_callee_fndecl (exp);
5215 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5216
5217 static bool warned_f_a_n, warned_n_a_f;
5218
5219 switch (fcode)
5220 {
5221 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5222 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5223 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5224 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5225 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5226 if (warned_f_a_n)
5227 break;
5228
5229 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5230 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5231 warned_f_a_n = true;
5232 break;
5233
5234 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5235 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5236 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5237 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5238 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5239 if (warned_n_a_f)
5240 break;
5241
5242 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5243 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5244 warned_n_a_f = true;
5245 break;
5246
5247 default:
5248 gcc_unreachable ();
5249 }
5250 }
5251
5252 /* Expand the operands. */
5253 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5254 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5255
5256 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5257 after);
5258 }
5259
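/* Editorial illustration (not part of GCC): the GCC 4.4 change warned
   about above made NAND compute ~(old & val) rather than ~old & val.
   A user-level sketch of the documented semantics, with x and v ints
   in scope:

     int old = __sync_fetch_and_nand (&x, v);
     // afterwards: x == ~(old & v); the call returns the previous value.
     // __sync_nand_and_fetch (&x, v) returns the new value instead.  */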
5260 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5261 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5262 true if this is the boolean form. TARGET is a place for us to store the
5263 results; this is NOT optional if IS_BOOL is true. */
5264
5265 static rtx
5266 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5267 bool is_bool, rtx target)
5268 {
5269 rtx old_val, new_val, mem;
5270 rtx *pbool, *poval;
5271
5272 /* Expand the operands. */
5273 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5274 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5275 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5276
5277 pbool = poval = NULL;
5278 if (target != const0_rtx)
5279 {
5280 if (is_bool)
5281 pbool = &target;
5282 else
5283 poval = &target;
5284 }
5285 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5286 false, MEMMODEL_SEQ_CST,
5287 MEMMODEL_SEQ_CST))
5288 return NULL_RTX;
5289
5290 return target;
5291 }
5292
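/* Editorial illustration (not part of GCC): the two user-visible forms
   expanded here differ only in which CAS result they return:

     // bool form: nonzero iff the swap was performed.
     if (__sync_bool_compare_and_swap (&x, expected, desired))
       ...;
     // val form: the prior contents of x, whether or not it swapped.
     int prev = __sync_val_compare_and_swap (&x, expected, desired);  */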
5293 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5294 general form is actually an atomic exchange, and some targets only
5295 support a reduced form with the second argument being a constant 1.
5296 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5297 the results. */
5298
5299 static rtx
5300 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5301 rtx target)
5302 {
5303 rtx val, mem;
5304
5305 /* Expand the operands. */
5306 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5307 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5308
5309 return expand_sync_lock_test_and_set (target, mem, val);
5310 }
5311
5312 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5313
5314 static void
5315 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5316 {
5317 rtx mem;
5318
5319 /* Expand the operands. */
5320 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5321
5322 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5323 }
5324
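/* Editorial illustration (not part of GCC): test_and_set and
   lock_release pair up as the classic spinlock, matching the acquire
   and release semantics used in the expanders above:

     static int lock;

     while (__sync_lock_test_and_set (&lock, 1))  // atomic exchange, acquire
       ;                                          // spin until we stored the 1
     // ... critical section ...
     __sync_lock_release (&lock);                 // store 0, release semantics  */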
5325 /* Given an integer representing an ``enum memmodel'', verify its
5326 correctness and return the memory model enum. */
5327
5328 static enum memmodel
5329 get_memmodel (tree exp)
5330 {
5331 rtx op;
5332 unsigned HOST_WIDE_INT val;
5333
5334 /* If the parameter is not a constant, it's a run time value so we'll just
5335 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5336 if (TREE_CODE (exp) != INTEGER_CST)
5337 return MEMMODEL_SEQ_CST;
5338
5339 op = expand_normal (exp);
5340
5341 val = INTVAL (op);
5342 if (targetm.memmodel_check)
5343 val = targetm.memmodel_check (val);
5344 else if (val & ~MEMMODEL_MASK)
5345 {
5346 warning (OPT_Winvalid_memory_model,
5347 "Unknown architecture specifier in memory model to builtin.");
5348 return MEMMODEL_SEQ_CST;
5349 }
5350
5351 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5352 {
5353 warning (OPT_Winvalid_memory_model,
5354 "invalid memory model argument to builtin");
5355 return MEMMODEL_SEQ_CST;
5356 }
5357
5358 return (enum memmodel) val;
5359 }
5360
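/* Editorial illustration (not part of GCC): callers normally pass one
   of the __ATOMIC_* constants; anything get_memmodel cannot validate is
   demoted to sequential consistency.  runtime_model here is just an
   illustrative non-constant variable:

     __atomic_store_n (&x, 1, __ATOMIC_RELEASE);  // constant, range-checked
     __atomic_store_n (&x, 1, runtime_model);     // non-constant: treated
                                                  // as MEMMODEL_SEQ_CST  */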
5361 /* Expand the __atomic_exchange intrinsic:
5362 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5363 EXP is the CALL_EXPR.
5364 TARGET is an optional place for us to store the results. */
5365
5366 static rtx
5367 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5368 {
5369 rtx val, mem;
5370 enum memmodel model;
5371
5372 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5373 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5374 {
5375 error ("invalid memory model for %<__atomic_exchange%>");
5376 return NULL_RTX;
5377 }
5378
5379 if (!flag_inline_atomics)
5380 return NULL_RTX;
5381
5382 /* Expand the operands. */
5383 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5384 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5385
5386 return expand_atomic_exchange (target, mem, val, model);
5387 }
5388
5389 /* Expand the __atomic_compare_exchange intrinsic:
5390 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5391 TYPE desired, BOOL weak,
5392 enum memmodel success,
5393 enum memmodel failure)
5394 EXP is the CALL_EXPR.
5395 TARGET is an optional place for us to store the results. */
5396
5397 static rtx
5398 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5399 rtx target)
5400 {
5401 rtx expect, desired, mem, oldval;
5402 rtx_code_label *label;
5403 enum memmodel success, failure;
5404 tree weak;
5405 bool is_weak;
5406
5407 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5408 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5409
5410 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5411 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5412 {
5413 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5414 return NULL_RTX;
5415 }
5416
5417 if (failure > success)
5418 {
5419 error ("failure memory model cannot be stronger than success "
5420 "memory model for %<__atomic_compare_exchange%>");
5421 return NULL_RTX;
5422 }
5423
5424 if (!flag_inline_atomics)
5425 return NULL_RTX;
5426
5427 /* Expand the operands. */
5428 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5429
5430 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5431 expect = convert_memory_address (Pmode, expect);
5432 expect = gen_rtx_MEM (mode, expect);
5433 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5434
5435 weak = CALL_EXPR_ARG (exp, 3);
5436 is_weak = false;
5437 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5438 is_weak = true;
5439
5440 if (target == const0_rtx)
5441 target = NULL;
5442
5443   /* Lest the rtl backend create a race condition with an improper store
5444 to memory, always create a new pseudo for OLDVAL. */
5445 oldval = NULL;
5446
5447 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5448 is_weak, success, failure))
5449 return NULL_RTX;
5450
5451 /* Conditionally store back to EXPECT, lest we create a race condition
5452 with an improper store to memory. */
5453 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5454 the normal case where EXPECT is totally private, i.e. a register. At
5455 which point the store can be unconditional. */
5456 label = gen_label_rtx ();
5457 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5458 emit_move_insn (expect, oldval);
5459 emit_label (label);
5460
5461 return target;
5462 }
5463
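/* Editorial illustration (not part of GCC): the conditional store back
   to EXPECT above is what gives the user-level builtin its "expected is
   updated on failure" behavior, as in this sketch (desired in scope):

     int expected = 0;
     while (!__atomic_compare_exchange_n (&x, &expected, desired,
                                          1,                  // weak
                                          __ATOMIC_ACQ_REL,   // success
                                          __ATOMIC_ACQUIRE))  // failure
       ;  // on failure, expected was refreshed with the current value  */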
5464 /* Expand the __atomic_load intrinsic:
5465 TYPE __atomic_load (TYPE *object, enum memmodel)
5466 EXP is the CALL_EXPR.
5467 TARGET is an optional place for us to store the results. */
5468
5469 static rtx
5470 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5471 {
5472 rtx mem;
5473 enum memmodel model;
5474
5475 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5476 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5477 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5478 {
5479 error ("invalid memory model for %<__atomic_load%>");
5480 return NULL_RTX;
5481 }
5482
5483 if (!flag_inline_atomics)
5484 return NULL_RTX;
5485
5486 /* Expand the operand. */
5487 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5488
5489 return expand_atomic_load (target, mem, model);
5490 }
5491
5492
5493 /* Expand the __atomic_store intrinsic:
5494 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5495 EXP is the CALL_EXPR.
5496 TARGET is an optional place for us to store the results. */
5497
5498 static rtx
5499 expand_builtin_atomic_store (machine_mode mode, tree exp)
5500 {
5501 rtx mem, val;
5502 enum memmodel model;
5503
5504 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5505 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5506 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5507 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5508 {
5509 error ("invalid memory model for %<__atomic_store%>");
5510 return NULL_RTX;
5511 }
5512
5513 if (!flag_inline_atomics)
5514 return NULL_RTX;
5515
5516 /* Expand the operands. */
5517 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5518 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5519
5520 return expand_atomic_store (mem, val, model, false);
5521 }
5522
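/* Editorial illustration (not part of GCC): the model checks above
   mirror the C11 rules: loads reject release-flavored models, and
   stores accept only relaxed, release, or seq_cst:

     int v = __atomic_load_n (&x, __ATOMIC_ACQUIRE);
     __atomic_store_n (&x, v + 1, __ATOMIC_RELEASE);  */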
5523 /* Expand the __atomic_fetch_XXX intrinsic:
5524 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5525 EXP is the CALL_EXPR.
5526 TARGET is an optional place for us to store the results.
5527    CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
5528 FETCH_AFTER is true if returning the result of the operation.
5529 FETCH_AFTER is false if returning the value before the operation.
5530 IGNORE is true if the result is not used.
5531 EXT_CALL is the correct builtin for an external call if this cannot be
5532 resolved to an instruction sequence. */
5533
5534 static rtx
5535 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5536 enum rtx_code code, bool fetch_after,
5537 bool ignore, enum built_in_function ext_call)
5538 {
5539 rtx val, mem, ret;
5540 enum memmodel model;
5541 tree fndecl;
5542 tree addr;
5543
5544 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5545
5546 /* Expand the operands. */
5547 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5548 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5549
5550 /* Only try generating instructions if inlining is turned on. */
5551 if (flag_inline_atomics)
5552 {
5553 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5554 if (ret)
5555 return ret;
5556 }
5557
5558   /* If the call need not be redirected to a different library routine,
       return.  */
5559 if (ext_call == BUILT_IN_NONE)
5560 return NULL_RTX;
5561
5562 /* Change the call to the specified function. */
5563 fndecl = get_callee_fndecl (exp);
5564 addr = CALL_EXPR_FN (exp);
5565 STRIP_NOPS (addr);
5566
5567 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5568 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5569
5570 /* Expand the call here so we can emit trailing code. */
5571 ret = expand_call (exp, target, ignore);
5572
5573 /* Replace the original function just in case it matters. */
5574 TREE_OPERAND (addr, 0) = fndecl;
5575
5576 /* Then issue the arithmetic correction to return the right result. */
5577 if (!ignore)
5578 {
5579 if (code == NOT)
5580 {
5581 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5582 OPTAB_LIB_WIDEN);
5583 ret = expand_simple_unop (mode, NOT, ret, target, true);
5584 }
5585 else
5586 ret = expand_simple_binop (mode, code, ret, val, target, true,
5587 OPTAB_LIB_WIDEN);
5588 }
5589 return ret;
5590 }
5591
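/* Editorial illustration (not part of GCC): when an OP_fetch builtin
   falls back to the FETCH_op library routine named by EXT_CALL, the
   trailing correction above rebuilds the "after" value from the
   returned "before" value, e.g.:

     // add_fetch:  result = ret + val
     // nand_fetch: result = ~(ret & val)   (the AND-then-NOT pair)  */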
5592
5593 #ifndef HAVE_atomic_clear
5594 # define HAVE_atomic_clear 0
5595 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5596 #endif
5597
5598 /* Expand an atomic clear operation.
5599    void __atomic_clear (BOOL *obj, enum memmodel)
5600 EXP is the call expression. */
5601
5602 static rtx
5603 expand_builtin_atomic_clear (tree exp)
5604 {
5605 machine_mode mode;
5606 rtx mem, ret;
5607 enum memmodel model;
5608
5609 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5610 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5611 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5612
5613 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5614 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5615 {
5616 error ("invalid memory model for %<__atomic_store%>");
5617 return const0_rtx;
5618 }
5619
5620 if (HAVE_atomic_clear)
5621 {
5622 emit_insn (gen_atomic_clear (mem, model));
5623 return const0_rtx;
5624 }
5625
5626   /* Try issuing an __atomic_store, allowing a fallback to a
5627      __sync_lock_release pattern if the target provides one.  The only way this can
5628 fail is if the bool type is larger than a word size. Unlikely, but
5629 handle it anyway for completeness. Assume a single threaded model since
5630 there is no atomic support in this case, and no barriers are required. */
5631 ret = expand_atomic_store (mem, const0_rtx, model, true);
5632 if (!ret)
5633 emit_move_insn (mem, const0_rtx);
5634 return const0_rtx;
5635 }
5636
5637 /* Expand an atomic test_and_set operation.
5638    bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5639 EXP is the call expression. */
5640
5641 static rtx
5642 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5643 {
5644 rtx mem;
5645 enum memmodel model;
5646 machine_mode mode;
5647
5648 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5649 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5650 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5651
5652 return expand_atomic_test_and_set (target, mem, model);
5653 }
5654
5655
5656 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5657 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5658
5659 static tree
5660 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5661 {
5662 int size;
5663 machine_mode mode;
5664 unsigned int mode_align, type_align;
5665
5666 if (TREE_CODE (arg0) != INTEGER_CST)
5667 return NULL_TREE;
5668
5669 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5670 mode = mode_for_size (size, MODE_INT, 0);
5671 mode_align = GET_MODE_ALIGNMENT (mode);
5672
5673 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5674 type_align = mode_align;
5675 else
5676 {
5677 tree ttype = TREE_TYPE (arg1);
5678
5679 /* This function is usually invoked and folded immediately by the front
5680 end before anything else has a chance to look at it. The pointer
5681 parameter at this point is usually cast to a void *, so check for that
5682 and look past the cast. */
5683 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5684 && VOID_TYPE_P (TREE_TYPE (ttype)))
5685 arg1 = TREE_OPERAND (arg1, 0);
5686
5687 ttype = TREE_TYPE (arg1);
5688 gcc_assert (POINTER_TYPE_P (ttype));
5689
5690 /* Get the underlying type of the object. */
5691 ttype = TREE_TYPE (ttype);
5692 type_align = TYPE_ALIGN (ttype);
5693 }
5694
5695   /* If the object has smaller alignment, the lock free routines cannot
5696 be used. */
5697 if (type_align < mode_align)
5698 return boolean_false_node;
5699
5700 /* Check if a compare_and_swap pattern exists for the mode which represents
5701 the required size. The pattern is not allowed to fail, so the existence
5702 of the pattern indicates support is present. */
5703 if (can_compare_and_swap_p (mode, true))
5704 return boolean_true_node;
5705 else
5706 return boolean_false_node;
5707 }
5708
5709 /* Return true if the parameters to call EXP represent an object which will
5710 always generate lock free instructions. The first argument represents the
5711 size of the object, and the second parameter is a pointer to the object
5712 itself. If NULL is passed for the object, then the result is based on
5713 typical alignment for an object of the specified size. Otherwise return
5714 false. */
5715
5716 static rtx
5717 expand_builtin_atomic_always_lock_free (tree exp)
5718 {
5719 tree size;
5720 tree arg0 = CALL_EXPR_ARG (exp, 0);
5721 tree arg1 = CALL_EXPR_ARG (exp, 1);
5722
5723 if (TREE_CODE (arg0) != INTEGER_CST)
5724 {
5725 error ("non-constant argument 1 to __atomic_always_lock_free");
5726 return const0_rtx;
5727 }
5728
5729 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5730 if (size == boolean_true_node)
5731 return const1_rtx;
5732 return const0_rtx;
5733 }
5734
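/* Editorial illustration (not part of GCC): because the result folds to
   a compile-time constant, it can feed a static assertion.  Passing a
   null object pointer selects the typical-alignment path:

     _Static_assert (__atomic_always_lock_free (sizeof (int), 0),
                     "int atomics expected to be lock-free here");  */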
5735 /* Return true if it can be determined that the object ARG1 of size ARG0
5736    is lock free on this architecture; otherwise return NULL_TREE.  */
5737
5738 static tree
5739 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5740 {
5741 if (!flag_inline_atomics)
5742 return NULL_TREE;
5743
5744 /* If it isn't always lock free, don't generate a result. */
5745 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5746 return boolean_true_node;
5747
5748 return NULL_TREE;
5749 }
5750
5751 /* Return true if the parameters to call EXP represent an object which will
5752 always generate lock free instructions. The first argument represents the
5753 size of the object, and the second parameter is a pointer to the object
5754 itself. If NULL is passed for the object, then the result is based on
5755 typical alignment for an object of the specified size. Otherwise return
5756    NULL_RTX.  */
5757
5758 static rtx
5759 expand_builtin_atomic_is_lock_free (tree exp)
5760 {
5761 tree size;
5762 tree arg0 = CALL_EXPR_ARG (exp, 0);
5763 tree arg1 = CALL_EXPR_ARG (exp, 1);
5764
5765 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5766 {
5767 error ("non-integer argument 1 to __atomic_is_lock_free");
5768 return NULL_RTX;
5769 }
5770
5771 if (!flag_inline_atomics)
5772 return NULL_RTX;
5773
5774 /* If the value is known at compile time, return the RTX for it. */
5775 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5776 if (size == boolean_true_node)
5777 return const1_rtx;
5778
5779 return NULL_RTX;
5780 }
5781
5782 /* Expand the __atomic_thread_fence intrinsic:
5783 void __atomic_thread_fence (enum memmodel)
5784 EXP is the CALL_EXPR. */
5785
5786 static void
5787 expand_builtin_atomic_thread_fence (tree exp)
5788 {
5789 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5790 expand_mem_thread_fence (model);
5791 }
5792
5793 /* Expand the __atomic_signal_fence intrinsic:
5794 void __atomic_signal_fence (enum memmodel)
5795 EXP is the CALL_EXPR. */
5796
5797 static void
5798 expand_builtin_atomic_signal_fence (tree exp)
5799 {
5800 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5801 expand_mem_signal_fence (model);
5802 }
5803
5804 /* Expand the __sync_synchronize intrinsic. */
5805
5806 static void
5807 expand_builtin_sync_synchronize (void)
5808 {
5809 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5810 }
5811
5812 static rtx
5813 expand_builtin_thread_pointer (tree exp, rtx target)
5814 {
5815 enum insn_code icode;
5816 if (!validate_arglist (exp, VOID_TYPE))
5817 return const0_rtx;
5818 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5819 if (icode != CODE_FOR_nothing)
5820 {
5821 struct expand_operand op;
5822       /* If the target is not suitable then create a new target.  */
5823 if (target == NULL_RTX
5824 || !REG_P (target)
5825 || GET_MODE (target) != Pmode)
5826 target = gen_reg_rtx (Pmode);
5827 create_output_operand (&op, target, Pmode);
5828 expand_insn (icode, 1, &op);
5829 return target;
5830 }
5831 error ("__builtin_thread_pointer is not supported on this target");
5832 return const0_rtx;
5833 }
5834
5835 static void
5836 expand_builtin_set_thread_pointer (tree exp)
5837 {
5838 enum insn_code icode;
5839 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5840 return;
5841 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5842 if (icode != CODE_FOR_nothing)
5843 {
5844 struct expand_operand op;
5845 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5846 Pmode, EXPAND_NORMAL);
5847 create_input_operand (&op, val, Pmode);
5848 expand_insn (icode, 1, &op);
5849 return;
5850 }
5851 error ("__builtin_set_thread_pointer is not supported on this target");
5852 }
5853
5854 \f
5855 /* Emit code to restore the current value of the stack.  */
5856
5857 static void
5858 expand_stack_restore (tree var)
5859 {
5860 rtx_insn *prev;
5861 rtx sa = expand_normal (var);
5862
5863 sa = convert_memory_address (Pmode, sa);
5864
5865 prev = get_last_insn ();
5866 emit_stack_restore (SAVE_BLOCK, sa);
5867 fixup_args_size_notes (prev, get_last_insn (), 0);
5868 }
5869
5870
5871 /* Emit code to save the current value of the stack.  */
5872
5873 static rtx
5874 expand_stack_save (void)
5875 {
5876 rtx ret = NULL_RTX;
5877
5878 do_pending_stack_adjust ();
5879 emit_stack_save (SAVE_BLOCK, &ret);
5880 return ret;
5881 }
5882
5883 /* Expand an expression EXP that calls a built-in function,
5884 with result going to TARGET if that's convenient
5885 (and in mode MODE if that's convenient).
5886 SUBTARGET may be used as the target for computing one of EXP's operands.
5887 IGNORE is nonzero if the value is to be ignored. */
5888
5889 rtx
5890 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5891 int ignore)
5892 {
5893 tree fndecl = get_callee_fndecl (exp);
5894 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5895 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5896 int flags;
5897
5898 /* When ASan is enabled, we don't want to expand some memory/string
5899 builtins and rely on libsanitizer's hooks. This allows us to avoid
5900      redundant checks and be sure that a possible overflow will be detected
5901 by ASan. */
5902
5903 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5904 return expand_call (exp, target, ignore);
5905
5906 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5907 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5908
5909 /* When not optimizing, generate calls to library functions for a certain
5910 set of builtins. */
5911 if (!optimize
5912 && !called_as_built_in (fndecl)
5913 && fcode != BUILT_IN_FORK
5914 && fcode != BUILT_IN_EXECL
5915 && fcode != BUILT_IN_EXECV
5916 && fcode != BUILT_IN_EXECLP
5917 && fcode != BUILT_IN_EXECLE
5918 && fcode != BUILT_IN_EXECVP
5919 && fcode != BUILT_IN_EXECVE
5920 && fcode != BUILT_IN_ALLOCA
5921 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5922 && fcode != BUILT_IN_FREE
5923 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5924 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5925 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5926 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5927 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5928 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5929 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5930 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5931 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5932 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5933 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5934 && fcode != BUILT_IN_CHKP_BNDRET)
5935 return expand_call (exp, target, ignore);
5936
5937 /* The built-in function expanders test for target == const0_rtx
5938 to determine whether the function's result will be ignored. */
5939 if (ignore)
5940 target = const0_rtx;
5941
5942 /* If the result of a pure or const built-in function is ignored, and
5943 none of its arguments are volatile, we can avoid expanding the
5944 built-in call and just evaluate the arguments for side-effects. */
5945 if (target == const0_rtx
5946 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5947 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5948 {
5949 bool volatilep = false;
5950 tree arg;
5951 call_expr_arg_iterator iter;
5952
5953 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5954 if (TREE_THIS_VOLATILE (arg))
5955 {
5956 volatilep = true;
5957 break;
5958 }
5959
5960 if (! volatilep)
5961 {
5962 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5963 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5964 return const0_rtx;
5965 }
5966 }
5967
5968 /* expand_builtin_with_bounds is supposed to be used for
5969 instrumented builtin calls. */
5970 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5971
5972 switch (fcode)
5973 {
5974 CASE_FLT_FN (BUILT_IN_FABS):
5975 case BUILT_IN_FABSD32:
5976 case BUILT_IN_FABSD64:
5977 case BUILT_IN_FABSD128:
5978 target = expand_builtin_fabs (exp, target, subtarget);
5979 if (target)
5980 return target;
5981 break;
5982
5983 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5984 target = expand_builtin_copysign (exp, target, subtarget);
5985 if (target)
5986 return target;
5987 break;
5988
5989 /* Just do a normal library call if we were unable to fold
5990 the values. */
5991 CASE_FLT_FN (BUILT_IN_CABS):
5992 break;
5993
5994 CASE_FLT_FN (BUILT_IN_EXP):
5995 CASE_FLT_FN (BUILT_IN_EXP10):
5996 CASE_FLT_FN (BUILT_IN_POW10):
5997 CASE_FLT_FN (BUILT_IN_EXP2):
5998 CASE_FLT_FN (BUILT_IN_EXPM1):
5999 CASE_FLT_FN (BUILT_IN_LOGB):
6000 CASE_FLT_FN (BUILT_IN_LOG):
6001 CASE_FLT_FN (BUILT_IN_LOG10):
6002 CASE_FLT_FN (BUILT_IN_LOG2):
6003 CASE_FLT_FN (BUILT_IN_LOG1P):
6004 CASE_FLT_FN (BUILT_IN_TAN):
6005 CASE_FLT_FN (BUILT_IN_ASIN):
6006 CASE_FLT_FN (BUILT_IN_ACOS):
6007 CASE_FLT_FN (BUILT_IN_ATAN):
6008 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6009 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6010 because of possible accuracy problems. */
6011 if (! flag_unsafe_math_optimizations)
6012 break;
6013 CASE_FLT_FN (BUILT_IN_SQRT):
6014 CASE_FLT_FN (BUILT_IN_FLOOR):
6015 CASE_FLT_FN (BUILT_IN_CEIL):
6016 CASE_FLT_FN (BUILT_IN_TRUNC):
6017 CASE_FLT_FN (BUILT_IN_ROUND):
6018 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6019 CASE_FLT_FN (BUILT_IN_RINT):
6020 target = expand_builtin_mathfn (exp, target, subtarget);
6021 if (target)
6022 return target;
6023 break;
6024
6025 CASE_FLT_FN (BUILT_IN_FMA):
6026 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6027 if (target)
6028 return target;
6029 break;
6030
6031 CASE_FLT_FN (BUILT_IN_ILOGB):
6032 if (! flag_unsafe_math_optimizations)
6033 break;
6034 CASE_FLT_FN (BUILT_IN_ISINF):
6035 CASE_FLT_FN (BUILT_IN_FINITE):
6036 case BUILT_IN_ISFINITE:
6037 case BUILT_IN_ISNORMAL:
6038 target = expand_builtin_interclass_mathfn (exp, target);
6039 if (target)
6040 return target;
6041 break;
6042
6043 CASE_FLT_FN (BUILT_IN_ICEIL):
6044 CASE_FLT_FN (BUILT_IN_LCEIL):
6045 CASE_FLT_FN (BUILT_IN_LLCEIL):
6046 CASE_FLT_FN (BUILT_IN_LFLOOR):
6047 CASE_FLT_FN (BUILT_IN_IFLOOR):
6048 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6049 target = expand_builtin_int_roundingfn (exp, target);
6050 if (target)
6051 return target;
6052 break;
6053
6054 CASE_FLT_FN (BUILT_IN_IRINT):
6055 CASE_FLT_FN (BUILT_IN_LRINT):
6056 CASE_FLT_FN (BUILT_IN_LLRINT):
6057 CASE_FLT_FN (BUILT_IN_IROUND):
6058 CASE_FLT_FN (BUILT_IN_LROUND):
6059 CASE_FLT_FN (BUILT_IN_LLROUND):
6060 target = expand_builtin_int_roundingfn_2 (exp, target);
6061 if (target)
6062 return target;
6063 break;
6064
6065 CASE_FLT_FN (BUILT_IN_POWI):
6066 target = expand_builtin_powi (exp, target);
6067 if (target)
6068 return target;
6069 break;
6070
6071 CASE_FLT_FN (BUILT_IN_ATAN2):
6072 CASE_FLT_FN (BUILT_IN_LDEXP):
6073 CASE_FLT_FN (BUILT_IN_SCALB):
6074 CASE_FLT_FN (BUILT_IN_SCALBN):
6075 CASE_FLT_FN (BUILT_IN_SCALBLN):
6076 if (! flag_unsafe_math_optimizations)
6077 break;
6078
6079 CASE_FLT_FN (BUILT_IN_FMOD):
6080 CASE_FLT_FN (BUILT_IN_REMAINDER):
6081 CASE_FLT_FN (BUILT_IN_DREM):
6082 CASE_FLT_FN (BUILT_IN_POW):
6083 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6084 if (target)
6085 return target;
6086 break;
6087
6088 CASE_FLT_FN (BUILT_IN_CEXPI):
6089 target = expand_builtin_cexpi (exp, target);
6090 gcc_assert (target);
6091 return target;
6092
6093 CASE_FLT_FN (BUILT_IN_SIN):
6094 CASE_FLT_FN (BUILT_IN_COS):
6095 if (! flag_unsafe_math_optimizations)
6096 break;
6097 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6098 if (target)
6099 return target;
6100 break;
6101
6102 CASE_FLT_FN (BUILT_IN_SINCOS):
6103 if (! flag_unsafe_math_optimizations)
6104 break;
6105 target = expand_builtin_sincos (exp);
6106 if (target)
6107 return target;
6108 break;
6109
6110 case BUILT_IN_APPLY_ARGS:
6111 return expand_builtin_apply_args ();
6112
6113 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6114 FUNCTION with a copy of the parameters described by
6115 ARGUMENTS, and ARGSIZE. It returns a block of memory
6116 allocated on the stack into which is stored all the registers
6117 that might possibly be used for returning the result of a
6118 function. ARGUMENTS is the value returned by
6119 __builtin_apply_args. ARGSIZE is the number of bytes of
6120 arguments that must be copied. ??? How should this value be
6121 computed? We'll also need a safe worst case value for varargs
6122 functions. */
6123 case BUILT_IN_APPLY:
6124 if (!validate_arglist (exp, POINTER_TYPE,
6125 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6126 && !validate_arglist (exp, REFERENCE_TYPE,
6127 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6128 return const0_rtx;
6129 else
6130 {
6131 rtx ops[3];
6132
6133 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6134 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6135 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6136
6137 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6138 }
6139
6140 /* __builtin_return (RESULT) causes the function to return the
6141 value described by RESULT. RESULT is address of the block of
6142 memory returned by __builtin_apply. */
6143 case BUILT_IN_RETURN:
6144 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6145 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6146 return const0_rtx;
6147
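/* Editorial illustration (not part of GCC): together these builtins let
   a wrapper forward its own incoming arguments, e.g. with a hypothetical
   wrapped_fn:

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply ((void (*) ()) wrapped_fn, args, 64);
     __builtin_return (res);

   The 64 is a caller-chosen worst-case argument size in bytes; as the
   ??? note above says, there is no general safe formula for it.  */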
6148 case BUILT_IN_SAVEREGS:
6149 return expand_builtin_saveregs ();
6150
6151 case BUILT_IN_VA_ARG_PACK:
6152 /* All valid uses of __builtin_va_arg_pack () are removed during
6153 inlining. */
6154 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6155 return const0_rtx;
6156
6157 case BUILT_IN_VA_ARG_PACK_LEN:
6158 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6159 inlining. */
6160 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6161 return const0_rtx;
6162
6163 /* Return the address of the first anonymous stack arg. */
6164 case BUILT_IN_NEXT_ARG:
6165 if (fold_builtin_next_arg (exp, false))
6166 return const0_rtx;
6167 return expand_builtin_next_arg ();
6168
6169 case BUILT_IN_CLEAR_CACHE:
6170 target = expand_builtin___clear_cache (exp);
6171 if (target)
6172 return target;
6173 break;
6174
6175 case BUILT_IN_CLASSIFY_TYPE:
6176 return expand_builtin_classify_type (exp);
6177
6178 case BUILT_IN_CONSTANT_P:
6179 return const0_rtx;
6180
6181 case BUILT_IN_FRAME_ADDRESS:
6182 case BUILT_IN_RETURN_ADDRESS:
6183 return expand_builtin_frame_address (fndecl, exp);
6184
6185     /* Return the address of the area where the structure is returned,
6186        or 0 otherwise.  */
6187 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6188 if (call_expr_nargs (exp) != 0
6189 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6190 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6191 return const0_rtx;
6192 else
6193 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6194
6195 case BUILT_IN_ALLOCA:
6196 case BUILT_IN_ALLOCA_WITH_ALIGN:
6197 /* If the allocation stems from the declaration of a variable-sized
6198 object, it cannot accumulate. */
6199 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6200 if (target)
6201 return target;
6202 break;
6203
6204 case BUILT_IN_STACK_SAVE:
6205 return expand_stack_save ();
6206
6207 case BUILT_IN_STACK_RESTORE:
6208 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6209 return const0_rtx;
6210
6211 case BUILT_IN_BSWAP16:
6212 case BUILT_IN_BSWAP32:
6213 case BUILT_IN_BSWAP64:
6214 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6215 if (target)
6216 return target;
6217 break;
6218
6219 CASE_INT_FN (BUILT_IN_FFS):
6220 target = expand_builtin_unop (target_mode, exp, target,
6221 subtarget, ffs_optab);
6222 if (target)
6223 return target;
6224 break;
6225
6226 CASE_INT_FN (BUILT_IN_CLZ):
6227 target = expand_builtin_unop (target_mode, exp, target,
6228 subtarget, clz_optab);
6229 if (target)
6230 return target;
6231 break;
6232
6233 CASE_INT_FN (BUILT_IN_CTZ):
6234 target = expand_builtin_unop (target_mode, exp, target,
6235 subtarget, ctz_optab);
6236 if (target)
6237 return target;
6238 break;
6239
6240 CASE_INT_FN (BUILT_IN_CLRSB):
6241 target = expand_builtin_unop (target_mode, exp, target,
6242 subtarget, clrsb_optab);
6243 if (target)
6244 return target;
6245 break;
6246
6247 CASE_INT_FN (BUILT_IN_POPCOUNT):
6248 target = expand_builtin_unop (target_mode, exp, target,
6249 subtarget, popcount_optab);
6250 if (target)
6251 return target;
6252 break;
6253
6254 CASE_INT_FN (BUILT_IN_PARITY):
6255 target = expand_builtin_unop (target_mode, exp, target,
6256 subtarget, parity_optab);
6257 if (target)
6258 return target;
6259 break;
6260
6261 case BUILT_IN_STRLEN:
6262 target = expand_builtin_strlen (exp, target, target_mode);
6263 if (target)
6264 return target;
6265 break;
6266
6267 case BUILT_IN_STRCPY:
6268 target = expand_builtin_strcpy (exp, target);
6269 if (target)
6270 return target;
6271 break;
6272
6273 case BUILT_IN_STRNCPY:
6274 target = expand_builtin_strncpy (exp, target);
6275 if (target)
6276 return target;
6277 break;
6278
6279 case BUILT_IN_STPCPY:
6280 target = expand_builtin_stpcpy (exp, target, mode);
6281 if (target)
6282 return target;
6283 break;
6284
6285 case BUILT_IN_MEMCPY:
6286 target = expand_builtin_memcpy (exp, target);
6287 if (target)
6288 return target;
6289 break;
6290
6291 case BUILT_IN_MEMPCPY:
6292 target = expand_builtin_mempcpy (exp, target, mode);
6293 if (target)
6294 return target;
6295 break;
6296
6297 case BUILT_IN_MEMSET:
6298 target = expand_builtin_memset (exp, target, mode);
6299 if (target)
6300 return target;
6301 break;
6302
6303 case BUILT_IN_BZERO:
6304 target = expand_builtin_bzero (exp);
6305 if (target)
6306 return target;
6307 break;
6308
6309 case BUILT_IN_STRCMP:
6310 target = expand_builtin_strcmp (exp, target);
6311 if (target)
6312 return target;
6313 break;
6314
6315 case BUILT_IN_STRNCMP:
6316 target = expand_builtin_strncmp (exp, target, mode);
6317 if (target)
6318 return target;
6319 break;
6320
6321 case BUILT_IN_BCMP:
6322 case BUILT_IN_MEMCMP:
6323 target = expand_builtin_memcmp (exp, target, mode);
6324 if (target)
6325 return target;
6326 break;
6327
6328 case BUILT_IN_SETJMP:
6329 /* This should have been lowered to the builtins below. */
6330 gcc_unreachable ();
6331
6332 case BUILT_IN_SETJMP_SETUP:
6333 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6334 and the receiver label. */
6335 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6336 {
6337 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6338 VOIDmode, EXPAND_NORMAL);
6339 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6340 rtx label_r = label_rtx (label);
6341
6342 /* This is copied from the handling of non-local gotos. */
6343 expand_builtin_setjmp_setup (buf_addr, label_r);
6344 nonlocal_goto_handler_labels
6345 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6346 nonlocal_goto_handler_labels);
6347 /* ??? Do not let expand_label treat us as such since we would
6348 not want to be both on the list of non-local labels and on
6349 the list of forced labels. */
6350 FORCED_LABEL (label) = 0;
6351 return const0_rtx;
6352 }
6353 break;
6354
6355 case BUILT_IN_SETJMP_RECEIVER:
6356 /* __builtin_setjmp_receiver is passed the receiver label. */
6357 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6358 {
6359 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6360 rtx label_r = label_rtx (label);
6361
6362 expand_builtin_setjmp_receiver (label_r);
6363 return const0_rtx;
6364 }
6365 break;
6366
6367 /* __builtin_longjmp is passed a pointer to an array of five words.
6368 It's similar to the C library longjmp function but works with
6369 __builtin_setjmp above. */
6370 case BUILT_IN_LONGJMP:
6371 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6372 {
6373 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6374 VOIDmode, EXPAND_NORMAL);
6375 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6376
6377 if (value != const1_rtx)
6378 {
6379 error ("%<__builtin_longjmp%> second argument must be 1");
6380 return const0_rtx;
6381 }
6382
6383 expand_builtin_longjmp (buf_addr, value);
6384 return const0_rtx;
6385 }
6386 break;
6387
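/* Editorial illustration (not part of GCC): the buffer is five words
   and the second argument must be the literal 1, as enforced above:

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);   // control re-enters the setjmp  */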
6388 case BUILT_IN_NONLOCAL_GOTO:
6389 target = expand_builtin_nonlocal_goto (exp);
6390 if (target)
6391 return target;
6392 break;
6393
6394 /* This updates the setjmp buffer that is its argument with the value
6395 of the current stack pointer. */
6396 case BUILT_IN_UPDATE_SETJMP_BUF:
6397 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6398 {
6399 rtx buf_addr
6400 = expand_normal (CALL_EXPR_ARG (exp, 0));
6401
6402 expand_builtin_update_setjmp_buf (buf_addr);
6403 return const0_rtx;
6404 }
6405 break;
6406
6407 case BUILT_IN_TRAP:
6408 expand_builtin_trap ();
6409 return const0_rtx;
6410
6411 case BUILT_IN_UNREACHABLE:
6412 expand_builtin_unreachable ();
6413 return const0_rtx;
6414
6415 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6416 case BUILT_IN_SIGNBITD32:
6417 case BUILT_IN_SIGNBITD64:
6418 case BUILT_IN_SIGNBITD128:
6419 target = expand_builtin_signbit (exp, target);
6420 if (target)
6421 return target;
6422 break;
6423
6424 /* Various hooks for the DWARF 2 __throw routine. */
6425 case BUILT_IN_UNWIND_INIT:
6426 expand_builtin_unwind_init ();
6427 return const0_rtx;
6428 case BUILT_IN_DWARF_CFA:
6429 return virtual_cfa_rtx;
6430 #ifdef DWARF2_UNWIND_INFO
6431 case BUILT_IN_DWARF_SP_COLUMN:
6432 return expand_builtin_dwarf_sp_column ();
6433 case BUILT_IN_INIT_DWARF_REG_SIZES:
6434 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6435 return const0_rtx;
6436 #endif
6437 case BUILT_IN_FROB_RETURN_ADDR:
6438 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6439 case BUILT_IN_EXTRACT_RETURN_ADDR:
6440 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6441 case BUILT_IN_EH_RETURN:
6442 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6443 CALL_EXPR_ARG (exp, 1));
6444 return const0_rtx;
6445 #ifdef EH_RETURN_DATA_REGNO
6446 case BUILT_IN_EH_RETURN_DATA_REGNO:
6447 return expand_builtin_eh_return_data_regno (exp);
6448 #endif
6449 case BUILT_IN_EXTEND_POINTER:
6450 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6451 case BUILT_IN_EH_POINTER:
6452 return expand_builtin_eh_pointer (exp);
6453 case BUILT_IN_EH_FILTER:
6454 return expand_builtin_eh_filter (exp);
6455 case BUILT_IN_EH_COPY_VALUES:
6456 return expand_builtin_eh_copy_values (exp);
6457
6458 case BUILT_IN_VA_START:
6459 return expand_builtin_va_start (exp);
6460 case BUILT_IN_VA_END:
6461 return expand_builtin_va_end (exp);
6462 case BUILT_IN_VA_COPY:
6463 return expand_builtin_va_copy (exp);
6464 case BUILT_IN_EXPECT:
6465 return expand_builtin_expect (exp, target);
6466 case BUILT_IN_ASSUME_ALIGNED:
6467 return expand_builtin_assume_aligned (exp, target);
6468 case BUILT_IN_PREFETCH:
6469 expand_builtin_prefetch (exp);
6470 return const0_rtx;
6471
6472 case BUILT_IN_INIT_TRAMPOLINE:
6473 return expand_builtin_init_trampoline (exp, true);
6474 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6475 return expand_builtin_init_trampoline (exp, false);
6476 case BUILT_IN_ADJUST_TRAMPOLINE:
6477 return expand_builtin_adjust_trampoline (exp);
6478
6479 case BUILT_IN_FORK:
6480 case BUILT_IN_EXECL:
6481 case BUILT_IN_EXECV:
6482 case BUILT_IN_EXECLP:
6483 case BUILT_IN_EXECLE:
6484 case BUILT_IN_EXECVP:
6485 case BUILT_IN_EXECVE:
6486 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6487 if (target)
6488 return target;
6489 break;
6490
6491 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6492 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6493 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6494 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6495 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6496 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6497 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6498 if (target)
6499 return target;
6500 break;
6501
6502 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6503 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6504 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6505 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6506 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6507 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6508 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6509 if (target)
6510 return target;
6511 break;
6512
6513 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6514 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6515 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6516 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6517 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6518 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6519 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6520 if (target)
6521 return target;
6522 break;
6523
6524 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6525 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6526 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6527 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6528 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6529 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6530 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6531 if (target)
6532 return target;
6533 break;
6534
6535 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6536 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6537 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6538 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6539 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6540 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6541 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6542 if (target)
6543 return target;
6544 break;
6545
6546 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6547 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6548 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6549 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6550 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6551 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6552 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6553 if (target)
6554 return target;
6555 break;
6556
6557 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6558 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6559 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6560 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6561 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6562 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6563 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6564 if (target)
6565 return target;
6566 break;
6567
6568 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6569 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6570 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6571 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6572 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6573 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6574 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6575 if (target)
6576 return target;
6577 break;
6578
6579 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6580 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6581 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6582 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6583 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6584 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6585 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6586 if (target)
6587 return target;
6588 break;
6589
6590 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6591 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6592 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6593 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6594 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6595 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6596 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6597 if (target)
6598 return target;
6599 break;
6600
6601 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6602 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6603 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6604 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6605 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6606 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6607 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6608 if (target)
6609 return target;
6610 break;
6611
6612 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6613 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6614 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6615 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6616 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6617 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6618 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6619 if (target)
6620 return target;
6621 break;
6622
6623 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6624 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6625 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6626 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6627 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6628 if (mode == VOIDmode)
6629 mode = TYPE_MODE (boolean_type_node);
6630 if (!target || !register_operand (target, mode))
6631 target = gen_reg_rtx (mode);
6632
6633 mode = get_builtin_sync_mode
6634 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6635 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6636 if (target)
6637 return target;
6638 break;
6639
6640 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6641 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6642 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6643 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6644 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6645 mode = get_builtin_sync_mode
6646 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6647 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6648 if (target)
6649 return target;
6650 break;
6651
6652 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6653 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6654 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6655 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6656 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6657 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6658 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6659 if (target)
6660 return target;
6661 break;
6662
6663 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6664 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6665 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6666 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6667 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6668 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6669 expand_builtin_sync_lock_release (mode, exp);
6670 return const0_rtx;
6671
6672 case BUILT_IN_SYNC_SYNCHRONIZE:
6673 expand_builtin_sync_synchronize ();
6674 return const0_rtx;
6675
6676 case BUILT_IN_ATOMIC_EXCHANGE_1:
6677 case BUILT_IN_ATOMIC_EXCHANGE_2:
6678 case BUILT_IN_ATOMIC_EXCHANGE_4:
6679 case BUILT_IN_ATOMIC_EXCHANGE_8:
6680 case BUILT_IN_ATOMIC_EXCHANGE_16:
6681 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6682 target = expand_builtin_atomic_exchange (mode, exp, target);
6683 if (target)
6684 return target;
6685 break;
6686
6687 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6688 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6689 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6690 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6691 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6692 {
6693 unsigned int nargs, z;
6694 vec<tree, va_gc> *vec;
6695
6696 mode =
6697 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6698 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6699 if (target)
6700 return target;
6701
6702 /* If this is turned into an external library call, the weak parameter
6703 must be dropped to match the expected parameter list. */
6704 nargs = call_expr_nargs (exp);
6705 vec_alloc (vec, nargs - 1);
6706 for (z = 0; z < 3; z++)
6707 vec->quick_push (CALL_EXPR_ARG (exp, z));
6708 /* Skip the boolean weak parameter. */
6709 for (z = 4; z < 6; z++)
6710 vec->quick_push (CALL_EXPR_ARG (exp, z));
6711 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6712 break;
6713 }
6714
6715 case BUILT_IN_ATOMIC_LOAD_1:
6716 case BUILT_IN_ATOMIC_LOAD_2:
6717 case BUILT_IN_ATOMIC_LOAD_4:
6718 case BUILT_IN_ATOMIC_LOAD_8:
6719 case BUILT_IN_ATOMIC_LOAD_16:
6720 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6721 target = expand_builtin_atomic_load (mode, exp, target);
6722 if (target)
6723 return target;
6724 break;
6725
6726 case BUILT_IN_ATOMIC_STORE_1:
6727 case BUILT_IN_ATOMIC_STORE_2:
6728 case BUILT_IN_ATOMIC_STORE_4:
6729 case BUILT_IN_ATOMIC_STORE_8:
6730 case BUILT_IN_ATOMIC_STORE_16:
6731 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6732 target = expand_builtin_atomic_store (mode, exp);
6733 if (target)
6734 return const0_rtx;
6735 break;
6736
6737 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6738 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6739 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6740 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6741 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6742 {
6743 enum built_in_function lib;
6744 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6745 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6746 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6747 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6748 ignore, lib);
6749 if (target)
6750 return target;
6751 break;
6752 }
6753 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6754 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6755 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6756 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6757 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6758 {
6759 enum built_in_function lib;
6760 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6761 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6762 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6763 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6764 ignore, lib);
6765 if (target)
6766 return target;
6767 break;
6768 }
6769 case BUILT_IN_ATOMIC_AND_FETCH_1:
6770 case BUILT_IN_ATOMIC_AND_FETCH_2:
6771 case BUILT_IN_ATOMIC_AND_FETCH_4:
6772 case BUILT_IN_ATOMIC_AND_FETCH_8:
6773 case BUILT_IN_ATOMIC_AND_FETCH_16:
6774 {
6775 enum built_in_function lib;
6776 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6777 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6778 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6779 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6780 ignore, lib);
6781 if (target)
6782 return target;
6783 break;
6784 }
6785 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6786 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6787 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6788 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6789 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6790 {
6791 enum built_in_function lib;
6792 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6793 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6794 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6795 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6796 ignore, lib);
6797 if (target)
6798 return target;
6799 break;
6800 }
6801 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6802 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6803 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6804 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6805 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6806 {
6807 enum built_in_function lib;
6808 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6809 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6810 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6811 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6812 ignore, lib);
6813 if (target)
6814 return target;
6815 break;
6816 }
6817 case BUILT_IN_ATOMIC_OR_FETCH_1:
6818 case BUILT_IN_ATOMIC_OR_FETCH_2:
6819 case BUILT_IN_ATOMIC_OR_FETCH_4:
6820 case BUILT_IN_ATOMIC_OR_FETCH_8:
6821 case BUILT_IN_ATOMIC_OR_FETCH_16:
6822 {
6823 enum built_in_function lib;
6824 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6825 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6826 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6827 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6828 ignore, lib);
6829 if (target)
6830 return target;
6831 break;
6832 }
6833 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6834 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6835 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6836 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6837 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6838 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6839 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6840 ignore, BUILT_IN_NONE);
6841 if (target)
6842 return target;
6843 break;
6844
6845 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6846 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6847 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6848 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6849 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6850 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6851 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6852 ignore, BUILT_IN_NONE);
6853 if (target)
6854 return target;
6855 break;
6856
6857 case BUILT_IN_ATOMIC_FETCH_AND_1:
6858 case BUILT_IN_ATOMIC_FETCH_AND_2:
6859 case BUILT_IN_ATOMIC_FETCH_AND_4:
6860 case BUILT_IN_ATOMIC_FETCH_AND_8:
6861 case BUILT_IN_ATOMIC_FETCH_AND_16:
6862 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6863 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6864 ignore, BUILT_IN_NONE);
6865 if (target)
6866 return target;
6867 break;
6868
6869 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6870 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6871 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6872 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6873 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6874 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6875 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6876 ignore, BUILT_IN_NONE);
6877 if (target)
6878 return target;
6879 break;
6880
6881 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6882 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6883 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6884 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6885 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6886 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6887 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6888 ignore, BUILT_IN_NONE);
6889 if (target)
6890 return target;
6891 break;
6892
6893 case BUILT_IN_ATOMIC_FETCH_OR_1:
6894 case BUILT_IN_ATOMIC_FETCH_OR_2:
6895 case BUILT_IN_ATOMIC_FETCH_OR_4:
6896 case BUILT_IN_ATOMIC_FETCH_OR_8:
6897 case BUILT_IN_ATOMIC_FETCH_OR_16:
6898 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6899 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6900 ignore, BUILT_IN_NONE);
6901 if (target)
6902 return target;
6903 break;
6904
6905 case BUILT_IN_ATOMIC_TEST_AND_SET:
6906 return expand_builtin_atomic_test_and_set (exp, target);
6907
6908 case BUILT_IN_ATOMIC_CLEAR:
6909 return expand_builtin_atomic_clear (exp);
6910
6911 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6912 return expand_builtin_atomic_always_lock_free (exp);
6913
6914 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6915 target = expand_builtin_atomic_is_lock_free (exp);
6916 if (target)
6917 return target;
6918 break;
6919
6920 case BUILT_IN_ATOMIC_THREAD_FENCE:
6921 expand_builtin_atomic_thread_fence (exp);
6922 return const0_rtx;
6923
6924 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6925 expand_builtin_atomic_signal_fence (exp);
6926 return const0_rtx;
6927
6928 case BUILT_IN_OBJECT_SIZE:
6929 return expand_builtin_object_size (exp);
6930
6931 case BUILT_IN_MEMCPY_CHK:
6932 case BUILT_IN_MEMPCPY_CHK:
6933 case BUILT_IN_MEMMOVE_CHK:
6934 case BUILT_IN_MEMSET_CHK:
6935 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6936 if (target)
6937 return target;
6938 break;
6939
6940 case BUILT_IN_STRCPY_CHK:
6941 case BUILT_IN_STPCPY_CHK:
6942 case BUILT_IN_STRNCPY_CHK:
6943 case BUILT_IN_STPNCPY_CHK:
6944 case BUILT_IN_STRCAT_CHK:
6945 case BUILT_IN_STRNCAT_CHK:
6946 case BUILT_IN_SNPRINTF_CHK:
6947 case BUILT_IN_VSNPRINTF_CHK:
6948 maybe_emit_chk_warning (exp, fcode);
6949 break;
6950
6951 case BUILT_IN_SPRINTF_CHK:
6952 case BUILT_IN_VSPRINTF_CHK:
6953 maybe_emit_sprintf_chk_warning (exp, fcode);
6954 break;
6955
6956 case BUILT_IN_FREE:
6957 if (warn_free_nonheap_object)
6958 maybe_emit_free_warning (exp);
6959 break;
6960
6961 case BUILT_IN_THREAD_POINTER:
6962 return expand_builtin_thread_pointer (exp, target);
6963
6964 case BUILT_IN_SET_THREAD_POINTER:
6965 expand_builtin_set_thread_pointer (exp);
6966 return const0_rtx;
6967
6968 case BUILT_IN_CILK_DETACH:
6969 expand_builtin_cilk_detach (exp);
6970 return const0_rtx;
6971
6972 case BUILT_IN_CILK_POP_FRAME:
6973 expand_builtin_cilk_pop_frame (exp);
6974 return const0_rtx;
6975
6976 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6977 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6978 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6979 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6980 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6981 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6982 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6983 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6984 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6985 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6986 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6987 /* We allow user CHKP builtins if Pointer Bounds
6988 Checker is off. */
6989 if (!chkp_function_instrumented_p (current_function_decl))
6990 {
6991 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6992 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6993 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6994 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6995 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6996 return expand_normal (CALL_EXPR_ARG (exp, 0));
6997 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6998 return expand_normal (size_zero_node);
6999 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7000 return expand_normal (size_int (-1));
7001 else
7002 return const0_rtx;
7003 }
7004 /* FALLTHROUGH */
7005
7006 case BUILT_IN_CHKP_BNDMK:
7007 case BUILT_IN_CHKP_BNDSTX:
7008 case BUILT_IN_CHKP_BNDCL:
7009 case BUILT_IN_CHKP_BNDCU:
7010 case BUILT_IN_CHKP_BNDLDX:
7011 case BUILT_IN_CHKP_BNDRET:
7012 case BUILT_IN_CHKP_INTERSECT:
7013 case BUILT_IN_CHKP_NARROW:
7014 case BUILT_IN_CHKP_EXTRACT_LOWER:
7015 case BUILT_IN_CHKP_EXTRACT_UPPER:
7016 /* A software implementation of the Pointer Bounds Checker is not
7017 yet implemented; target support is required. */
7018 error ("Your target platform does not support -fcheck-pointer-bounds");
7019 break;
7020
7021 default: /* Just do a library call if the builtin is unknown. */
7022 break;
7023 }
7024
7025 /* The switch statement above can drop through to cause the function
7026 to be called normally. */
7027 return expand_call (exp, target, ignore);
7028 }
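
/* Added note, illustrative only: for the __atomic_<op>_fetch cases in the
   switch above, LIB names the matching __atomic_fetch_<op> builtin so
   that, when no inline atomic pattern exists, the expander can fall back
   to the external fetch_<op> call and recompute the <op>_fetch result
   from its return value (for example, add-then-fetch is fetch-then-add
   plus the operand).  */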
7029
7030 /* Similar to expand_builtin but is used for instrumented calls. */
7031
7032 rtx
7033 expand_builtin_with_bounds (tree exp, rtx target,
7034 rtx subtarget ATTRIBUTE_UNUSED,
7035 machine_mode mode, int ignore)
7036 {
7037 tree fndecl = get_callee_fndecl (exp);
7038 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7039
7040 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7041
7042 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7043 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7044
7045 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7046 && fcode < END_CHKP_BUILTINS);
7047
7048 switch (fcode)
7049 {
7050 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7051 target = expand_builtin_memcpy_with_bounds (exp, target);
7052 if (target)
7053 return target;
7054 break;
7055
7056 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7057 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7058 if (target)
7059 return target;
7060 break;
7061
7062 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7063 target = expand_builtin_memset_with_bounds (exp, target, mode);
7064 if (target)
7065 return target;
7066 break;
7067
7068 default:
7069 break;
7070 }
7071
7072 /* The switch statement above can drop through to cause the function
7073 to be called normally. */
7074 return expand_call (exp, target, ignore);
7075 }
7076
7077 /* Determine whether a tree node represents a call to a built-in
7078 function. If the tree T is a call to a built-in function with
7079 the right number of arguments of the appropriate types, return
7080 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7081 Otherwise the return value is END_BUILTINS. */
7082
7083 enum built_in_function
7084 builtin_mathfn_code (const_tree t)
7085 {
7086 const_tree fndecl, arg, parmlist;
7087 const_tree argtype, parmtype;
7088 const_call_expr_arg_iterator iter;
7089
7090 if (TREE_CODE (t) != CALL_EXPR
7091 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7092 return END_BUILTINS;
7093
7094 fndecl = get_callee_fndecl (t);
7095 if (fndecl == NULL_TREE
7096 || TREE_CODE (fndecl) != FUNCTION_DECL
7097 || ! DECL_BUILT_IN (fndecl)
7098 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7099 return END_BUILTINS;
7100
7101 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7102 init_const_call_expr_arg_iterator (t, &iter);
7103 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7104 {
7105 /* If a function doesn't take a variable number of arguments,
7106 the last element in the list will have type `void'. */
7107 parmtype = TREE_VALUE (parmlist);
7108 if (VOID_TYPE_P (parmtype))
7109 {
7110 if (more_const_call_expr_args_p (&iter))
7111 return END_BUILTINS;
7112 return DECL_FUNCTION_CODE (fndecl);
7113 }
7114
7115 if (! more_const_call_expr_args_p (&iter))
7116 return END_BUILTINS;
7117
7118 arg = next_const_call_expr_arg (&iter);
7119 argtype = TREE_TYPE (arg);
7120
7121 if (SCALAR_FLOAT_TYPE_P (parmtype))
7122 {
7123 if (! SCALAR_FLOAT_TYPE_P (argtype))
7124 return END_BUILTINS;
7125 }
7126 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7127 {
7128 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7129 return END_BUILTINS;
7130 }
7131 else if (POINTER_TYPE_P (parmtype))
7132 {
7133 if (! POINTER_TYPE_P (argtype))
7134 return END_BUILTINS;
7135 }
7136 else if (INTEGRAL_TYPE_P (parmtype))
7137 {
7138 if (! INTEGRAL_TYPE_P (argtype))
7139 return END_BUILTINS;
7140 }
7141 else
7142 return END_BUILTINS;
7143 }
7144
7145 /* Variable-length argument list. */
7146 return DECL_FUNCTION_CODE (fndecl);
7147 }
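
/* Illustrative sketch, added for clarity: for a tree representing

     double d = sqrt (x);   where x is a double

   builtin_mathfn_code returns BUILT_IN_SQRT, since the callee is the
   normal builtin sqrt and the argument type matches the parameter list.
   A call whose argument types do not line up (say, a pointer where a
   double is expected) yields END_BUILTINS instead.  */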
7148
7149 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7150 evaluate to a constant. */
7151
7152 static tree
7153 fold_builtin_constant_p (tree arg)
7154 {
7155 /* We return 1 for a numeric type that's known to be a constant
7156 value at compile-time or for an aggregate type that's a
7157 literal constant. */
7158 STRIP_NOPS (arg);
7159
7160 /* If we know this is a constant, return the constant one. */
7161 if (CONSTANT_CLASS_P (arg)
7162 || (TREE_CODE (arg) == CONSTRUCTOR
7163 && TREE_CONSTANT (arg)))
7164 return integer_one_node;
7165 if (TREE_CODE (arg) == ADDR_EXPR)
7166 {
7167 tree op = TREE_OPERAND (arg, 0);
7168 if (TREE_CODE (op) == STRING_CST
7169 || (TREE_CODE (op) == ARRAY_REF
7170 && integer_zerop (TREE_OPERAND (op, 1))
7171 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7172 return integer_one_node;
7173 }
7174
7175 /* If this expression has side effects, show we don't know it to be a
7176 constant. Likewise if it's a pointer or aggregate type, since in
7177 those cases we only want literals, as those are only optimized
7178 when generating RTL, not later.
7179 And finally, if we are compiling an initializer, not code, we
7180 need to return a definite result now; there's not going to be any
7181 more optimization done. */
7182 if (TREE_SIDE_EFFECTS (arg)
7183 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7184 || POINTER_TYPE_P (TREE_TYPE (arg))
7185 || cfun == 0
7186 || folding_initializer
7187 || force_folding_builtin_constant_p)
7188 return integer_zero_node;
7189
7190 return NULL_TREE;
7191 }
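
/* Hedged examples of the folds above (added, not original):

     __builtin_constant_p (42)     -> 1   (CONSTANT_CLASS_P node)
     __builtin_constant_p ("abc")  -> 1   (address of a STRING_CST)
     __builtin_constant_p (x++)    -> 0   (argument has side effects)

   A still-undecided argument yields NULL_TREE so later passes can try
   again once more optimization has happened.  */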
7192
7193 /* Create a call to builtin_expect with PRED and EXPECTED (and, when
7194 non-null, PREDICTOR) as its arguments and return it as a truthvalue. */
7195
7196 static tree
7197 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7198 tree predictor)
7199 {
7200 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7201
7202 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7203 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7204 ret_type = TREE_TYPE (TREE_TYPE (fn));
7205 pred_type = TREE_VALUE (arg_types);
7206 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7207
7208 pred = fold_convert_loc (loc, pred_type, pred);
7209 expected = fold_convert_loc (loc, expected_type, expected);
7210 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7211 predictor);
7212
7213 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7214 build_int_cst (ret_type, 0));
7215 }
7216
7217 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7218 NULL_TREE if no simplification is possible. */
7219
7220 tree
7221 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7222 {
7223 tree inner, fndecl, inner_arg0;
7224 enum tree_code code;
7225
7226 /* Distribute the expected value over short-circuiting operators.
7227 See through the cast from truthvalue_type_node to long. */
7228 inner_arg0 = arg0;
7229 while (CONVERT_EXPR_P (inner_arg0)
7230 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7231 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7232 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7233
7234 /* If this is a builtin_expect within a builtin_expect, keep the
7235 inner one. See through a comparison against a constant. It
7236 might have been added to create a truthvalue. */
7237 inner = inner_arg0;
7238
7239 if (COMPARISON_CLASS_P (inner)
7240 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7241 inner = TREE_OPERAND (inner, 0);
7242
7243 if (TREE_CODE (inner) == CALL_EXPR
7244 && (fndecl = get_callee_fndecl (inner))
7245 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7246 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7247 return arg0;
7248
7249 inner = inner_arg0;
7250 code = TREE_CODE (inner);
7251 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7252 {
7253 tree op0 = TREE_OPERAND (inner, 0);
7254 tree op1 = TREE_OPERAND (inner, 1);
7255
7256 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7257 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7258 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7259
7260 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7261 }
7262
7263 /* If the argument isn't invariant then there's nothing else we can do. */
7264 if (!TREE_CONSTANT (inner_arg0))
7265 return NULL_TREE;
7266
7267 /* If we expect that a comparison against the argument will fold to
7268 a constant, return the constant. In practice, this means a true
7269 constant or the address of a non-weak symbol. */
7270 inner = inner_arg0;
7271 STRIP_NOPS (inner);
7272 if (TREE_CODE (inner) == ADDR_EXPR)
7273 {
7274 do
7275 {
7276 inner = TREE_OPERAND (inner, 0);
7277 }
7278 while (TREE_CODE (inner) == COMPONENT_REF
7279 || TREE_CODE (inner) == ARRAY_REF);
7280 if ((TREE_CODE (inner) == VAR_DECL
7281 || TREE_CODE (inner) == FUNCTION_DECL)
7282 && DECL_WEAK (inner))
7283 return NULL_TREE;
7284 }
7285
7286 /* Otherwise, ARG0 already has the proper type for the return value. */
7287 return arg0;
7288 }
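
/* Illustration (added): the distribution step above rewrites, at the
   tree level, something like

     __builtin_expect (a && b, 1)

   into the equivalent of

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that each short-circuit operand carries its own expectation.  */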
7289
7290 /* Fold a call to __builtin_classify_type with argument ARG. */
7291
7292 static tree
7293 fold_builtin_classify_type (tree arg)
7294 {
7295 if (arg == 0)
7296 return build_int_cst (integer_type_node, no_type_class);
7297
7298 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7299 }
7300
7301 /* Fold a call to __builtin_strlen with argument ARG. */
7302
7303 static tree
7304 fold_builtin_strlen (location_t loc, tree type, tree arg)
7305 {
7306 if (!validate_arg (arg, POINTER_TYPE))
7307 return NULL_TREE;
7308 else
7309 {
7310 tree len = c_strlen (arg, 0);
7311
7312 if (len)
7313 return fold_convert_loc (loc, type, len);
7314
7315 return NULL_TREE;
7316 }
7317 }
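
/* Example (added): when c_strlen can see the string, a call such as

     size_t n = strlen ("hello");

   folds directly to the constant 5 converted to TYPE.  */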
7318
7319 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7320
7321 static tree
7322 fold_builtin_inf (location_t loc, tree type, int warn)
7323 {
7324 REAL_VALUE_TYPE real;
7325
7326 /* __builtin_inff is intended to be usable to define INFINITY on all
7327 targets. If an infinity is not available, INFINITY expands "to a
7328 positive constant of type float that overflows at translation
7329 time", footnote "In this case, using INFINITY will violate the
7330 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7331 Thus we pedwarn to ensure this constraint violation is
7332 diagnosed. */
7333 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7334 pedwarn (loc, 0, "target format does not support infinity");
7335
7336 real_inf (&real);
7337 return build_real (type, real);
7338 }
7339
7340 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7341
7342 static tree
7343 fold_builtin_nan (tree arg, tree type, int quiet)
7344 {
7345 REAL_VALUE_TYPE real;
7346 const char *str;
7347
7348 if (!validate_arg (arg, POINTER_TYPE))
7349 return NULL_TREE;
7350 str = c_getstr (arg);
7351 if (!str)
7352 return NULL_TREE;
7353
7354 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7355 return NULL_TREE;
7356
7357 return build_real (type, real);
7358 }
7359
7360 /* Return true if the floating point expression T has an integer value.
7361 We also allow +Inf, -Inf and NaN to be considered integer values. */
7362
7363 static bool
7364 integer_valued_real_p (tree t)
7365 {
7366 switch (TREE_CODE (t))
7367 {
7368 case FLOAT_EXPR:
7369 return true;
7370
7371 case ABS_EXPR:
7372 case SAVE_EXPR:
7373 return integer_valued_real_p (TREE_OPERAND (t, 0));
7374
7375 case COMPOUND_EXPR:
7376 case MODIFY_EXPR:
7377 case BIND_EXPR:
7378 return integer_valued_real_p (TREE_OPERAND (t, 1));
7379
7380 case PLUS_EXPR:
7381 case MINUS_EXPR:
7382 case MULT_EXPR:
7383 case MIN_EXPR:
7384 case MAX_EXPR:
7385 return integer_valued_real_p (TREE_OPERAND (t, 0))
7386 && integer_valued_real_p (TREE_OPERAND (t, 1));
7387
7388 case COND_EXPR:
7389 return integer_valued_real_p (TREE_OPERAND (t, 1))
7390 && integer_valued_real_p (TREE_OPERAND (t, 2));
7391
7392 case REAL_CST:
7393 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7394
7395 CASE_CONVERT:
7396 {
7397 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7398 if (TREE_CODE (type) == INTEGER_TYPE)
7399 return true;
7400 if (TREE_CODE (type) == REAL_TYPE)
7401 return integer_valued_real_p (TREE_OPERAND (t, 0));
7402 break;
7403 }
7404
7405 case CALL_EXPR:
7406 switch (builtin_mathfn_code (t))
7407 {
7408 CASE_FLT_FN (BUILT_IN_CEIL):
7409 CASE_FLT_FN (BUILT_IN_FLOOR):
7410 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7411 CASE_FLT_FN (BUILT_IN_RINT):
7412 CASE_FLT_FN (BUILT_IN_ROUND):
7413 CASE_FLT_FN (BUILT_IN_TRUNC):
7414 return true;
7415
7416 CASE_FLT_FN (BUILT_IN_FMIN):
7417 CASE_FLT_FN (BUILT_IN_FMAX):
7418 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7419 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7420
7421 default:
7422 break;
7423 }
7424 break;
7425
7426 default:
7427 break;
7428 }
7429 return false;
7430 }
7431
7432 /* FNDECL is assumed to be a builtin where truncation can be propagated
7433 across (for instance floor((double)f) == (double)floorf (f)).
7434 Do the transformation for a call with argument ARG. */
7435
7436 static tree
7437 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7438 {
7439 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7440
7441 if (!validate_arg (arg, REAL_TYPE))
7442 return NULL_TREE;
7443
7444 /* Integer rounding functions are idempotent. */
7445 if (fcode == builtin_mathfn_code (arg))
7446 return arg;
7447
7448 /* If the argument is already integer valued, and we don't need to worry
7449 about setting errno, there's no need to perform rounding. */
7450 if (! flag_errno_math && integer_valued_real_p (arg))
7451 return arg;
7452
7453 if (optimize)
7454 {
7455 tree arg0 = strip_float_extensions (arg);
7456 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7457 tree newtype = TREE_TYPE (arg0);
7458 tree decl;
7459
7460 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7461 && (decl = mathfn_built_in (newtype, fcode)))
7462 return fold_convert_loc (loc, ftype,
7463 build_call_expr_loc (loc, decl, 1,
7464 fold_convert_loc (loc,
7465 newtype,
7466 arg0)));
7467 }
7468 return NULL_TREE;
7469 }
7470
7471 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7472 the argument, for instance lround((double)f) -> lroundf (f).
7473 Do the transformation for a call with argument ARG. */
7474
7475 static tree
7476 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7477 {
7478 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7479
7480 if (!validate_arg (arg, REAL_TYPE))
7481 return NULL_TREE;
7482
7483 /* If the argument is already integer valued, and we don't need to worry
7484 about setting errno, there's no need to perform rounding. */
7485 if (! flag_errno_math && integer_valued_real_p (arg))
7486 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7487 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7488
7489 if (optimize)
7490 {
7491 tree ftype = TREE_TYPE (arg);
7492 tree arg0 = strip_float_extensions (arg);
7493 tree newtype = TREE_TYPE (arg0);
7494 tree decl;
7495
7496 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7497 && (decl = mathfn_built_in (newtype, fcode)))
7498 return build_call_expr_loc (loc, decl, 1,
7499 fold_convert_loc (loc, newtype, arg0));
7500 }
7501
7502 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7503 sizeof (int) == sizeof (long). */
7504 if (TYPE_PRECISION (integer_type_node)
7505 == TYPE_PRECISION (long_integer_type_node))
7506 {
7507 tree newfn = NULL_TREE;
7508 switch (fcode)
7509 {
7510 CASE_FLT_FN (BUILT_IN_ICEIL):
7511 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7512 break;
7513
7514 CASE_FLT_FN (BUILT_IN_IFLOOR):
7515 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7516 break;
7517
7518 CASE_FLT_FN (BUILT_IN_IROUND):
7519 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7520 break;
7521
7522 CASE_FLT_FN (BUILT_IN_IRINT):
7523 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7524 break;
7525
7526 default:
7527 break;
7528 }
7529
7530 if (newfn)
7531 {
7532 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7533 return fold_convert_loc (loc,
7534 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7535 }
7536 }
7537
7538 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7539 sizeof (long long) == sizeof (long). */
7540 if (TYPE_PRECISION (long_long_integer_type_node)
7541 == TYPE_PRECISION (long_integer_type_node))
7542 {
7543 tree newfn = NULL_TREE;
7544 switch (fcode)
7545 {
7546 CASE_FLT_FN (BUILT_IN_LLCEIL):
7547 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7548 break;
7549
7550 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7551 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7552 break;
7553
7554 CASE_FLT_FN (BUILT_IN_LLROUND):
7555 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7556 break;
7557
7558 CASE_FLT_FN (BUILT_IN_LLRINT):
7559 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7560 break;
7561
7562 default:
7563 break;
7564 }
7565
7566 if (newfn)
7567 {
7568 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7569 return fold_convert_loc (loc,
7570 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7571 }
7572 }
7573
7574 return NULL_TREE;
7575 }
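
/* Illustration (added): on targets where int and long share a precision,
   iround (x) is canonicalized to lround (x), and llround (x) to
   lround (x) where long long matches long, so later code needs to
   handle only one entry point per distinct precision.  */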
7576
7577 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7578 return type. Return NULL_TREE if no simplification can be made. */
7579
7580 static tree
7581 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7582 {
7583 tree res;
7584
7585 if (!validate_arg (arg, COMPLEX_TYPE)
7586 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7587 return NULL_TREE;
7588
7589 /* Calculate the result when the argument is a constant. */
7590 if (TREE_CODE (arg) == COMPLEX_CST
7591 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7592 type, mpfr_hypot)))
7593 return res;
7594
7595 if (TREE_CODE (arg) == COMPLEX_EXPR)
7596 {
7597 tree real = TREE_OPERAND (arg, 0);
7598 tree imag = TREE_OPERAND (arg, 1);
7599
7600 /* If either part is zero, cabs is fabs of the other. */
7601 if (real_zerop (real))
7602 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7603 if (real_zerop (imag))
7604 return fold_build1_loc (loc, ABS_EXPR, type, real);
7605
7606 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7607 if (flag_unsafe_math_optimizations
7608 && operand_equal_p (real, imag, OEP_PURE_SAME))
7609 {
7610 const REAL_VALUE_TYPE sqrt2_trunc
7611 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7612 STRIP_NOPS (real);
7613 return fold_build2_loc (loc, MULT_EXPR, type,
7614 fold_build1_loc (loc, ABS_EXPR, type, real),
7615 build_real (type, sqrt2_trunc));
7616 }
7617 }
7618
7619 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7620 if (TREE_CODE (arg) == NEGATE_EXPR
7621 || TREE_CODE (arg) == CONJ_EXPR)
7622 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7623
7624 /* Don't do this when optimizing for size. */
7625 if (flag_unsafe_math_optimizations
7626 && optimize && optimize_function_for_speed_p (cfun))
7627 {
7628 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7629
7630 if (sqrtfn != NULL_TREE)
7631 {
7632 tree rpart, ipart, result;
7633
7634 arg = builtin_save_expr (arg);
7635
7636 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7637 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7638
7639 rpart = builtin_save_expr (rpart);
7640 ipart = builtin_save_expr (ipart);
7641
7642 result = fold_build2_loc (loc, PLUS_EXPR, type,
7643 fold_build2_loc (loc, MULT_EXPR, type,
7644 rpart, rpart),
7645 fold_build2_loc (loc, MULT_EXPR, type,
7646 ipart, ipart));
7647
7648 return build_call_expr_loc (loc, sqrtfn, 1, result);
7649 }
7650 }
7651
7652 return NULL_TREE;
7653 }
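
/* Hedged illustration of the cabs folds above (added):

     cabs (-z)        -> cabs (z)
     cabs (conj (z))  -> cabs (z)

   unconditionally, and with -funsafe-math-optimizations additionally

     cabs (x + x*I)   -> fabs (x) * sqrt (2)

   while, when further optimizing for speed, a generic z expands to
   sqrt (__real z * __real z + __imag z * __imag z).  */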
7654
7655 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7656 complex tree type of the result. If NEG is true, the imaginary
7657 zero is negative. */
7658
7659 static tree
7660 build_complex_cproj (tree type, bool neg)
7661 {
7662 REAL_VALUE_TYPE rinf, rzero = dconst0;
7663
7664 real_inf (&rinf);
7665 rzero.sign = neg;
7666 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7667 build_real (TREE_TYPE (type), rzero));
7668 }
7669
7670 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7671 return type. Return NULL_TREE if no simplification can be made. */
7672
7673 static tree
7674 fold_builtin_cproj (location_t loc, tree arg, tree type)
7675 {
7676 if (!validate_arg (arg, COMPLEX_TYPE)
7677 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7678 return NULL_TREE;
7679
7680 /* If there are no infinities, return arg. */
7681 if (! HONOR_INFINITIES (type))
7682 return non_lvalue_loc (loc, arg);
7683
7684 /* Calculate the result when the argument is a constant. */
7685 if (TREE_CODE (arg) == COMPLEX_CST)
7686 {
7687 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7688 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7689
7690 if (real_isinf (real) || real_isinf (imag))
7691 return build_complex_cproj (type, imag->sign);
7692 else
7693 return arg;
7694 }
7695 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7696 {
7697 tree real = TREE_OPERAND (arg, 0);
7698 tree imag = TREE_OPERAND (arg, 1);
7699
7700 STRIP_NOPS (real);
7701 STRIP_NOPS (imag);
7702
7703 /* If the real part is inf and the imag part is known to be
7704 nonnegative, return (inf + 0i). Remember side-effects are
7705 possible in the imag part. */
7706 if (TREE_CODE (real) == REAL_CST
7707 && real_isinf (TREE_REAL_CST_PTR (real))
7708 && tree_expr_nonnegative_p (imag))
7709 return omit_one_operand_loc (loc, type,
7710 build_complex_cproj (type, false),
7711 arg);
7712
7713 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7714 Remember side-effects are possible in the real part. */
7715 if (TREE_CODE (imag) == REAL_CST
7716 && real_isinf (TREE_REAL_CST_PTR (imag)))
7717 return
7718 omit_one_operand_loc (loc, type,
7719 build_complex_cproj (type, TREE_REAL_CST_PTR
7720 (imag)->sign), arg);
7721 }
7722
7723 return NULL_TREE;
7724 }
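
/* Worked example (added): following C99 Annex G, cproj maps any infinite
   argument to the point at infinity on the real axis:

     cproj (INFINITY - 1.0 * I) -> INFINITY - 0.0 * I

   where the imaginary zero keeps the sign of the original imaginary
   part; a finite constant argument is returned unchanged.  */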
7725
7726 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7727 Return NULL_TREE if no simplification can be made. */
7728
7729 static tree
7730 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7731 {
7732
7733 enum built_in_function fcode;
7734 tree res;
7735
7736 if (!validate_arg (arg, REAL_TYPE))
7737 return NULL_TREE;
7738
7739 /* Calculate the result when the argument is a constant. */
7740 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7741 return res;
7742
7743 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7744 fcode = builtin_mathfn_code (arg);
7745 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7746 {
7747 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7748 arg = fold_build2_loc (loc, MULT_EXPR, type,
7749 CALL_EXPR_ARG (arg, 0),
7750 build_real (type, dconsthalf));
7751 return build_call_expr_loc (loc, expfn, 1, arg);
7752 }
7753
7754 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7755 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7756 {
7757 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7758
7759 if (powfn)
7760 {
7761 tree arg0 = CALL_EXPR_ARG (arg, 0);
7762 tree tree_root;
7763 /* The inner root was either sqrt or cbrt. */
7764 /* This was a conditional expression but it triggered a bug
7765 in Sun C 5.5. */
7766 REAL_VALUE_TYPE dconstroot;
7767 if (BUILTIN_SQRT_P (fcode))
7768 dconstroot = dconsthalf;
7769 else
7770 dconstroot = dconst_third ();
7771
7772 /* Adjust for the outer root. */
7773 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7774 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7775 tree_root = build_real (type, dconstroot);
7776 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7777 }
7778 }
7779
7780 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7781 if (flag_unsafe_math_optimizations
7782 && (fcode == BUILT_IN_POW
7783 || fcode == BUILT_IN_POWF
7784 || fcode == BUILT_IN_POWL))
7785 {
7786 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7787 tree arg0 = CALL_EXPR_ARG (arg, 0);
7788 tree arg1 = CALL_EXPR_ARG (arg, 1);
7789 tree narg1;
7790 if (!tree_expr_nonnegative_p (arg0))
7791 arg0 = build1 (ABS_EXPR, type, arg0);
7792 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7793 build_real (type, dconsthalf));
7794 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7795 }
7796
7797 return NULL_TREE;
7798 }
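
/* Hedged summary of the unsafe-math sqrt folds above (added):

     sqrt (exp (x))    -> exp (x * 0.5)
     sqrt (sqrt (x))   -> pow (x, 0.25)
     sqrt (cbrt (x))   -> pow (x, 1.0 / 6.0)
     sqrt (pow (x, y)) -> pow (fabs (x), y * 0.5)

   Constant arguments are folded through MPFR at the top.  */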
7799
7800 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7801 Return NULL_TREE if no simplification can be made. */
7802
7803 static tree
7804 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7805 {
7806 const enum built_in_function fcode = builtin_mathfn_code (arg);
7807 tree res;
7808
7809 if (!validate_arg (arg, REAL_TYPE))
7810 return NULL_TREE;
7811
7812 /* Calculate the result when the argument is a constant. */
7813 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7814 return res;
7815
7816 if (flag_unsafe_math_optimizations)
7817 {
7818 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7819 if (BUILTIN_EXPONENT_P (fcode))
7820 {
7821 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7822 const REAL_VALUE_TYPE third_trunc =
7823 real_value_truncate (TYPE_MODE (type), dconst_third ());
7824 arg = fold_build2_loc (loc, MULT_EXPR, type,
7825 CALL_EXPR_ARG (arg, 0),
7826 build_real (type, third_trunc));
7827 return build_call_expr_loc (loc, expfn, 1, arg);
7828 }
7829
7830 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7831 if (BUILTIN_SQRT_P (fcode))
7832 {
7833 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7834
7835 if (powfn)
7836 {
7837 tree arg0 = CALL_EXPR_ARG (arg, 0);
7838 tree tree_root;
7839 REAL_VALUE_TYPE dconstroot = dconst_third ();
7840
7841 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7842 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7843 tree_root = build_real (type, dconstroot);
7844 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7845 }
7846 }
7847
7848 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7849 if (BUILTIN_CBRT_P (fcode))
7850 {
7851 tree arg0 = CALL_EXPR_ARG (arg, 0);
7852 if (tree_expr_nonnegative_p (arg0))
7853 {
7854 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7855
7856 if (powfn)
7857 {
7858 tree tree_root;
7859 REAL_VALUE_TYPE dconstroot;
7860
7861 real_arithmetic (&dconstroot, MULT_EXPR,
7862 dconst_third_ptr (), dconst_third_ptr ());
7863 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7864 tree_root = build_real (type, dconstroot);
7865 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7866 }
7867 }
7868 }
7869
7870 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7871 if (fcode == BUILT_IN_POW
7872 || fcode == BUILT_IN_POWF
7873 || fcode == BUILT_IN_POWL)
7874 {
7875 tree arg00 = CALL_EXPR_ARG (arg, 0);
7876 tree arg01 = CALL_EXPR_ARG (arg, 1);
7877 if (tree_expr_nonnegative_p (arg00))
7878 {
7879 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7880 const REAL_VALUE_TYPE dconstroot
7881 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7882 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7883 build_real (type, dconstroot));
7884 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7885 }
7886 }
7887 }
7888 return NULL_TREE;
7889 }
7890
7891 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7892 TYPE is the type of the return value. Return NULL_TREE if no
7893 simplification can be made. */
7894
7895 static tree
7896 fold_builtin_cos (location_t loc,
7897 tree arg, tree type, tree fndecl)
7898 {
7899 tree res, narg;
7900
7901 if (!validate_arg (arg, REAL_TYPE))
7902 return NULL_TREE;
7903
7904 /* Calculate the result when the argument is a constant. */
7905 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7906 return res;
7907
7908 /* Optimize cos(-x) into cos (x). */
7909 if ((narg = fold_strip_sign_ops (arg)))
7910 return build_call_expr_loc (loc, fndecl, 1, narg);
7911
7912 return NULL_TREE;
7913 }
7914
7915 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7916 Return NULL_TREE if no simplification can be made. */
7917
7918 static tree
7919 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7920 {
7921 if (validate_arg (arg, REAL_TYPE))
7922 {
7923 tree res, narg;
7924
7925 /* Calculate the result when the argument is a constant. */
7926 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7927 return res;
7928
7929 /* Optimize cosh(-x) into cosh (x). */
7930 if ((narg = fold_strip_sign_ops (arg)))
7931 return build_call_expr_loc (loc, fndecl, 1, narg);
7932 }
7933
7934 return NULL_TREE;
7935 }
7936
7937 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7938 argument ARG. TYPE is the type of the return value. Return
7939 NULL_TREE if no simplification can be made. */
7940
7941 static tree
7942 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7943 bool hyper)
7944 {
7945 if (validate_arg (arg, COMPLEX_TYPE)
7946 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7947 {
7948 tree tmp;
7949
7950 /* Calculate the result when the argument is a constant. */
7951 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7952 return tmp;
7953
7954 /* Optimize fn(-x) into fn(x). */
7955 if ((tmp = fold_strip_sign_ops (arg)))
7956 return build_call_expr_loc (loc, fndecl, 1, tmp);
7957 }
7958
7959 return NULL_TREE;
7960 }
7961
7962 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7963 Return NULL_TREE if no simplification can be made. */
7964
7965 static tree
7966 fold_builtin_tan (tree arg, tree type)
7967 {
7968 enum built_in_function fcode;
7969 tree res;
7970
7971 if (!validate_arg (arg, REAL_TYPE))
7972 return NULL_TREE;
7973
7974 /* Calculate the result when the argument is a constant. */
7975 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7976 return res;
7977
7978 /* Optimize tan(atan(x)) = x. */
7979 fcode = builtin_mathfn_code (arg);
7980 if (flag_unsafe_math_optimizations
7981 && (fcode == BUILT_IN_ATAN
7982 || fcode == BUILT_IN_ATANF
7983 || fcode == BUILT_IN_ATANL))
7984 return CALL_EXPR_ARG (arg, 0);
7985
7986 return NULL_TREE;
7987 }
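
/* Example (added): with -funsafe-math-optimizations, tan (atan (x))
   folds to plain x; constant arguments go through mpfr_tan.  */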
7988
7989 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7990 NULL_TREE if no simplification can be made. */
7991
7992 static tree
7993 fold_builtin_sincos (location_t loc,
7994 tree arg0, tree arg1, tree arg2)
7995 {
7996 tree type;
7997 tree res, fn, call;
7998
7999 if (!validate_arg (arg0, REAL_TYPE)
8000 || !validate_arg (arg1, POINTER_TYPE)
8001 || !validate_arg (arg2, POINTER_TYPE))
8002 return NULL_TREE;
8003
8004 type = TREE_TYPE (arg0);
8005
8006 /* Calculate the result when the argument is a constant. */
8007 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8008 return res;
8009
8010 /* Canonicalize sincos to cexpi. */
8011 if (!targetm.libc_has_function (function_c99_math_complex))
8012 return NULL_TREE;
8013 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8014 if (!fn)
8015 return NULL_TREE;
8016
8017 call = build_call_expr_loc (loc, fn, 1, arg0);
8018 call = builtin_save_expr (call);
8019
8020 return build2 (COMPOUND_EXPR, void_type_node,
8021 build2 (MODIFY_EXPR, void_type_node,
8022 build_fold_indirect_ref_loc (loc, arg1),
8023 build1 (IMAGPART_EXPR, type, call)),
8024 build2 (MODIFY_EXPR, void_type_node,
8025 build_fold_indirect_ref_loc (loc, arg2),
8026 build1 (REALPART_EXPR, type, call)));
8027 }
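
/* Illustrative expansion (added): when the C99 complex runtime is
   available, sincos (x, &s, &c) is canonicalized to the equivalent of

     t = cexpi (x);    t is cos (x) + I * sin (x)
     s = __imag__ t;
     c = __real__ t;

   which gives later passes a single call to share between separate
   sin and cos uses.  */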
8028
8029 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8030 NULL_TREE if no simplification can be made. */
8031
8032 static tree
8033 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8034 {
8035 tree rtype;
8036 tree realp, imagp, ifn;
8037 tree res;
8038
8039 if (!validate_arg (arg0, COMPLEX_TYPE)
8040 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8041 return NULL_TREE;
8042
8043 /* Calculate the result when the argument is a constant. */
8044 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8045 return res;
8046
8047 rtype = TREE_TYPE (TREE_TYPE (arg0));
8048
8049 /* In case we can figure out the real part of arg0 and it is constant zero,
8050 fold to cexpi. */
8051 if (!targetm.libc_has_function (function_c99_math_complex))
8052 return NULL_TREE;
8053 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8054 if (!ifn)
8055 return NULL_TREE;
8056
8057 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8058 && real_zerop (realp))
8059 {
8060 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8061 return build_call_expr_loc (loc, ifn, 1, narg);
8062 }
8063
8064 /* In case we can easily decompose the real and imaginary parts, split cexp
8065 to exp (r) * cexpi (i). */
8066 if (flag_unsafe_math_optimizations
8067 && realp)
8068 {
8069 tree rfn, rcall, icall;
8070
8071 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8072 if (!rfn)
8073 return NULL_TREE;
8074
8075 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8076 if (!imagp)
8077 return NULL_TREE;
8078
8079 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8080 icall = builtin_save_expr (icall);
8081 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8082 rcall = builtin_save_expr (rcall);
8083 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8084 fold_build2_loc (loc, MULT_EXPR, rtype,
8085 rcall,
8086 fold_build1_loc (loc, REALPART_EXPR,
8087 rtype, icall)),
8088 fold_build2_loc (loc, MULT_EXPR, rtype,
8089 rcall,
8090 fold_build1_loc (loc, IMAGPART_EXPR,
8091 rtype, icall)));
8092 }
8093
8094 return NULL_TREE;
8095 }
8096
8097 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8098 Return NULL_TREE if no simplification can be made. */
8099
8100 static tree
8101 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8102 {
8103 if (!validate_arg (arg, REAL_TYPE))
8104 return NULL_TREE;
8105
8106 /* Optimize trunc of constant value. */
8107 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8108 {
8109 REAL_VALUE_TYPE r, x;
8110 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8111
8112 x = TREE_REAL_CST (arg);
8113 real_trunc (&r, TYPE_MODE (type), &x);
8114 return build_real (type, r);
8115 }
8116
8117 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8118 }
8119
8120 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8121 Return NULL_TREE if no simplification can be made. */
8122
8123 static tree
8124 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8125 {
8126 if (!validate_arg (arg, REAL_TYPE))
8127 return NULL_TREE;
8128
8129 /* Optimize floor of constant value. */
8130 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8131 {
8132 REAL_VALUE_TYPE x;
8133
8134 x = TREE_REAL_CST (arg);
8135 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8136 {
8137 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8138 REAL_VALUE_TYPE r;
8139
8140 real_floor (&r, TYPE_MODE (type), &x);
8141 return build_real (type, r);
8142 }
8143 }
8144
8145 /* Fold floor (x) where x is nonnegative to trunc (x). */
8146 if (tree_expr_nonnegative_p (arg))
8147 {
8148 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8149 if (truncfn)
8150 return build_call_expr_loc (loc, truncfn, 1, arg);
8151 }
8152
8153 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8154 }
8155
8156 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8157 Return NULL_TREE if no simplification can be made. */
8158
8159 static tree
8160 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8161 {
8162 if (!validate_arg (arg, REAL_TYPE))
8163 return NULL_TREE;
8164
8165 /* Optimize ceil of constant value. */
8166 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8167 {
8168 REAL_VALUE_TYPE x;
8169
8170 x = TREE_REAL_CST (arg);
8171 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8172 {
8173 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8174 REAL_VALUE_TYPE r;
8175
8176 real_ceil (&r, TYPE_MODE (type), &x);
8177 return build_real (type, r);
8178 }
8179 }
8180
8181 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8182 }
8183
8184 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8185 Return NULL_TREE if no simplification can be made. */
8186
8187 static tree
8188 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8189 {
8190 if (!validate_arg (arg, REAL_TYPE))
8191 return NULL_TREE;
8192
8193 /* Optimize round of constant value. */
8194 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8195 {
8196 REAL_VALUE_TYPE x;
8197
8198 x = TREE_REAL_CST (arg);
8199 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8200 {
8201 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8202 REAL_VALUE_TYPE r;
8203
8204 real_round (&r, TYPE_MODE (type), &x);
8205 return build_real (type, r);
8206 }
8207 }
8208
8209 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8210 }
8211
8212 /* Fold function call to builtin lround, lroundf or lroundl (or the
8213 corresponding int and long long versions) and other rounding functions. ARG
8214 is the argument to the call. Return NULL_TREE if no simplification
8215 can be made. */
8216
8217 static tree
8218 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8219 {
8220 if (!validate_arg (arg, REAL_TYPE))
8221 return NULL_TREE;
8222
8223 /* Optimize lround of constant value. */
8224 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8225 {
8226 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8227
8228 if (real_isfinite (&x))
8229 {
8230 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8231 tree ftype = TREE_TYPE (arg);
8232 REAL_VALUE_TYPE r;
8233 bool fail = false;
8234
8235 switch (DECL_FUNCTION_CODE (fndecl))
8236 {
8237 CASE_FLT_FN (BUILT_IN_IFLOOR):
8238 CASE_FLT_FN (BUILT_IN_LFLOOR):
8239 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8240 real_floor (&r, TYPE_MODE (ftype), &x);
8241 break;
8242
8243 CASE_FLT_FN (BUILT_IN_ICEIL):
8244 CASE_FLT_FN (BUILT_IN_LCEIL):
8245 CASE_FLT_FN (BUILT_IN_LLCEIL):
8246 real_ceil (&r, TYPE_MODE (ftype), &x);
8247 break;
8248
8249 CASE_FLT_FN (BUILT_IN_IROUND):
8250 CASE_FLT_FN (BUILT_IN_LROUND):
8251 CASE_FLT_FN (BUILT_IN_LLROUND):
8252 real_round (&r, TYPE_MODE (ftype), &x);
8253 break;
8254
8255 default:
8256 gcc_unreachable ();
8257 }
8258
8259 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8260 if (!fail)
8261 return wide_int_to_tree (itype, val);
8262 }
8263 }
8264
8265 switch (DECL_FUNCTION_CODE (fndecl))
8266 {
8267 CASE_FLT_FN (BUILT_IN_LFLOOR):
8268 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8269 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8270 if (tree_expr_nonnegative_p (arg))
8271 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8272 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8273 break;
8274 default:;
8275 }
8276
8277 return fold_fixed_mathfn (loc, fndecl, arg);
8278 }
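
/* Worked examples of the constant folds above (added):

     lround (2.5)  -> 3    (real_round: nearest, ties away from zero)
     lfloor (2.9)  -> 2
     lceil (-2.9)  -> -2

   and for provably nonnegative x, lfloor (x) becomes the plain
   truncating conversion FIX_TRUNC (x).  */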
8279
8280 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8281 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8282 the argument to the call. Return NULL_TREE if no simplification can
8283 be made. */
8284
8285 static tree
8286 fold_builtin_bitop (tree fndecl, tree arg)
8287 {
8288 if (!validate_arg (arg, INTEGER_TYPE))
8289 return NULL_TREE;
8290
8291 /* Optimize for constant argument. */
8292 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8293 {
8294 tree type = TREE_TYPE (arg);
8295 int result;
8296
8297 switch (DECL_FUNCTION_CODE (fndecl))
8298 {
8299 CASE_INT_FN (BUILT_IN_FFS):
8300 result = wi::ffs (arg);
8301 break;
8302
8303 CASE_INT_FN (BUILT_IN_CLZ):
8304 if (wi::ne_p (arg, 0))
8305 result = wi::clz (arg);
8306 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8307 result = TYPE_PRECISION (type);
8308 break;
8309
8310 CASE_INT_FN (BUILT_IN_CTZ):
8311 if (wi::ne_p (arg, 0))
8312 result = wi::ctz (arg);
8313 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8314 result = TYPE_PRECISION (type);
8315 break;
8316
8317 CASE_INT_FN (BUILT_IN_CLRSB):
8318 result = wi::clrsb (arg);
8319 break;
8320
8321 CASE_INT_FN (BUILT_IN_POPCOUNT):
8322 result = wi::popcount (arg);
8323 break;
8324
8325 CASE_INT_FN (BUILT_IN_PARITY):
8326 result = wi::parity (arg);
8327 break;
8328
8329 default:
8330 gcc_unreachable ();
8331 }
8332
8333 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8334 }
8335
8336 return NULL_TREE;
8337 }
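
/* Constant-fold examples (added, illustrative):

     __builtin_popcount (0xff) -> 8
     __builtin_parity (0x7)    -> 1
     __builtin_ffs (0x8)       -> 4   (one-based index of lowest set bit)

   while __builtin_clz (0) and __builtin_ctz (0) fold only per the
   target's CLZ/CTZ_DEFINED_VALUE_AT_ZERO, defaulting to the type
   precision when undefined there.  */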
8338
8339 /* Fold function call to builtin_bswap and the short, long and long long
8340 variants. Return NULL_TREE if no simplification can be made. */
8341 static tree
8342 fold_builtin_bswap (tree fndecl, tree arg)
8343 {
8344 if (! validate_arg (arg, INTEGER_TYPE))
8345 return NULL_TREE;
8346
8347 /* Optimize constant value. */
8348 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8349 {
8350 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8351
8352 switch (DECL_FUNCTION_CODE (fndecl))
8353 {
8354 case BUILT_IN_BSWAP16:
8355 case BUILT_IN_BSWAP32:
8356 case BUILT_IN_BSWAP64:
8357 {
8358 signop sgn = TYPE_SIGN (type);
8359 tree result =
8360 wide_int_to_tree (type,
8361 wide_int::from (arg, TYPE_PRECISION (type),
8362 sgn).bswap ());
8363 return result;
8364 }
8365 default:
8366 gcc_unreachable ();
8367 }
8368 }
8369
8370 return NULL_TREE;
8371 }
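
/* Example of the constant fold above (added):

     __builtin_bswap32 (0x12345678) -> 0x78563412
     __builtin_bswap16 (0xabcd)     -> 0xcdab

   wide_int::from (...).bswap () reverses the bytes at the precision of
   the return type.  */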
8372
8373 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8374 NULL_TREE if no simplification can be made. */
8375
8376 static tree
8377 fold_builtin_hypot (location_t loc, tree fndecl,
8378 tree arg0, tree arg1, tree type)
8379 {
8380 tree res, narg0, narg1;
8381
8382 if (!validate_arg (arg0, REAL_TYPE)
8383 || !validate_arg (arg1, REAL_TYPE))
8384 return NULL_TREE;
8385
8386 /* Calculate the result when the argument is a constant. */
8387 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8388 return res;
8389
8390 /* If either argument to hypot has a negate or abs, strip that off.
8391 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8392 narg0 = fold_strip_sign_ops (arg0);
8393 narg1 = fold_strip_sign_ops (arg1);
8394 if (narg0 || narg1)
8395 {
8396 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8397 narg1 ? narg1 : arg1);
8398 }
8399
8400 /* If either argument is zero, hypot is fabs of the other. */
8401 if (real_zerop (arg0))
8402 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8403 else if (real_zerop (arg1))
8404 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8405
8406 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8407 if (flag_unsafe_math_optimizations
8408 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8409 {
8410 const REAL_VALUE_TYPE sqrt2_trunc
8411 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8412 return fold_build2_loc (loc, MULT_EXPR, type,
8413 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8414 build_real (type, sqrt2_trunc));
8415 }
8416
8417 return NULL_TREE;
8418 }
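
/* Illustration of the hypot folds above (added):

     hypot (-x, fabs (y)) -> hypot (x, y)          (sign ops stripped)
     hypot (x, 0.0)       -> fabs (x)
     hypot (x, x)         -> fabs (x) * sqrt (2)   (unsafe-math only)  */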
8419
8420
8421 /* Fold a builtin function call to pow, powf, or powl. Return
8422 NULL_TREE if no simplification can be made. */
8423 static tree
8424 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8425 {
8426 tree res;
8427
8428 if (!validate_arg (arg0, REAL_TYPE)
8429 || !validate_arg (arg1, REAL_TYPE))
8430 return NULL_TREE;
8431
8432 /* Calculate the result when the argument is a constant. */
8433 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8434 return res;
8435
8436 /* Optimize pow(1.0,y) = 1.0. */
8437 if (real_onep (arg0))
8438 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8439
8440 if (TREE_CODE (arg1) == REAL_CST
8441 && !TREE_OVERFLOW (arg1))
8442 {
8443 REAL_VALUE_TYPE cint;
8444 REAL_VALUE_TYPE c;
8445 HOST_WIDE_INT n;
8446
8447 c = TREE_REAL_CST (arg1);
8448
8449 /* Optimize pow(x,0.0) = 1.0. */
8450 if (REAL_VALUES_EQUAL (c, dconst0))
8451 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8452 arg0);
8453
8454 /* Optimize pow(x,1.0) = x. */
8455 if (REAL_VALUES_EQUAL (c, dconst1))
8456 return arg0;
8457
8458 /* Optimize pow(x,-1.0) = 1.0/x. */
8459 if (REAL_VALUES_EQUAL (c, dconstm1))
8460 return fold_build2_loc (loc, RDIV_EXPR, type,
8461 build_real (type, dconst1), arg0);
8462
8463 /* Optimize pow(x,0.5) = sqrt(x). */
8464 if (flag_unsafe_math_optimizations
8465 && REAL_VALUES_EQUAL (c, dconsthalf))
8466 {
8467 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8468
8469 if (sqrtfn != NULL_TREE)
8470 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8471 }
8472
8473 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8474 if (flag_unsafe_math_optimizations)
8475 {
8476 const REAL_VALUE_TYPE dconstroot
8477 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8478
8479 if (REAL_VALUES_EQUAL (c, dconstroot))
8480 {
8481 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8482 if (cbrtfn != NULL_TREE)
8483 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8484 }
8485 }
8486
8487 /* Check for an integer exponent. */
8488 n = real_to_integer (&c);
8489 real_from_integer (&cint, VOIDmode, n, SIGNED);
8490 if (real_identical (&c, &cint))
8491 {
8492 /* Attempt to evaluate pow at compile-time, unless this should
8493 raise an exception. */
8494 if (TREE_CODE (arg0) == REAL_CST
8495 && !TREE_OVERFLOW (arg0)
8496 && (n > 0
8497 || (!flag_trapping_math && !flag_errno_math)
8498 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8499 {
8500 REAL_VALUE_TYPE x;
8501 bool inexact;
8502
8503 x = TREE_REAL_CST (arg0);
8504 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8505 if (flag_unsafe_math_optimizations || !inexact)
8506 return build_real (type, x);
8507 }
8508
8509 /* Strip sign ops from even integer powers. */
8510 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8511 {
8512 tree narg0 = fold_strip_sign_ops (arg0);
8513 if (narg0)
8514 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8515 }
8516 }
8517 }
8518
8519 if (flag_unsafe_math_optimizations)
8520 {
8521 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8522
8523 /* Optimize pow(expN(x),y) = expN(x*y). */
8524 if (BUILTIN_EXPONENT_P (fcode))
8525 {
8526 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8527 tree arg = CALL_EXPR_ARG (arg0, 0);
8528 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8529 return build_call_expr_loc (loc, expfn, 1, arg);
8530 }
8531
8532 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8533 if (BUILTIN_SQRT_P (fcode))
8534 {
8535 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8536 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8537 build_real (type, dconsthalf));
8538 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8539 }
8540
8541 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8542 if (BUILTIN_CBRT_P (fcode))
8543 {
8544 tree arg = CALL_EXPR_ARG (arg0, 0);
8545 if (tree_expr_nonnegative_p (arg))
8546 {
8547 const REAL_VALUE_TYPE dconstroot
8548 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8549 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8550 build_real (type, dconstroot));
8551 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8552 }
8553 }
8554
8555 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8556 if (fcode == BUILT_IN_POW
8557 || fcode == BUILT_IN_POWF
8558 || fcode == BUILT_IN_POWL)
8559 {
8560 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8561 if (tree_expr_nonnegative_p (arg00))
8562 {
8563 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8564 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8565 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8566 }
8567 }
8568 }
8569
8570 return NULL_TREE;
8571 }
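
/* Hedged summary of the pow folds above (added):

     pow (x, 0.0)  -> 1.0              pow (x, 1.0)  -> x
     pow (x, -1.0) -> 1.0 / x          pow (x, 0.5)  -> sqrt (x)  (unsafe)
     pow (exp (x), y)    -> exp (x * y)                           (unsafe)
     pow (pow (x, y), z) -> pow (x, y * z)  iff x is nonnegative  (unsafe)

   Constant integer exponents are evaluated at compile time with
   real_powi when doing so cannot raise an observable exception.  */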
8572
8573 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8574 Return NULL_TREE if no simplification can be made. */
8575 static tree
8576 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8577 tree arg0, tree arg1, tree type)
8578 {
8579 if (!validate_arg (arg0, REAL_TYPE)
8580 || !validate_arg (arg1, INTEGER_TYPE))
8581 return NULL_TREE;
8582
8583 /* Optimize powi(1.0,y) = 1.0. */
8584 if (real_onep (arg0))
8585 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8586
8587 if (tree_fits_shwi_p (arg1))
8588 {
8589 HOST_WIDE_INT c = tree_to_shwi (arg1);
8590
8591 /* Evaluate powi at compile-time. */
8592 if (TREE_CODE (arg0) == REAL_CST
8593 && !TREE_OVERFLOW (arg0))
8594 {
8595 REAL_VALUE_TYPE x;
8596 x = TREE_REAL_CST (arg0);
8597 real_powi (&x, TYPE_MODE (type), &x, c);
8598 return build_real (type, x);
8599 }
8600
8601 /* Optimize powi(x,0) = 1.0. */
8602 if (c == 0)
8603 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8604 arg0);
8605
8606 /* Optimize powi(x,1) = x. */
8607 if (c == 1)
8608 return arg0;
8609
8610 /* Optimize powi(x,-1) = 1.0/x. */
8611 if (c == -1)
8612 return fold_build2_loc (loc, RDIV_EXPR, type,
8613 build_real (type, dconst1), arg0);
8614 }
8615
8616 return NULL_TREE;
8617 }
8618
8619 /* A subroutine of fold_builtin to fold the various exponent
8620 functions. Return NULL_TREE if no simplification can be made.
8621 FUNC is the corresponding MPFR exponent function. */
8622
8623 static tree
8624 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8625 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8626 {
8627 if (validate_arg (arg, REAL_TYPE))
8628 {
8629 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8630 tree res;
8631
8632 /* Calculate the result when the argument is a constant. */
8633 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8634 return res;
8635
8636 /* Optimize expN(logN(x)) = x. */
8637 if (flag_unsafe_math_optimizations)
8638 {
8639 const enum built_in_function fcode = builtin_mathfn_code (arg);
8640
8641 if ((func == mpfr_exp
8642 && (fcode == BUILT_IN_LOG
8643 || fcode == BUILT_IN_LOGF
8644 || fcode == BUILT_IN_LOGL))
8645 || (func == mpfr_exp2
8646 && (fcode == BUILT_IN_LOG2
8647 || fcode == BUILT_IN_LOG2F
8648 || fcode == BUILT_IN_LOG2L))
8649 || (func == mpfr_exp10
8650 && (fcode == BUILT_IN_LOG10
8651 || fcode == BUILT_IN_LOG10F
8652 || fcode == BUILT_IN_LOG10L)))
8653 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8654 }
8655 }
8656
8657 return NULL_TREE;
8658 }
8659
8660 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8661 arguments to the call, and TYPE is its return type.
8662 Return NULL_TREE if no simplification can be made. */
8663
8664 static tree
8665 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8666 {
8667 if (!validate_arg (arg1, POINTER_TYPE)
8668 || !validate_arg (arg2, INTEGER_TYPE)
8669 || !validate_arg (len, INTEGER_TYPE))
8670 return NULL_TREE;
8671 else
8672 {
8673 const char *p1;
8674
8675 if (TREE_CODE (arg2) != INTEGER_CST
8676 || !tree_fits_uhwi_p (len))
8677 return NULL_TREE;
8678
8679 p1 = c_getstr (arg1);
8680 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8681 {
8682 char c;
8683 const char *r;
8684 tree tem;
8685
8686 if (target_char_cast (arg2, &c))
8687 return NULL_TREE;
8688
8689 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8690
8691 if (r == NULL)
8692 return build_int_cst (TREE_TYPE (arg1), 0);
8693
8694 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8695 return fold_convert_loc (loc, type, tem);
8696 }
8697 return NULL_TREE;
8698 }
8699 }
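
/* Editorial sketch of the fold above: for a constant string and an
   in-range constant length, the search happens at compile time, e.g.

       __builtin_memchr ("hello", 'l', 5)  =>  (void *) "hello" + 2
       __builtin_memchr ("hello", 'z', 5)  =>  (void *) 0

   where the +2 offset comes from fold_build_pointer_plus_hwi_loc.  */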
8700
8701 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8702 Return NULL_TREE if no simplification can be made. */
8703
8704 static tree
8705 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8706 {
8707 const char *p1, *p2;
8708
8709 if (!validate_arg (arg1, POINTER_TYPE)
8710 || !validate_arg (arg2, POINTER_TYPE)
8711 || !validate_arg (len, INTEGER_TYPE))
8712 return NULL_TREE;
8713
8714 /* If the LEN parameter is zero, return zero. */
8715 if (integer_zerop (len))
8716 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8717 arg1, arg2);
8718
8719 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8720 if (operand_equal_p (arg1, arg2, 0))
8721 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8722
8723 p1 = c_getstr (arg1);
8724 p2 = c_getstr (arg2);
8725
8726 /* If all arguments are constant, and the value of len is not greater
8727 than the lengths of arg1 and arg2, evaluate at compile-time. */
8728 if (tree_fits_uhwi_p (len) && p1 && p2
8729 && compare_tree_int (len, strlen (p1) + 1) <= 0
8730 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8731 {
8732 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8733
8734 if (r > 0)
8735 return integer_one_node;
8736 else if (r < 0)
8737 return integer_minus_one_node;
8738 else
8739 return integer_zero_node;
8740 }
8741
8742 /* If the len parameter is one, return an expression corresponding to
8743    (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8744 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8745 {
8746 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8747 tree cst_uchar_ptr_node
8748 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8749
8750 tree ind1
8751 = fold_convert_loc (loc, integer_type_node,
8752 build1 (INDIRECT_REF, cst_uchar_node,
8753 fold_convert_loc (loc,
8754 cst_uchar_ptr_node,
8755 arg1)));
8756 tree ind2
8757 = fold_convert_loc (loc, integer_type_node,
8758 build1 (INDIRECT_REF, cst_uchar_node,
8759 fold_convert_loc (loc,
8760 cst_uchar_ptr_node,
8761 arg2)));
8762 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8763 }
8764
8765 return NULL_TREE;
8766 }
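
/* For illustration (editorial, not in the original source), the folds
   above turn

       __builtin_memcmp (p, q, 0)        =>  0   (p and q kept for
						   their side effects)
       __builtin_memcmp ("ab", "ac", 3)  =>  -1  (compile-time memcmp)
       __builtin_memcmp (p, q, 1)
	 =>  (int) *(const unsigned char *) p
	     - (int) *(const unsigned char *) q  */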
8767
8768 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8769 Return NULL_TREE if no simplification can be made. */
8770
8771 static tree
8772 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8773 {
8774 const char *p1, *p2;
8775
8776 if (!validate_arg (arg1, POINTER_TYPE)
8777 || !validate_arg (arg2, POINTER_TYPE))
8778 return NULL_TREE;
8779
8780 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8781 if (operand_equal_p (arg1, arg2, 0))
8782 return integer_zero_node;
8783
8784 p1 = c_getstr (arg1);
8785 p2 = c_getstr (arg2);
8786
8787 if (p1 && p2)
8788 {
8789 const int i = strcmp (p1, p2);
8790 if (i < 0)
8791 return integer_minus_one_node;
8792 else if (i > 0)
8793 return integer_one_node;
8794 else
8795 return integer_zero_node;
8796 }
8797
8798 /* If the second arg is "", return *(const unsigned char*)arg1. */
8799 if (p2 && *p2 == '\0')
8800 {
8801 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8802 tree cst_uchar_ptr_node
8803 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8804
8805 return fold_convert_loc (loc, integer_type_node,
8806 build1 (INDIRECT_REF, cst_uchar_node,
8807 fold_convert_loc (loc,
8808 cst_uchar_ptr_node,
8809 arg1)));
8810 }
8811
8812 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8813 if (p1 && *p1 == '\0')
8814 {
8815 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8816 tree cst_uchar_ptr_node
8817 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8818
8819 tree temp
8820 = fold_convert_loc (loc, integer_type_node,
8821 build1 (INDIRECT_REF, cst_uchar_node,
8822 fold_convert_loc (loc,
8823 cst_uchar_ptr_node,
8824 arg2)));
8825 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8826 }
8827
8828 return NULL_TREE;
8829 }
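
/* Editorial examples of the strcmp folds above:

       __builtin_strcmp (s, s)      =>  0
       __builtin_strcmp ("a", "b")  =>  -1
       __builtin_strcmp (s, "")     =>  *(const unsigned char *) s
       __builtin_strcmp ("", s)     =>  -*(const unsigned char *) s  */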
8830
8831 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8832 Return NULL_TREE if no simplification can be made. */
8833
8834 static tree
8835 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8836 {
8837 const char *p1, *p2;
8838
8839 if (!validate_arg (arg1, POINTER_TYPE)
8840 || !validate_arg (arg2, POINTER_TYPE)
8841 || !validate_arg (len, INTEGER_TYPE))
8842 return NULL_TREE;
8843
8844 /* If the LEN parameter is zero, return zero. */
8845 if (integer_zerop (len))
8846 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8847 arg1, arg2);
8848
8849 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8850 if (operand_equal_p (arg1, arg2, 0))
8851 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8852
8853 p1 = c_getstr (arg1);
8854 p2 = c_getstr (arg2);
8855
8856 if (tree_fits_uhwi_p (len) && p1 && p2)
8857 {
8858 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8859 if (i > 0)
8860 return integer_one_node;
8861 else if (i < 0)
8862 return integer_minus_one_node;
8863 else
8864 return integer_zero_node;
8865 }
8866
8867 /* If the second arg is "", and the length is greater than zero,
8868 return *(const unsigned char*)arg1. */
8869 if (p2 && *p2 == '\0'
8870 && TREE_CODE (len) == INTEGER_CST
8871 && tree_int_cst_sgn (len) == 1)
8872 {
8873 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8874 tree cst_uchar_ptr_node
8875 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8876
8877 return fold_convert_loc (loc, integer_type_node,
8878 build1 (INDIRECT_REF, cst_uchar_node,
8879 fold_convert_loc (loc,
8880 cst_uchar_ptr_node,
8881 arg1)));
8882 }
8883
8884 /* If the first arg is "", and the length is greater than zero,
8885 return -*(const unsigned char*)arg2. */
8886 if (p1 && *p1 == '\0'
8887 && TREE_CODE (len) == INTEGER_CST
8888 && tree_int_cst_sgn (len) == 1)
8889 {
8890 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8891 tree cst_uchar_ptr_node
8892 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8893
8894 tree temp = fold_convert_loc (loc, integer_type_node,
8895 build1 (INDIRECT_REF, cst_uchar_node,
8896 fold_convert_loc (loc,
8897 cst_uchar_ptr_node,
8898 arg2)));
8899 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8900 }
8901
8902 /* If the len parameter is one, return an expression corresponding to
8903    (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8904 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8905 {
8906 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8907 tree cst_uchar_ptr_node
8908 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8909
8910 tree ind1 = fold_convert_loc (loc, integer_type_node,
8911 build1 (INDIRECT_REF, cst_uchar_node,
8912 fold_convert_loc (loc,
8913 cst_uchar_ptr_node,
8914 arg1)));
8915 tree ind2 = fold_convert_loc (loc, integer_type_node,
8916 build1 (INDIRECT_REF, cst_uchar_node,
8917 fold_convert_loc (loc,
8918 cst_uchar_ptr_node,
8919 arg2)));
8920 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8921 }
8922
8923 return NULL_TREE;
8924 }
8925
8926 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8927 ARG. Return NULL_TREE if no simplification can be made. */
8928
8929 static tree
8930 fold_builtin_signbit (location_t loc, tree arg, tree type)
8931 {
8932 if (!validate_arg (arg, REAL_TYPE))
8933 return NULL_TREE;
8934
8935 /* If ARG is a compile-time constant, determine the result. */
8936 if (TREE_CODE (arg) == REAL_CST
8937 && !TREE_OVERFLOW (arg))
8938 {
8939 REAL_VALUE_TYPE c;
8940
8941 c = TREE_REAL_CST (arg);
8942 return (REAL_VALUE_NEGATIVE (c)
8943 ? build_one_cst (type)
8944 : build_zero_cst (type));
8945 }
8946
8947 /* If ARG is non-negative, the result is always zero. */
8948 if (tree_expr_nonnegative_p (arg))
8949 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8950
8951 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8952 if (!HONOR_SIGNED_ZEROS (arg))
8953 return fold_convert (type,
8954 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8955 build_real (TREE_TYPE (arg), dconst0)));
8956
8957 return NULL_TREE;
8958 }
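
/* Editorial examples: the folds above give

       __builtin_signbit (-3.5)  =>  1
       __builtin_signbit (x)     =>  0        if x is known non-negative
       __builtin_signbit (x)     =>  x < 0.0  if the format has no signed
					      zeros; otherwise signbit(-0.0)
					      would wrongly yield 0.  */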
8959
8960 /* Fold function call to builtin copysign, copysignf or copysignl with
8961 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8962 be made. */
8963
8964 static tree
8965 fold_builtin_copysign (location_t loc, tree fndecl,
8966 tree arg1, tree arg2, tree type)
8967 {
8968 tree tem;
8969
8970 if (!validate_arg (arg1, REAL_TYPE)
8971 || !validate_arg (arg2, REAL_TYPE))
8972 return NULL_TREE;
8973
8974 /* copysign(X,X) is X. */
8975 if (operand_equal_p (arg1, arg2, 0))
8976 return fold_convert_loc (loc, type, arg1);
8977
8978 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8979 if (TREE_CODE (arg1) == REAL_CST
8980 && TREE_CODE (arg2) == REAL_CST
8981 && !TREE_OVERFLOW (arg1)
8982 && !TREE_OVERFLOW (arg2))
8983 {
8984 REAL_VALUE_TYPE c1, c2;
8985
8986 c1 = TREE_REAL_CST (arg1);
8987 c2 = TREE_REAL_CST (arg2);
8988 /* c1.sign := c2.sign. */
8989 real_copysign (&c1, &c2);
8990 return build_real (type, c1);
8991 }
8992
8993 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8994 Remember to evaluate Y for side-effects. */
8995 if (tree_expr_nonnegative_p (arg2))
8996 return omit_one_operand_loc (loc, type,
8997 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8998 arg2);
8999
9000 /* Strip sign changing operations for the first argument. */
9001 tem = fold_strip_sign_ops (arg1);
9002 if (tem)
9003 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9004
9005 return NULL_TREE;
9006 }
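
/* For illustration (editorial): the folds above give

       __builtin_copysign (x, x)    =>  x
       __builtin_copysign (x, 2.0)  =>  __builtin_fabs (x)
       __builtin_copysign (-x, y)   =>  __builtin_copysign (x, y)

   the last via fold_strip_sign_ops on the first argument.  */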
9007
9008 /* Fold a call to builtin isascii with argument ARG. */
9009
9010 static tree
9011 fold_builtin_isascii (location_t loc, tree arg)
9012 {
9013 if (!validate_arg (arg, INTEGER_TYPE))
9014 return NULL_TREE;
9015 else
9016 {
9017 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9018 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9019 build_int_cst (integer_type_node,
9020 ~ (unsigned HOST_WIDE_INT) 0x7f));
9021 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9022 arg, integer_zero_node);
9023 }
9024 }
9025
9026 /* Fold a call to builtin toascii with argument ARG. */
9027
9028 static tree
9029 fold_builtin_toascii (location_t loc, tree arg)
9030 {
9031 if (!validate_arg (arg, INTEGER_TYPE))
9032 return NULL_TREE;
9033
9034 /* Transform toascii(c) -> (c & 0x7f). */
9035 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9036 build_int_cst (integer_type_node, 0x7f));
9037 }
9038
9039 /* Fold a call to builtin isdigit with argument ARG. */
9040
9041 static tree
9042 fold_builtin_isdigit (location_t loc, tree arg)
9043 {
9044 if (!validate_arg (arg, INTEGER_TYPE))
9045 return NULL_TREE;
9046 else
9047 {
9048 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9049 /* According to the C standard, isdigit is unaffected by locale.
9050 However, it definitely is affected by the target character set. */
9051 unsigned HOST_WIDE_INT target_digit0
9052 = lang_hooks.to_target_charset ('0');
9053
9054 if (target_digit0 == 0)
9055 return NULL_TREE;
9056
9057 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9058 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9059 build_int_cst (unsigned_type_node, target_digit0));
9060 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9061 build_int_cst (unsigned_type_node, 9));
9062 }
9063 }
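
/* Editorial sketch: on a target whose character set maps '0' to 48
   (e.g. ASCII), the fold above turns

       __builtin_isdigit (c)  =>  ((unsigned) c - 48) <= 9

   a single unsigned comparison that also rejects values below '0',
   because the subtraction wraps around.  */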
9064
9065 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9066
9067 static tree
9068 fold_builtin_fabs (location_t loc, tree arg, tree type)
9069 {
9070 if (!validate_arg (arg, REAL_TYPE))
9071 return NULL_TREE;
9072
9073 arg = fold_convert_loc (loc, type, arg);
9074 if (TREE_CODE (arg) == REAL_CST)
9075 return fold_abs_const (arg, type);
9076 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9077 }
9078
9079 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9080
9081 static tree
9082 fold_builtin_abs (location_t loc, tree arg, tree type)
9083 {
9084 if (!validate_arg (arg, INTEGER_TYPE))
9085 return NULL_TREE;
9086
9087 arg = fold_convert_loc (loc, type, arg);
9088 if (TREE_CODE (arg) == INTEGER_CST)
9089 return fold_abs_const (arg, type);
9090 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9091 }
9092
9093 /* Fold a fma operation with arguments ARG[012]. */
9094
9095 tree
9096 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9097 tree type, tree arg0, tree arg1, tree arg2)
9098 {
9099 if (TREE_CODE (arg0) == REAL_CST
9100 && TREE_CODE (arg1) == REAL_CST
9101 && TREE_CODE (arg2) == REAL_CST)
9102 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9103
9104 return NULL_TREE;
9105 }
9106
9107 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9108
9109 static tree
9110 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9111 {
9112 if (validate_arg (arg0, REAL_TYPE)
9113 && validate_arg (arg1, REAL_TYPE)
9114 && validate_arg (arg2, REAL_TYPE))
9115 {
9116 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9117 if (tem)
9118 return tem;
9119
9120 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9121 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9122 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9123 }
9124 return NULL_TREE;
9125 }
9126
9127 /* Fold a call to builtin fmin or fmax. */
9128
9129 static tree
9130 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9131 tree type, bool max)
9132 {
9133 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9134 {
9135 /* Calculate the result when the argument is a constant. */
9136 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9137
9138 if (res)
9139 return res;
9140
9141 /* If either argument is NaN, return the other one. Avoid the
9142 transformation if we get (and honor) a signalling NaN. Using
9143 omit_one_operand() ensures we create a non-lvalue. */
9144 if (TREE_CODE (arg0) == REAL_CST
9145 && real_isnan (&TREE_REAL_CST (arg0))
9146 && (! HONOR_SNANS (arg0)
9147 || ! TREE_REAL_CST (arg0).signalling))
9148 return omit_one_operand_loc (loc, type, arg1, arg0);
9149 if (TREE_CODE (arg1) == REAL_CST
9150 && real_isnan (&TREE_REAL_CST (arg1))
9151 && (! HONOR_SNANS (arg1)
9152 || ! TREE_REAL_CST (arg1).signalling))
9153 return omit_one_operand_loc (loc, type, arg0, arg1);
9154
9155 /* Transform fmin/fmax(x,x) -> x. */
9156 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9157 return omit_one_operand_loc (loc, type, arg0, arg1);
9158
9159 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9160 functions to return the numeric arg if the other one is NaN.
9161 These tree codes don't honor that, so only transform if
9162 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9163 handled, so we don't have to worry about it either. */
9164 if (flag_finite_math_only)
9165 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9166 fold_convert_loc (loc, type, arg0),
9167 fold_convert_loc (loc, type, arg1));
9168 }
9169 return NULL_TREE;
9170 }
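
/* Editorial examples of the fmin/fmax folds above:

       __builtin_fmax (x, nan)  =>  x   (nan a quiet-NaN REAL_CST)
       __builtin_fmin (x, x)    =>  x
       __builtin_fmax (x, y)    =>  MAX_EXPR <x, y>  under
				    -ffinite-math-only

   the first being the C99 rule that a quiet NaN operand is ignored.  */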
9171
9172 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9173
9174 static tree
9175 fold_builtin_carg (location_t loc, tree arg, tree type)
9176 {
9177 if (validate_arg (arg, COMPLEX_TYPE)
9178 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9179 {
9180 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9181
9182 if (atan2_fn)
9183 {
9184 tree new_arg = builtin_save_expr (arg);
9185 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9186 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9187 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9188 }
9189 }
9190
9191 return NULL_TREE;
9192 }
9193
9194 /* Fold a call to builtin logb/ilogb. */
9195
9196 static tree
9197 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9198 {
9199 if (! validate_arg (arg, REAL_TYPE))
9200 return NULL_TREE;
9201
9202 STRIP_NOPS (arg);
9203
9204 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9205 {
9206 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9207
9208 switch (value->cl)
9209 {
9210 case rvc_nan:
9211 case rvc_inf:
9212 /* If arg is Inf or NaN and we're logb, return it. */
9213 if (TREE_CODE (rettype) == REAL_TYPE)
9214 {
9215 /* For logb(-Inf) we have to return +Inf. */
9216 if (real_isinf (value) && real_isneg (value))
9217 {
9218 REAL_VALUE_TYPE tem;
9219 real_inf (&tem);
9220 return build_real (rettype, tem);
9221 }
9222 return fold_convert_loc (loc, rettype, arg);
9223 }
9224 /* Fall through... */
9225 case rvc_zero:
9226 /* Zero may set errno and/or raise an exception for logb; for
9227 ilogb we don't know FP_ILOGB0. */
9228 return NULL_TREE;
9229 case rvc_normal:
9230 /* For normal numbers, proceed iff radix == 2. In GCC,
9231 normalized significands are in the range [0.5, 1.0). We
9232 want the exponent as if they were [1.0, 2.0) so get the
9233 exponent and subtract 1. */
9234 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9235 return fold_convert_loc (loc, rettype,
9236 build_int_cst (integer_type_node,
9237 REAL_EXP (value)-1));
9238 break;
9239 }
9240 }
9241
9242 return NULL_TREE;
9243 }
9244
9245 /* Fold a call to builtin significand, if radix == 2. */
9246
9247 static tree
9248 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9249 {
9250 if (! validate_arg (arg, REAL_TYPE))
9251 return NULL_TREE;
9252
9253 STRIP_NOPS (arg);
9254
9255 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9256 {
9257 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9258
9259 switch (value->cl)
9260 {
9261 case rvc_zero:
9262 case rvc_nan:
9263 case rvc_inf:
9264 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9265 return fold_convert_loc (loc, rettype, arg);
9266 case rvc_normal:
9267 /* For normal numbers, proceed iff radix == 2. */
9268 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9269 {
9270 REAL_VALUE_TYPE result = *value;
9271 /* In GCC, normalized significands are in the range [0.5,
9272 1.0). We want them to be [1.0, 2.0) so set the
9273 exponent to 1. */
9274 SET_REAL_EXP (&result, 1);
9275 return build_real (rettype, result);
9276 }
9277 break;
9278 }
9279 }
9280
9281 return NULL_TREE;
9282 }
9283
9284 /* Fold a call to builtin frexp; we can assume the base is 2. */
9285
9286 static tree
9287 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9288 {
9289 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9290 return NULL_TREE;
9291
9292 STRIP_NOPS (arg0);
9293
9294 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9295 return NULL_TREE;
9296
9297 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9298
9299 /* Proceed if a valid pointer type was passed in. */
9300 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9301 {
9302 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9303 tree frac, exp;
9304
9305 switch (value->cl)
9306 {
9307 case rvc_zero:
9308 /* For +-0, return (*exp = 0, +-0). */
9309 exp = integer_zero_node;
9310 frac = arg0;
9311 break;
9312 case rvc_nan:
9313 case rvc_inf:
9314 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9315 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9316 case rvc_normal:
9317 {
9318 /* Since the frexp function always expects base 2, and in
9319 GCC normalized significands are already in the range
9320 [0.5, 1.0), we have exactly what frexp wants. */
9321 REAL_VALUE_TYPE frac_rvt = *value;
9322 SET_REAL_EXP (&frac_rvt, 0);
9323 frac = build_real (rettype, frac_rvt);
9324 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9325 }
9326 break;
9327 default:
9328 gcc_unreachable ();
9329 }
9330
9331 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9332 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9333 TREE_SIDE_EFFECTS (arg1) = 1;
9334 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9335 }
9336
9337 return NULL_TREE;
9338 }
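
/* Editorial examples: with significands normalized to [0.5, 1.0),
   the constant folds above give

       __builtin_frexp (8.0, &e)  =>  (*e = 4, 0.5)   since 8.0 == 0.5 * 2^4
       __builtin_frexp (0.0, &e)  =>  (*e = 0, 0.0)

   while NaN and Inf arguments are returned with *e unspecified.  */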
9339
9340 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9341 then we can assume the base is two. If it's false, then we have to
9342 check the mode of the TYPE parameter in certain cases. */
9343
9344 static tree
9345 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9346 tree type, bool ldexp)
9347 {
9348 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9349 {
9350 STRIP_NOPS (arg0);
9351 STRIP_NOPS (arg1);
9352
9353 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9354 if (real_zerop (arg0) || integer_zerop (arg1)
9355 || (TREE_CODE (arg0) == REAL_CST
9356 && !real_isfinite (&TREE_REAL_CST (arg0))))
9357 return omit_one_operand_loc (loc, type, arg0, arg1);
9358
9359 /* If both arguments are constant, then try to evaluate it. */
9360 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9361 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9362 && tree_fits_shwi_p (arg1))
9363 {
9364 /* Bound the maximum adjustment to twice the range of the
9365 mode's valid exponents. Use labs to ensure the range is
9366 positive as a sanity check. */
9367 const long max_exp_adj = 2 *
9368 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9369 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9370
9371 /* Get the user-requested adjustment. */
9372 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9373
9374 /* The requested adjustment must be inside this range. This
9375 is a preliminary cap to avoid things like overflow; we
9376 may still fail to compute the result for other reasons. */
9377 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9378 {
9379 REAL_VALUE_TYPE initial_result;
9380
9381 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9382
9383 /* Ensure we didn't overflow. */
9384 if (! real_isinf (&initial_result))
9385 {
9386 const REAL_VALUE_TYPE trunc_result
9387 = real_value_truncate (TYPE_MODE (type), initial_result);
9388
9389 /* Only proceed if the target mode can hold the
9390 resulting value. */
9391 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9392 return build_real (type, trunc_result);
9393 }
9394 }
9395 }
9396 }
9397
9398 return NULL_TREE;
9399 }
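
/* Editorial examples of the folds above:

       __builtin_ldexp (1.5, 3)   =>  12.0
       __builtin_ldexp (x, 0)     =>  x
       __builtin_scalbn (0.0, n)  =>  0.0   (n kept for side effects)

   the two-constant case is evaluated for scalbn/scalbln only when the
   type's radix is 2.  */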
9400
9401 /* Fold a call to builtin modf. */
9402
9403 static tree
9404 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9405 {
9406 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9407 return NULL_TREE;
9408
9409 STRIP_NOPS (arg0);
9410
9411 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9412 return NULL_TREE;
9413
9414 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9415
9416 /* Proceed if a valid pointer type was passed in. */
9417 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9418 {
9419 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9420 REAL_VALUE_TYPE trunc, frac;
9421
9422 switch (value->cl)
9423 {
9424 case rvc_nan:
9425 case rvc_zero:
9426 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9427 trunc = frac = *value;
9428 break;
9429 case rvc_inf:
9430 /* For +-Inf, return (*arg1 = arg0, +-0). */
9431 frac = dconst0;
9432 frac.sign = value->sign;
9433 trunc = *value;
9434 break;
9435 case rvc_normal:
9436 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9437 real_trunc (&trunc, VOIDmode, value);
9438 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9439 /* If the original number was negative and already
9440 integral, then the fractional part is -0.0. */
9441 if (value->sign && frac.cl == rvc_zero)
9442 frac.sign = value->sign;
9443 break;
9444 }
9445
9446 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9447 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9448 build_real (rettype, trunc));
9449 TREE_SIDE_EFFECTS (arg1) = 1;
9450 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9451 build_real (rettype, frac));
9452 }
9453
9454 return NULL_TREE;
9455 }
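
/* Editorial examples of the modf constant folds above:

       __builtin_modf (2.5, &i)   =>  (*i = 2.0, 0.5)
       __builtin_modf (-2.0, &i)  =>  (*i = -2.0, -0.0)

   the second preserving the sign on the zero fractional part.  */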
9456
9457 /* Given a location LOC, an interclass builtin function decl FNDECL
9458 and its single argument ARG, return a folded expression computing
9459 the same, or NULL_TREE if we either couldn't or didn't want to fold
9460 (the latter happens if there's an RTL instruction available). */
9461
9462 static tree
9463 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9464 {
9465 machine_mode mode;
9466
9467 if (!validate_arg (arg, REAL_TYPE))
9468 return NULL_TREE;
9469
9470 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9471 return NULL_TREE;
9472
9473 mode = TYPE_MODE (TREE_TYPE (arg));
9474
9475 /* If there is no optab, try generic code. */
9476 switch (DECL_FUNCTION_CODE (fndecl))
9477 {
9478 tree result;
9479
9480 CASE_FLT_FN (BUILT_IN_ISINF):
9481 {
9482 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9483 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9484 tree const type = TREE_TYPE (arg);
9485 REAL_VALUE_TYPE r;
9486 char buf[128];
9487
9488 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9489 real_from_string (&r, buf);
9490 result = build_call_expr (isgr_fn, 2,
9491 fold_build1_loc (loc, ABS_EXPR, type, arg),
9492 build_real (type, r));
9493 return result;
9494 }
9495 CASE_FLT_FN (BUILT_IN_FINITE):
9496 case BUILT_IN_ISFINITE:
9497 {
9498 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9499 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9500 tree const type = TREE_TYPE (arg);
9501 REAL_VALUE_TYPE r;
9502 char buf[128];
9503
9504 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9505 real_from_string (&r, buf);
9506 result = build_call_expr (isle_fn, 2,
9507 fold_build1_loc (loc, ABS_EXPR, type, arg),
9508 build_real (type, r));
9509 /*result = fold_build2_loc (loc, UNGT_EXPR,
9510 TREE_TYPE (TREE_TYPE (fndecl)),
9511 fold_build1_loc (loc, ABS_EXPR, type, arg),
9512 build_real (type, r));
9513 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9514 TREE_TYPE (TREE_TYPE (fndecl)),
9515 result);*/
9516 return result;
9517 }
9518 case BUILT_IN_ISNORMAL:
9519 {
9520 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9521 islessequal(fabs(x),DBL_MAX). */
9522 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9523 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9524 tree const type = TREE_TYPE (arg);
9525 REAL_VALUE_TYPE rmax, rmin;
9526 char buf[128];
9527
9528 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9529 real_from_string (&rmax, buf);
9530 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9531 real_from_string (&rmin, buf);
9532 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9533 result = build_call_expr (isle_fn, 2, arg,
9534 build_real (type, rmax));
9535 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9536 build_call_expr (isge_fn, 2, arg,
9537 build_real (type, rmin)));
9538 return result;
9539 }
9540 default:
9541 break;
9542 }
9543
9544 return NULL_TREE;
9545 }
9546
9547 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_isfinite.
9548    ARG is the argument for the call; BUILTIN_INDEX selects the check. */
9549
9550 static tree
9551 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9552 {
9553 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9554 REAL_VALUE_TYPE r;
9555
9556 if (!validate_arg (arg, REAL_TYPE))
9557 return NULL_TREE;
9558
9559 switch (builtin_index)
9560 {
9561 case BUILT_IN_ISINF:
9562 if (!HONOR_INFINITIES (arg))
9563 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9564
9565 if (TREE_CODE (arg) == REAL_CST)
9566 {
9567 r = TREE_REAL_CST (arg);
9568 if (real_isinf (&r))
9569 return real_compare (GT_EXPR, &r, &dconst0)
9570 ? integer_one_node : integer_minus_one_node;
9571 else
9572 return integer_zero_node;
9573 }
9574
9575 return NULL_TREE;
9576
9577 case BUILT_IN_ISINF_SIGN:
9578 {
9579 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9580 /* In a boolean context, GCC will fold the inner COND_EXPR to
9581 1. So e.g. "if (isinf_sign(x))" would be folded to just
9582 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9583 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9584 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9585 tree tmp = NULL_TREE;
9586
9587 arg = builtin_save_expr (arg);
9588
9589 if (signbit_fn && isinf_fn)
9590 {
9591 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9592 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9593
9594 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9595 signbit_call, integer_zero_node);
9596 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9597 isinf_call, integer_zero_node);
9598
9599 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9600 integer_minus_one_node, integer_one_node);
9601 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9602 isinf_call, tmp,
9603 integer_zero_node);
9604 }
9605
9606 return tmp;
9607 }
9608
9609 case BUILT_IN_ISFINITE:
9610 if (!HONOR_NANS (arg)
9611 && !HONOR_INFINITIES (arg))
9612 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9613
9614 if (TREE_CODE (arg) == REAL_CST)
9615 {
9616 r = TREE_REAL_CST (arg);
9617 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9618 }
9619
9620 return NULL_TREE;
9621
9622 case BUILT_IN_ISNAN:
9623 if (!HONOR_NANS (arg))
9624 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9625
9626 if (TREE_CODE (arg) == REAL_CST)
9627 {
9628 r = TREE_REAL_CST (arg);
9629 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9630 }
9631
9632 arg = builtin_save_expr (arg);
9633 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9634
9635 default:
9636 gcc_unreachable ();
9637 }
9638 }
9639
9640 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9641 This builtin will generate code to return the appropriate floating
9642 point classification depending on the value of the floating point
9643 number passed in. The possible return values must be supplied as
9644 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9645 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9646 one floating point argument which is "type generic". */
9647
9648 static tree
9649 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9650 {
9651 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9652 arg, type, res, tmp;
9653 machine_mode mode;
9654 REAL_VALUE_TYPE r;
9655 char buf[128];
9656
9657 /* Verify the required arguments in the original call. */
9658 if (nargs != 6
9659 || !validate_arg (args[0], INTEGER_TYPE)
9660 || !validate_arg (args[1], INTEGER_TYPE)
9661 || !validate_arg (args[2], INTEGER_TYPE)
9662 || !validate_arg (args[3], INTEGER_TYPE)
9663 || !validate_arg (args[4], INTEGER_TYPE)
9664 || !validate_arg (args[5], REAL_TYPE))
9665 return NULL_TREE;
9666
9667 fp_nan = args[0];
9668 fp_infinite = args[1];
9669 fp_normal = args[2];
9670 fp_subnormal = args[3];
9671 fp_zero = args[4];
9672 arg = args[5];
9673 type = TREE_TYPE (arg);
9674 mode = TYPE_MODE (type);
9675 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9676
9677 /* fpclassify(x) ->
9678 isnan(x) ? FP_NAN :
9679 (fabs(x) == Inf ? FP_INFINITE :
9680 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9681 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9682
9683 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9684 build_real (type, dconst0));
9685 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9686 tmp, fp_zero, fp_subnormal);
9687
9688 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9689 real_from_string (&r, buf);
9690 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9691 arg, build_real (type, r));
9692 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9693
9694 if (HONOR_INFINITIES (mode))
9695 {
9696 real_inf (&r);
9697 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9698 build_real (type, r));
9699 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9700 fp_infinite, res);
9701 }
9702
9703 if (HONOR_NANS (mode))
9704 {
9705 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9706 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9707 }
9708
9709 return res;
9710 }
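
/* Editorial sketch of the expansion built above, reading the COND_EXPR
   chain from the inside out (the argument is saved as fabs(x)):

       __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
			     FP_SUBNORMAL, FP_ZERO, x)
	 =>  !(x == x)     ? FP_NAN
	   : x == Inf      ? FP_INFINITE
	   : x >= DBL_MIN  ? FP_NORMAL
	   : x == 0        ? FP_ZERO
	   : FP_SUBNORMAL

   with the NaN and Inf arms emitted only when the mode honors them.  */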
9711
9712 /* Fold a call to an unordered comparison function such as
9713 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9714 being called and ARG0 and ARG1 are the arguments for the call.
9715 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9716 the opposite of the desired result. UNORDERED_CODE is used
9717 for modes that can hold NaNs and ORDERED_CODE is used for
9718 the rest. */
9719
9720 static tree
9721 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9722 enum tree_code unordered_code,
9723 enum tree_code ordered_code)
9724 {
9725 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9726 enum tree_code code;
9727 tree type0, type1;
9728 enum tree_code code0, code1;
9729 tree cmp_type = NULL_TREE;
9730
9731 type0 = TREE_TYPE (arg0);
9732 type1 = TREE_TYPE (arg1);
9733
9734 code0 = TREE_CODE (type0);
9735 code1 = TREE_CODE (type1);
9736
9737 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9738 /* Choose the wider of two real types. */
9739 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9740 ? type0 : type1;
9741 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9742 cmp_type = type0;
9743 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9744 cmp_type = type1;
9745
9746 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9747 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9748
9749 if (unordered_code == UNORDERED_EXPR)
9750 {
9751 if (!HONOR_NANS (arg0))
9752 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9753 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9754 }
9755
9756 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9757 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9758 fold_build2_loc (loc, code, type, arg0, arg1));
9759 }
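
/* Editorial examples: with NaNs honored, the fold above gives

       __builtin_isgreater (x, y)    =>  !(x unle y)
       __builtin_isunordered (x, y)  =>  x unord y

   and without NaNs simply !(x <= y) and 0 respectively, so these
   comparisons never raise "invalid" on quiet NaN operands.  */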
9760
9761 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9762 arithmetic if it can never overflow, or into internal functions that
9763 return both the result of the arithmetic and an overflow flag in
9764 a complex integer result, or some other check for overflow. */
9765
9766 static tree
9767 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9768 tree arg0, tree arg1, tree arg2)
9769 {
9770 enum internal_fn ifn = IFN_LAST;
9771 tree type = TREE_TYPE (TREE_TYPE (arg2));
9772 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9773 switch (fcode)
9774 {
9775 case BUILT_IN_ADD_OVERFLOW:
9776 case BUILT_IN_SADD_OVERFLOW:
9777 case BUILT_IN_SADDL_OVERFLOW:
9778 case BUILT_IN_SADDLL_OVERFLOW:
9779 case BUILT_IN_UADD_OVERFLOW:
9780 case BUILT_IN_UADDL_OVERFLOW:
9781 case BUILT_IN_UADDLL_OVERFLOW:
9782 ifn = IFN_ADD_OVERFLOW;
9783 break;
9784 case BUILT_IN_SUB_OVERFLOW:
9785 case BUILT_IN_SSUB_OVERFLOW:
9786 case BUILT_IN_SSUBL_OVERFLOW:
9787 case BUILT_IN_SSUBLL_OVERFLOW:
9788 case BUILT_IN_USUB_OVERFLOW:
9789 case BUILT_IN_USUBL_OVERFLOW:
9790 case BUILT_IN_USUBLL_OVERFLOW:
9791 ifn = IFN_SUB_OVERFLOW;
9792 break;
9793 case BUILT_IN_MUL_OVERFLOW:
9794 case BUILT_IN_SMUL_OVERFLOW:
9795 case BUILT_IN_SMULL_OVERFLOW:
9796 case BUILT_IN_SMULLL_OVERFLOW:
9797 case BUILT_IN_UMUL_OVERFLOW:
9798 case BUILT_IN_UMULL_OVERFLOW:
9799 case BUILT_IN_UMULLL_OVERFLOW:
9800 ifn = IFN_MUL_OVERFLOW;
9801 break;
9802 default:
9803 gcc_unreachable ();
9804 }
9805 tree ctype = build_complex_type (type);
9806 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9807 2, arg0, arg1);
9808 tree tgt = save_expr (call);
9809 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9810 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9811 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9812 tree store
9813 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9814 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9815 }
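
/* Editorial sketch of the lowering above: a call such as

       __builtin_add_overflow (a, b, &r)

   becomes, in GENERIC terms,

       tmp = .ADD_OVERFLOW (a, b);   /+ complex integer result +/
       r = REALPART_EXPR <tmp>, (bool) IMAGPART_EXPR <tmp>;

   where the real part carries the arithmetic result and the imaginary
   part the overflow flag.  */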
9816
9817 /* Fold a call to built-in function FNDECL with 0 arguments.
9818 This function returns NULL_TREE if no simplification was possible. */
9819
9820 static tree
9821 fold_builtin_0 (location_t loc, tree fndecl)
9822 {
9823 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9824 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9825 switch (fcode)
9826 {
9827 CASE_FLT_FN (BUILT_IN_INF):
9828 case BUILT_IN_INFD32:
9829 case BUILT_IN_INFD64:
9830 case BUILT_IN_INFD128:
9831 return fold_builtin_inf (loc, type, true);
9832
9833 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9834 return fold_builtin_inf (loc, type, false);
9835
9836 case BUILT_IN_CLASSIFY_TYPE:
9837 return fold_builtin_classify_type (NULL_TREE);
9838
9839 default:
9840 break;
9841 }
9842 return NULL_TREE;
9843 }
9844
9845 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9846 This function returns NULL_TREE if no simplification was possible. */
9847
9848 static tree
9849 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9850 {
9851 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9852 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9853 switch (fcode)
9854 {
9855 case BUILT_IN_CONSTANT_P:
9856 {
9857 tree val = fold_builtin_constant_p (arg0);
9858
9859 /* Gimplification will pull the CALL_EXPR for the builtin out of
9860 an if condition. When not optimizing, we'll not CSE it back.
9861 To avoid link-error regressions, return false now. */
9862 if (!val && !optimize)
9863 val = integer_zero_node;
9864
9865 return val;
9866 }
9867
9868 case BUILT_IN_CLASSIFY_TYPE:
9869 return fold_builtin_classify_type (arg0);
9870
9871 case BUILT_IN_STRLEN:
9872 return fold_builtin_strlen (loc, type, arg0);
9873
9874 CASE_FLT_FN (BUILT_IN_FABS):
9875 case BUILT_IN_FABSD32:
9876 case BUILT_IN_FABSD64:
9877 case BUILT_IN_FABSD128:
9878 return fold_builtin_fabs (loc, arg0, type);
9879
9880 case BUILT_IN_ABS:
9881 case BUILT_IN_LABS:
9882 case BUILT_IN_LLABS:
9883 case BUILT_IN_IMAXABS:
9884 return fold_builtin_abs (loc, arg0, type);
9885
9886 CASE_FLT_FN (BUILT_IN_CONJ):
9887 if (validate_arg (arg0, COMPLEX_TYPE)
9888 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9889 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9890 break;
9891
9892 CASE_FLT_FN (BUILT_IN_CREAL):
9893 if (validate_arg (arg0, COMPLEX_TYPE)
9894 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9895 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9896 break;
9897
9898 CASE_FLT_FN (BUILT_IN_CIMAG):
9899 if (validate_arg (arg0, COMPLEX_TYPE)
9900 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9901 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9902 break;
9903
9904 CASE_FLT_FN (BUILT_IN_CCOS):
9905 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9906
9907 CASE_FLT_FN (BUILT_IN_CCOSH):
9908 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9909
9910 CASE_FLT_FN (BUILT_IN_CPROJ):
9911 return fold_builtin_cproj (loc, arg0, type);
9912
9913 CASE_FLT_FN (BUILT_IN_CSIN):
9914 if (validate_arg (arg0, COMPLEX_TYPE)
9915 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9916 return do_mpc_arg1 (arg0, type, mpc_sin);
9917 break;
9918
9919 CASE_FLT_FN (BUILT_IN_CSINH):
9920 if (validate_arg (arg0, COMPLEX_TYPE)
9921 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9922 return do_mpc_arg1 (arg0, type, mpc_sinh);
9923 break;
9924
9925 CASE_FLT_FN (BUILT_IN_CTAN):
9926 if (validate_arg (arg0, COMPLEX_TYPE)
9927 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9928 return do_mpc_arg1 (arg0, type, mpc_tan);
9929 break;
9930
9931 CASE_FLT_FN (BUILT_IN_CTANH):
9932 if (validate_arg (arg0, COMPLEX_TYPE)
9933 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9934 return do_mpc_arg1 (arg0, type, mpc_tanh);
9935 break;
9936
9937 CASE_FLT_FN (BUILT_IN_CLOG):
9938 if (validate_arg (arg0, COMPLEX_TYPE)
9939 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9940 return do_mpc_arg1 (arg0, type, mpc_log);
9941 break;
9942
9943 CASE_FLT_FN (BUILT_IN_CSQRT):
9944 if (validate_arg (arg0, COMPLEX_TYPE)
9945 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9946 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9947 break;
9948
9949 CASE_FLT_FN (BUILT_IN_CASIN):
9950 if (validate_arg (arg0, COMPLEX_TYPE)
9951 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9952 return do_mpc_arg1 (arg0, type, mpc_asin);
9953 break;
9954
9955 CASE_FLT_FN (BUILT_IN_CACOS):
9956 if (validate_arg (arg0, COMPLEX_TYPE)
9957 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9958 return do_mpc_arg1 (arg0, type, mpc_acos);
9959 break;
9960
9961 CASE_FLT_FN (BUILT_IN_CATAN):
9962 if (validate_arg (arg0, COMPLEX_TYPE)
9963 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9964 return do_mpc_arg1 (arg0, type, mpc_atan);
9965 break;
9966
9967 CASE_FLT_FN (BUILT_IN_CASINH):
9968 if (validate_arg (arg0, COMPLEX_TYPE)
9969 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9970 return do_mpc_arg1 (arg0, type, mpc_asinh);
9971 break;
9972
9973 CASE_FLT_FN (BUILT_IN_CACOSH):
9974 if (validate_arg (arg0, COMPLEX_TYPE)
9975 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9976 return do_mpc_arg1 (arg0, type, mpc_acosh);
9977 break;
9978
9979 CASE_FLT_FN (BUILT_IN_CATANH):
9980 if (validate_arg (arg0, COMPLEX_TYPE)
9981 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9982 return do_mpc_arg1 (arg0, type, mpc_atanh);
9983 break;
9984
9985 CASE_FLT_FN (BUILT_IN_CABS):
9986 return fold_builtin_cabs (loc, arg0, type, fndecl);
9987
9988 CASE_FLT_FN (BUILT_IN_CARG):
9989 return fold_builtin_carg (loc, arg0, type);
9990
9991 CASE_FLT_FN (BUILT_IN_SQRT):
9992 return fold_builtin_sqrt (loc, arg0, type);
9993
9994 CASE_FLT_FN (BUILT_IN_CBRT):
9995 return fold_builtin_cbrt (loc, arg0, type);
9996
9997 CASE_FLT_FN (BUILT_IN_ASIN):
9998 if (validate_arg (arg0, REAL_TYPE))
9999 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10000 &dconstm1, &dconst1, true);
10001 break;
10002
10003 CASE_FLT_FN (BUILT_IN_ACOS):
10004 if (validate_arg (arg0, REAL_TYPE))
10005 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10006 &dconstm1, &dconst1, true);
10007 break;
10008
10009 CASE_FLT_FN (BUILT_IN_ATAN):
10010 if (validate_arg (arg0, REAL_TYPE))
10011 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10012 break;
10013
10014 CASE_FLT_FN (BUILT_IN_ASINH):
10015 if (validate_arg (arg0, REAL_TYPE))
10016 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10017 break;
10018
10019 CASE_FLT_FN (BUILT_IN_ACOSH):
10020 if (validate_arg (arg0, REAL_TYPE))
10021 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10022 &dconst1, NULL, true);
10023 break;
10024
10025 CASE_FLT_FN (BUILT_IN_ATANH):
10026 if (validate_arg (arg0, REAL_TYPE))
10027 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10028 &dconstm1, &dconst1, false);
10029 break;
10030
10031 CASE_FLT_FN (BUILT_IN_SIN):
10032 if (validate_arg (arg0, REAL_TYPE))
10033 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10034 break;
10035
10036 CASE_FLT_FN (BUILT_IN_COS):
10037 return fold_builtin_cos (loc, arg0, type, fndecl);
10038
10039 CASE_FLT_FN (BUILT_IN_TAN):
10040 return fold_builtin_tan (arg0, type);
10041
10042 CASE_FLT_FN (BUILT_IN_CEXP):
10043 return fold_builtin_cexp (loc, arg0, type);
10044
10045 CASE_FLT_FN (BUILT_IN_CEXPI):
10046 if (validate_arg (arg0, REAL_TYPE))
10047 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10048 break;
10049
10050 CASE_FLT_FN (BUILT_IN_SINH):
10051 if (validate_arg (arg0, REAL_TYPE))
10052 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10053 break;
10054
10055 CASE_FLT_FN (BUILT_IN_COSH):
10056 return fold_builtin_cosh (loc, arg0, type, fndecl);
10057
10058 CASE_FLT_FN (BUILT_IN_TANH):
10059 if (validate_arg (arg0, REAL_TYPE))
10060 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10061 break;
10062
10063 CASE_FLT_FN (BUILT_IN_ERF):
10064 if (validate_arg (arg0, REAL_TYPE))
10065 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10066 break;
10067
10068 CASE_FLT_FN (BUILT_IN_ERFC):
10069 if (validate_arg (arg0, REAL_TYPE))
10070 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10071 break;
10072
10073 CASE_FLT_FN (BUILT_IN_TGAMMA):
10074 if (validate_arg (arg0, REAL_TYPE))
10075 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10076 break;
10077
10078 CASE_FLT_FN (BUILT_IN_EXP):
10079 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10080
10081 CASE_FLT_FN (BUILT_IN_EXP2):
10082 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10083
10084 CASE_FLT_FN (BUILT_IN_EXP10):
10085 CASE_FLT_FN (BUILT_IN_POW10):
10086 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10087
10088 CASE_FLT_FN (BUILT_IN_EXPM1):
10089 if (validate_arg (arg0, REAL_TYPE))
10090 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10091 break;
10092
10093 CASE_FLT_FN (BUILT_IN_LOG):
10094 if (validate_arg (arg0, REAL_TYPE))
10095 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10096 break;
10097
10098 CASE_FLT_FN (BUILT_IN_LOG2):
10099 if (validate_arg (arg0, REAL_TYPE))
10100 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10101 break;
10102
10103 CASE_FLT_FN (BUILT_IN_LOG10):
10104 if (validate_arg (arg0, REAL_TYPE))
10105 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10106 break;
10107
10108 CASE_FLT_FN (BUILT_IN_LOG1P):
10109 if (validate_arg (arg0, REAL_TYPE))
10110 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10111 &dconstm1, NULL, false);
10112 break;
10113
10114 CASE_FLT_FN (BUILT_IN_J0):
10115 if (validate_arg (arg0, REAL_TYPE))
10116 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10117 NULL, NULL, 0);
10118 break;
10119
10120 CASE_FLT_FN (BUILT_IN_J1):
10121 if (validate_arg (arg0, REAL_TYPE))
10122 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10123 NULL, NULL, 0);
10124 break;
10125
10126 CASE_FLT_FN (BUILT_IN_Y0):
10127 if (validate_arg (arg0, REAL_TYPE))
10128 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10129 &dconst0, NULL, false);
10130 break;
10131
10132 CASE_FLT_FN (BUILT_IN_Y1):
10133 if (validate_arg (arg0, REAL_TYPE))
10134 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10135 &dconst0, NULL, false);
10136 break;
10137
10138 CASE_FLT_FN (BUILT_IN_NAN):
10139 case BUILT_IN_NAND32:
10140 case BUILT_IN_NAND64:
10141 case BUILT_IN_NAND128:
10142 return fold_builtin_nan (arg0, type, true);
10143
10144 CASE_FLT_FN (BUILT_IN_NANS):
10145 return fold_builtin_nan (arg0, type, false);
10146
10147 CASE_FLT_FN (BUILT_IN_FLOOR):
10148 return fold_builtin_floor (loc, fndecl, arg0);
10149
10150 CASE_FLT_FN (BUILT_IN_CEIL):
10151 return fold_builtin_ceil (loc, fndecl, arg0);
10152
10153 CASE_FLT_FN (BUILT_IN_TRUNC):
10154 return fold_builtin_trunc (loc, fndecl, arg0);
10155
10156 CASE_FLT_FN (BUILT_IN_ROUND):
10157 return fold_builtin_round (loc, fndecl, arg0);
10158
10159 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10160 CASE_FLT_FN (BUILT_IN_RINT):
10161 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10162
10163 CASE_FLT_FN (BUILT_IN_ICEIL):
10164 CASE_FLT_FN (BUILT_IN_LCEIL):
10165 CASE_FLT_FN (BUILT_IN_LLCEIL):
10166 CASE_FLT_FN (BUILT_IN_LFLOOR):
10167 CASE_FLT_FN (BUILT_IN_IFLOOR):
10168 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10169 CASE_FLT_FN (BUILT_IN_IROUND):
10170 CASE_FLT_FN (BUILT_IN_LROUND):
10171 CASE_FLT_FN (BUILT_IN_LLROUND):
10172 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10173
10174 CASE_FLT_FN (BUILT_IN_IRINT):
10175 CASE_FLT_FN (BUILT_IN_LRINT):
10176 CASE_FLT_FN (BUILT_IN_LLRINT):
10177 return fold_fixed_mathfn (loc, fndecl, arg0);
10178
10179 case BUILT_IN_BSWAP16:
10180 case BUILT_IN_BSWAP32:
10181 case BUILT_IN_BSWAP64:
10182 return fold_builtin_bswap (fndecl, arg0);
10183
10184 CASE_INT_FN (BUILT_IN_FFS):
10185 CASE_INT_FN (BUILT_IN_CLZ):
10186 CASE_INT_FN (BUILT_IN_CTZ):
10187 CASE_INT_FN (BUILT_IN_CLRSB):
10188 CASE_INT_FN (BUILT_IN_POPCOUNT):
10189 CASE_INT_FN (BUILT_IN_PARITY):
10190 return fold_builtin_bitop (fndecl, arg0);
10191
10192 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10193 return fold_builtin_signbit (loc, arg0, type);
10194
10195 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10196 return fold_builtin_significand (loc, arg0, type);
10197
10198 CASE_FLT_FN (BUILT_IN_ILOGB):
10199 CASE_FLT_FN (BUILT_IN_LOGB):
10200 return fold_builtin_logb (loc, arg0, type);
10201
10202 case BUILT_IN_ISASCII:
10203 return fold_builtin_isascii (loc, arg0);
10204
10205 case BUILT_IN_TOASCII:
10206 return fold_builtin_toascii (loc, arg0);
10207
10208 case BUILT_IN_ISDIGIT:
10209 return fold_builtin_isdigit (loc, arg0);
10210
10211 CASE_FLT_FN (BUILT_IN_FINITE):
10212 case BUILT_IN_FINITED32:
10213 case BUILT_IN_FINITED64:
10214 case BUILT_IN_FINITED128:
10215 case BUILT_IN_ISFINITE:
10216 {
10217 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10218 if (ret)
10219 return ret;
10220 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10221 }
10222
10223 CASE_FLT_FN (BUILT_IN_ISINF):
10224 case BUILT_IN_ISINFD32:
10225 case BUILT_IN_ISINFD64:
10226 case BUILT_IN_ISINFD128:
10227 {
10228 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10229 if (ret)
10230 return ret;
10231 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10232 }
10233
10234 case BUILT_IN_ISNORMAL:
10235 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10236
10237 case BUILT_IN_ISINF_SIGN:
10238 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10239
10240 CASE_FLT_FN (BUILT_IN_ISNAN):
10241 case BUILT_IN_ISNAND32:
10242 case BUILT_IN_ISNAND64:
10243 case BUILT_IN_ISNAND128:
10244 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10245
10246 case BUILT_IN_FREE:
10247 if (integer_zerop (arg0))
10248 return build_empty_stmt (loc);
10249 break;
10250
10251 default:
10252 break;
10253 }
10254
10255 return NULL_TREE;
10257 }
10258
10259 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10260 This function returns NULL_TREE if no simplification was possible. */
10261
10262 static tree
10263 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10264 {
10265 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10266 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10267
10268 switch (fcode)
10269 {
10270 CASE_FLT_FN (BUILT_IN_JN):
10271 if (validate_arg (arg0, INTEGER_TYPE)
10272 && validate_arg (arg1, REAL_TYPE))
10273 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10274 break;
10275
10276 CASE_FLT_FN (BUILT_IN_YN):
10277 if (validate_arg (arg0, INTEGER_TYPE)
10278 && validate_arg (arg1, REAL_TYPE))
10279 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10280 &dconst0, false);
10281 break;
10282
10283 CASE_FLT_FN (BUILT_IN_DREM):
10284 CASE_FLT_FN (BUILT_IN_REMAINDER):
10285 if (validate_arg (arg0, REAL_TYPE)
10286 && validate_arg (arg1, REAL_TYPE))
10287 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10288 break;
10289
10290 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10291 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10292 if (validate_arg (arg0, REAL_TYPE)
10293 && validate_arg (arg1, POINTER_TYPE))
10294 return do_mpfr_lgamma_r (arg0, arg1, type);
10295 break;
10296
10297 CASE_FLT_FN (BUILT_IN_ATAN2):
10298 if (validate_arg (arg0, REAL_TYPE)
10299 && validate_arg (arg1, REAL_TYPE))
10300 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10301 break;
10302
10303 CASE_FLT_FN (BUILT_IN_FDIM):
10304 if (validate_arg (arg0, REAL_TYPE)
10305 && validate_arg (arg1, REAL_TYPE))
10306 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10307 break;
10308
10309 CASE_FLT_FN (BUILT_IN_HYPOT):
10310 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10311
10312 CASE_FLT_FN (BUILT_IN_CPOW):
10313 if (validate_arg (arg0, COMPLEX_TYPE)
10314 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10315 && validate_arg (arg1, COMPLEX_TYPE)
10316 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10317 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10318 break;
10319
10320 CASE_FLT_FN (BUILT_IN_LDEXP):
10321 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10322 CASE_FLT_FN (BUILT_IN_SCALBN):
10323 CASE_FLT_FN (BUILT_IN_SCALBLN):
10324 return fold_builtin_load_exponent (loc, arg0, arg1,
10325 type, /*ldexp=*/false);
10326
10327 CASE_FLT_FN (BUILT_IN_FREXP):
10328 return fold_builtin_frexp (loc, arg0, arg1, type);
10329
10330 CASE_FLT_FN (BUILT_IN_MODF):
10331 return fold_builtin_modf (loc, arg0, arg1, type);
10332
10333 case BUILT_IN_STRSTR:
10334 return fold_builtin_strstr (loc, arg0, arg1, type);
10335
10336 case BUILT_IN_STRSPN:
10337 return fold_builtin_strspn (loc, arg0, arg1);
10338
10339 case BUILT_IN_STRCSPN:
10340 return fold_builtin_strcspn (loc, arg0, arg1);
10341
10342 case BUILT_IN_STRCHR:
10343 case BUILT_IN_INDEX:
10344 return fold_builtin_strchr (loc, arg0, arg1, type);
10345
10346 case BUILT_IN_STRRCHR:
10347 case BUILT_IN_RINDEX:
10348 return fold_builtin_strrchr (loc, arg0, arg1, type);
10349
10350 case BUILT_IN_STRCMP:
10351 return fold_builtin_strcmp (loc, arg0, arg1);
10352
10353 case BUILT_IN_STRPBRK:
10354 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10355
10356 case BUILT_IN_EXPECT:
10357 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10358
10359 CASE_FLT_FN (BUILT_IN_POW):
10360 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10361
10362 CASE_FLT_FN (BUILT_IN_POWI):
10363 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10364
10365 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10366 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10367
10368 CASE_FLT_FN (BUILT_IN_FMIN):
10369 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10370
10371 CASE_FLT_FN (BUILT_IN_FMAX):
10372 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10373
10374 case BUILT_IN_ISGREATER:
10375 return fold_builtin_unordered_cmp (loc, fndecl,
10376 arg0, arg1, UNLE_EXPR, LE_EXPR);
10377 case BUILT_IN_ISGREATEREQUAL:
10378 return fold_builtin_unordered_cmp (loc, fndecl,
10379 arg0, arg1, UNLT_EXPR, LT_EXPR);
10380 case BUILT_IN_ISLESS:
10381 return fold_builtin_unordered_cmp (loc, fndecl,
10382 arg0, arg1, UNGE_EXPR, GE_EXPR);
10383 case BUILT_IN_ISLESSEQUAL:
10384 return fold_builtin_unordered_cmp (loc, fndecl,
10385 arg0, arg1, UNGT_EXPR, GT_EXPR);
10386 case BUILT_IN_ISLESSGREATER:
10387 return fold_builtin_unordered_cmp (loc, fndecl,
10388 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10389 case BUILT_IN_ISUNORDERED:
10390 return fold_builtin_unordered_cmp (loc, fndecl,
10391 arg0, arg1, UNORDERED_EXPR,
10392 NOP_EXPR);
10393
10394 /* We do the folding for va_start in the expander. */
10395 case BUILT_IN_VA_START:
10396 break;
10397
10398 case BUILT_IN_OBJECT_SIZE:
10399 return fold_builtin_object_size (arg0, arg1);
10400
10401 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10402 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10403
10404 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10405 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10406
10407 default:
10408 break;
10409 }
10410 return NULL_TREE;
10411 }
10412
10413 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10414 and ARG2.
10415 This function returns NULL_TREE if no simplification was possible. */
10416
10417 static tree
10418 fold_builtin_3 (location_t loc, tree fndecl,
10419 tree arg0, tree arg1, tree arg2)
10420 {
10421 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10422 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10423 switch (fcode)
10424 {
10425
10426 CASE_FLT_FN (BUILT_IN_SINCOS):
10427 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10428
10429 CASE_FLT_FN (BUILT_IN_FMA):
10430 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10432
10433 CASE_FLT_FN (BUILT_IN_REMQUO):
10434 if (validate_arg (arg0, REAL_TYPE)
10435 && validate_arg (arg1, REAL_TYPE)
10436 && validate_arg (arg2, POINTER_TYPE))
10437 return do_mpfr_remquo (arg0, arg1, arg2);
10438 break;
10439
10440 case BUILT_IN_STRNCMP:
10441 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10442
10443 case BUILT_IN_MEMCHR:
10444 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10445
10446 case BUILT_IN_BCMP:
10447 case BUILT_IN_MEMCMP:
10448 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10449
10450 case BUILT_IN_EXPECT:
10451 return fold_builtin_expect (loc, arg0, arg1, arg2);
10452
10453 case BUILT_IN_ADD_OVERFLOW:
10454 case BUILT_IN_SUB_OVERFLOW:
10455 case BUILT_IN_MUL_OVERFLOW:
10456 case BUILT_IN_SADD_OVERFLOW:
10457 case BUILT_IN_SADDL_OVERFLOW:
10458 case BUILT_IN_SADDLL_OVERFLOW:
10459 case BUILT_IN_SSUB_OVERFLOW:
10460 case BUILT_IN_SSUBL_OVERFLOW:
10461 case BUILT_IN_SSUBLL_OVERFLOW:
10462 case BUILT_IN_SMUL_OVERFLOW:
10463 case BUILT_IN_SMULL_OVERFLOW:
10464 case BUILT_IN_SMULLL_OVERFLOW:
10465 case BUILT_IN_UADD_OVERFLOW:
10466 case BUILT_IN_UADDL_OVERFLOW:
10467 case BUILT_IN_UADDLL_OVERFLOW:
10468 case BUILT_IN_USUB_OVERFLOW:
10469 case BUILT_IN_USUBL_OVERFLOW:
10470 case BUILT_IN_USUBLL_OVERFLOW:
10471 case BUILT_IN_UMUL_OVERFLOW:
10472 case BUILT_IN_UMULL_OVERFLOW:
10473 case BUILT_IN_UMULLL_OVERFLOW:
10474 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10475
10476 default:
10477 break;
10478 }
10479 return NULL_TREE;
10480 }
10481
10482 /* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
10483 arguments.  The final bool parameter (historically IGNORE, true if the
10484 result of the call is ignored) is now unused.  This function returns
10485 NULL_TREE if no simplification was possible. */
10486
10487 tree
10488 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10489 {
10490 tree ret = NULL_TREE;
10491
10492 switch (nargs)
10493 {
10494 case 0:
10495 ret = fold_builtin_0 (loc, fndecl);
10496 break;
10497 case 1:
10498 ret = fold_builtin_1 (loc, fndecl, args[0]);
10499 break;
10500 case 2:
10501 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10502 break;
10503 case 3:
10504 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10505 break;
10506 default:
10507 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10508 break;
10509 }
10510 if (ret)
10511 {
10512 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10513 SET_EXPR_LOCATION (ret, loc);
10514 TREE_NO_WARNING (ret) = 1;
10515 return ret;
10516 }
10517 return NULL_TREE;
10518 }
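/* Editor's note with a hypothetical example: the NOP_EXPR wrapper
   above exists so that, say, a statement

     strlen ("abc");

   whose call has just folded to the constant 3 does not then trigger
   a spurious "statement with no effect" warning; TREE_NO_WARNING on
   the wrapper suppresses it.  */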
10519
10520 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10521 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10522 of arguments in ARGS to be omitted. OLDNARGS is the number of
10523 elements in ARGS. */
10524
10525 static tree
10526 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10527 int skip, tree fndecl, int n, va_list newargs)
10528 {
10529 int nargs = oldnargs - skip + n;
10530 tree *buffer;
10531
10532 if (n > 0)
10533 {
10534 int i, j;
10535
10536 buffer = XALLOCAVEC (tree, nargs);
10537 for (i = 0; i < n; i++)
10538 buffer[i] = va_arg (newargs, tree);
10539 for (j = skip; j < oldnargs; j++, i++)
10540 buffer[i] = args[j];
10541 }
10542 else
10543 buffer = args + skip;
10544
10545 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10546 }
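/* Illustration with hypothetical values: given OLDNARGS == 4,
   ARGS == {a, b, c, d}, SKIP == 2 and N == 1 with new argument x,
   the rebuilt call is

     FNDECL (x, c, d)

   i.e. the N new arguments come first, followed by the old ones
   minus the first SKIP.  */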
10547
10548 /* Return true if FNDECL shouldn't be folded right now.
10549 If a built-in function has an inline attribute always_inline
10550 wrapper, defer folding it until after always_inline functions have
10551 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10552 might not be performed. */
10553
10554 bool
10555 avoid_folding_inline_builtin (tree fndecl)
10556 {
10557 return (DECL_DECLARED_INLINE_P (fndecl)
10558 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10559 && cfun
10560 && !cfun->always_inline_functions_inlined
10561 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10562 }
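/* Example scenario (glibc-style sketch, not taken from this file):
   with -D_FORTIFY_SOURCE=2, <string.h> declares an always_inline
   wrapper roughly like

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
				      __builtin_object_size (__dest, 1));
     }

   Folding the strcpy call before the wrapper is inlined would lose
   the object-size check, hence the deferral implemented above.  */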
10563
10564 /* A wrapper function for builtin folding that prevents warnings for
10565 "statement without effect" and the like, caused by removing the
10566 call node before the warning is generated. */
10567
10568 tree
10569 fold_call_expr (location_t loc, tree exp, bool ignore)
10570 {
10571 tree ret = NULL_TREE;
10572 tree fndecl = get_callee_fndecl (exp);
10573 if (fndecl
10574 && TREE_CODE (fndecl) == FUNCTION_DECL
10575 && DECL_BUILT_IN (fndecl)
10576 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10577 yet. Defer folding until we see all the arguments
10578 (after inlining). */
10579 && !CALL_EXPR_VA_ARG_PACK (exp))
10580 {
10581 int nargs = call_expr_nargs (exp);
10582
10583 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10584 instead the last argument is __builtin_va_arg_pack ().  Defer folding
10585 even in that case, until arguments are finalized. */
10586 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10587 {
10588 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10589 if (fndecl2
10590 && TREE_CODE (fndecl2) == FUNCTION_DECL
10591 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10592 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10593 return NULL_TREE;
10594 }
10595
10596 if (avoid_folding_inline_builtin (fndecl))
10597 return NULL_TREE;
10598
10599 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10600 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10601 CALL_EXPR_ARGP (exp), ignore);
10602 else
10603 {
10604 tree *args = CALL_EXPR_ARGP (exp);
10605 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10606 if (ret)
10607 return ret;
10608 }
10609 }
10610 return NULL_TREE;
10611 }
10612
10613 /* Fold a CALL_EXPR with FN as the function expression (the unnamed tree
10614 parameter, the call's type, is unused).  N arguments are passed in the
10615 array ARGARRAY.  Return NULL_TREE if no simplification was possible. */
10616
10617 tree
10618 fold_builtin_call_array (location_t loc, tree,
10619 tree fn,
10620 int n,
10621 tree *argarray)
10622 {
10623 if (TREE_CODE (fn) != ADDR_EXPR)
10624 return NULL_TREE;
10625
10626 tree fndecl = TREE_OPERAND (fn, 0);
10627 if (TREE_CODE (fndecl) == FUNCTION_DECL
10628 && DECL_BUILT_IN (fndecl))
10629 {
10630 /* If last argument is __builtin_va_arg_pack (), arguments to this
10631 function are not finalized yet. Defer folding until they are. */
10632 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10633 {
10634 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10635 if (fndecl2
10636 && TREE_CODE (fndecl2) == FUNCTION_DECL
10637 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10638 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10639 return NULL_TREE;
10640 }
10641 if (avoid_folding_inline_builtin (fndecl))
10642 return NULL_TREE;
10643 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10644 return targetm.fold_builtin (fndecl, n, argarray, false);
10645 else
10646 return fold_builtin_n (loc, fndecl, argarray, n, false);
10647 }
10648
10649 return NULL_TREE;
10650 }
10651
10652 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10653 along with N new arguments specified as the "..." parameters. SKIP
10654 is the number of arguments in EXP to be omitted. This function is used
10655 to do varargs-to-varargs transformations. */
10656
10657 static tree
10658 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10659 {
10660 va_list ap;
10661 tree t;
10662
10663 va_start (ap, n);
10664 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10665 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10666 va_end (ap);
10667
10668 return t;
10669 }
10670
10671 /* Validate a single argument ARG against a tree code CODE representing
10672 a type. */
10673
10674 static bool
10675 validate_arg (const_tree arg, enum tree_code code)
10676 {
10677 if (!arg)
10678 return false;
10679 else if (code == POINTER_TYPE)
10680 return POINTER_TYPE_P (TREE_TYPE (arg));
10681 else if (code == INTEGER_TYPE)
10682 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10683 return code == TREE_CODE (TREE_TYPE (arg));
10684 }
10685
10686 /* This function validates the types of a function call argument list
10687 against a specified list of tree_codes. If the last specifier is a 0,
10688 that represents an ellipsis; otherwise the last specifier must be a
10689 VOID_TYPE.
10690
10691 This is the GIMPLE version of validate_arglist. Eventually we want to
10692 completely convert builtins.c to work from GIMPLEs and the tree based
10693 validate_arglist will then be removed. */
10694
10695 bool
10696 validate_gimple_arglist (const gcall *call, ...)
10697 {
10698 enum tree_code code;
10699 bool res = false;
10700 va_list ap;
10701 const_tree arg;
10702 size_t i;
10703
10704 va_start (ap, call);
10705 i = 0;
10706
10707 do
10708 {
10709 code = (enum tree_code) va_arg (ap, int);
10710 switch (code)
10711 {
10712 case 0:
10713 /* This signifies an ellipsis; any further arguments are all OK. */
10714 res = true;
10715 goto end;
10716 case VOID_TYPE:
10717 /* This signifies an endlink; if no arguments remain, return
10718 true, otherwise return false. */
10719 res = (i == gimple_call_num_args (call));
10720 goto end;
10721 default:
10722 /* If no parameters remain or the parameter's code does not
10723 match the specified code, return false. Otherwise continue
10724 checking any remaining arguments. */
10725 arg = gimple_call_arg (call, i++);
10726 if (!validate_arg (arg, code))
10727 goto end;
10728 break;
10729 }
10730 }
10731 while (1);
10732
10733 /* We need gotos here since we can only have one VA_CLOSE in a
10734 function. */
10735 end: ;
10736 va_end (ap);
10737
10738 return res;
10739 }
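/* Typical usage (editor's example): a memchr-like signature is
   checked with

     validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
			      INTEGER_TYPE, VOID_TYPE)

   which returns true iff the call has exactly three arguments of
   pointer, integral and integral type.  */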
10740
10741 /* Default target-specific builtin expander that does nothing. */
10742
10743 rtx
10744 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10745 rtx target ATTRIBUTE_UNUSED,
10746 rtx subtarget ATTRIBUTE_UNUSED,
10747 machine_mode mode ATTRIBUTE_UNUSED,
10748 int ignore ATTRIBUTE_UNUSED)
10749 {
10750 return NULL_RTX;
10751 }
10752
10753 /* Returns true if EXP represents data that would potentially reside
10754 in a readonly section. */
10755
10756 bool
10757 readonly_data_expr (tree exp)
10758 {
10759 STRIP_NOPS (exp);
10760
10761 if (TREE_CODE (exp) != ADDR_EXPR)
10762 return false;
10763
10764 exp = get_base_address (TREE_OPERAND (exp, 0));
10765 if (!exp)
10766 return false;
10767
10768 /* Make sure we call decl_readonly_section only for trees it
10769 can handle (since it returns true for everything it doesn't
10770 understand). */
10771 if (TREE_CODE (exp) == STRING_CST
10772 || TREE_CODE (exp) == CONSTRUCTOR
10773 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10774 return decl_readonly_section (exp, 0);
10775 else
10776 return false;
10777 }
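/* Illustrative outcomes (editor's sketch):

     readonly_data_expr (&"abc"[0])          ->  true  (STRING_CST)
     readonly_data_expr (&static_const_var)  ->  true if it lands in a
						 readonly section
     readonly_data_expr (&local_var)         ->  false

   Only STRING_CST, CONSTRUCTOR and static VAR_DECL bases are passed
   on, because decl_readonly_section answers true for anything it
   does not understand.  */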
10778
10779 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10780 to the call, and TYPE is its return type.
10781
10782 Return NULL_TREE if no simplification was possible, otherwise return the
10783 simplified form of the call as a tree.
10784
10785 The simplified form may be a constant or other expression which
10786 computes the same value, but in a more efficient manner (including
10787 calls to other builtin functions).
10788
10789 The call may contain arguments which need to be evaluated, but
10790 which are not useful to determine the result of the call. In
10791 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10792 COMPOUND_EXPR will be an argument which must be evaluated.
10793 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10794 COMPOUND_EXPR in the chain will contain the tree for the simplified
10795 form of the builtin function call. */
10796
10797 static tree
10798 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10799 {
10800 if (!validate_arg (s1, POINTER_TYPE)
10801 || !validate_arg (s2, POINTER_TYPE))
10802 return NULL_TREE;
10803 else
10804 {
10805 tree fn;
10806 const char *p1, *p2;
10807
10808 p2 = c_getstr (s2);
10809 if (p2 == NULL)
10810 return NULL_TREE;
10811
10812 p1 = c_getstr (s1);
10813 if (p1 != NULL)
10814 {
10815 const char *r = strstr (p1, p2);
10816 tree tem;
10817
10818 if (r == NULL)
10819 return build_int_cst (TREE_TYPE (s1), 0);
10820
10821 /* Return an offset into the constant string argument. */
10822 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10823 return fold_convert_loc (loc, type, tem);
10824 }
10825
10826 /* The argument is const char *, and the result is char *, so we need
10827 a type conversion here to avoid a warning. */
10828 if (p2[0] == '\0')
10829 return fold_convert_loc (loc, type, s1);
10830
10831 if (p2[1] != '\0')
10832 return NULL_TREE;
10833
10834 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10835 if (!fn)
10836 return NULL_TREE;
10837
10838 /* New argument list transforming strstr(s1, s2) to
10839 strchr(s1, s2[0]). */
10840 return build_call_expr_loc (loc, fn, 2, s1,
10841 build_int_cst (integer_type_node, p2[0]));
10842 }
10843 }
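/* Worked examples of the folds above (editor's sketch):

     strstr ("hello", "ll")  ->  "hello" + 2      (both constant)
     strstr (s, "")          ->  (char *) s       (empty needle)
     strstr (s, "a")         ->  strchr (s, 'a')  (one-char needle)

   A non-constant needle of two or more characters is left as a real
   strstr call.  */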
10844
10845 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10846 the call, and TYPE is its return type.
10847
10848 Return NULL_TREE if no simplification was possible, otherwise return the
10849 simplified form of the call as a tree.
10850
10851 The simplified form may be a constant or other expression which
10852 computes the same value, but in a more efficient manner (including
10853 calls to other builtin functions).
10854
10855 The call may contain arguments which need to be evaluated, but
10856 which are not useful to determine the result of the call. In
10857 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10858 COMPOUND_EXPR will be an argument which must be evaluated.
10859 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10860 COMPOUND_EXPR in the chain will contain the tree for the simplified
10861 form of the builtin function call. */
10862
10863 static tree
10864 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10865 {
10866 if (!validate_arg (s1, POINTER_TYPE)
10867 || !validate_arg (s2, INTEGER_TYPE))
10868 return NULL_TREE;
10869 else
10870 {
10871 const char *p1;
10872
10873 if (TREE_CODE (s2) != INTEGER_CST)
10874 return NULL_TREE;
10875
10876 p1 = c_getstr (s1);
10877 if (p1 != NULL)
10878 {
10879 char c;
10880 const char *r;
10881 tree tem;
10882
10883 if (target_char_cast (s2, &c))
10884 return NULL_TREE;
10885
10886 r = strchr (p1, c);
10887
10888 if (r == NULL)
10889 return build_int_cst (TREE_TYPE (s1), 0);
10890
10891 /* Return an offset into the constant string argument. */
10892 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10893 return fold_convert_loc (loc, type, tem);
10894 }
10895 return NULL_TREE;
10896 }
10897 }
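/* Worked examples (editor's sketch): with a constant haystack,

     strchr ("hello", 'l')  ->  "hello" + 2
     strchr ("hello", 'z')  ->  (char *) 0

   The character argument must be an INTEGER_CST accepted by
   target_char_cast, otherwise no folding happens.  */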
10898
10899 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10900 the call, and TYPE is its return type.
10901
10902 Return NULL_TREE if no simplification was possible, otherwise return the
10903 simplified form of the call as a tree.
10904
10905 The simplified form may be a constant or other expression which
10906 computes the same value, but in a more efficient manner (including
10907 calls to other builtin functions).
10908
10909 The call may contain arguments which need to be evaluated, but
10910 which are not useful to determine the result of the call. In
10911 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10912 COMPOUND_EXPR will be an argument which must be evaluated.
10913 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10914 COMPOUND_EXPR in the chain will contain the tree for the simplified
10915 form of the builtin function call. */
10916
10917 static tree
10918 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10919 {
10920 if (!validate_arg (s1, POINTER_TYPE)
10921 || !validate_arg (s2, INTEGER_TYPE))
10922 return NULL_TREE;
10923 else
10924 {
10925 tree fn;
10926 const char *p1;
10927
10928 if (TREE_CODE (s2) != INTEGER_CST)
10929 return NULL_TREE;
10930
10931 p1 = c_getstr (s1);
10932 if (p1 != NULL)
10933 {
10934 char c;
10935 const char *r;
10936 tree tem;
10937
10938 if (target_char_cast (s2, &c))
10939 return NULL_TREE;
10940
10941 r = strrchr (p1, c);
10942
10943 if (r == NULL)
10944 return build_int_cst (TREE_TYPE (s1), 0);
10945
10946 /* Return an offset into the constant string argument. */
10947 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10948 return fold_convert_loc (loc, type, tem);
10949 }
10950
10951 if (! integer_zerop (s2))
10952 return NULL_TREE;
10953
10954 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10955 if (!fn)
10956 return NULL_TREE;
10957
10958 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10959 return build_call_expr_loc (loc, fn, 2, s1, s2);
10960 }
10961 }
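/* Worked examples (editor's sketch):

     strrchr ("hello", 'l')  ->  "hello" + 3       (constant haystack)
     strrchr (s, '\0')       ->  strchr (s, '\0')  (both return the
						    terminating NUL)

   Everything else is left for the library call.  */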
10962
10963 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10964 to the call, and TYPE is its return type.
10965
10966 Return NULL_TREE if no simplification was possible, otherwise return the
10967 simplified form of the call as a tree.
10968
10969 The simplified form may be a constant or other expression which
10970 computes the same value, but in a more efficient manner (including
10971 calls to other builtin functions).
10972
10973 The call may contain arguments which need to be evaluated, but
10974 which are not useful to determine the result of the call. In
10975 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10976 COMPOUND_EXPR will be an argument which must be evaluated.
10977 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10978 COMPOUND_EXPR in the chain will contain the tree for the simplified
10979 form of the builtin function call. */
10980
10981 static tree
10982 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10983 {
10984 if (!validate_arg (s1, POINTER_TYPE)
10985 || !validate_arg (s2, POINTER_TYPE))
10986 return NULL_TREE;
10987 else
10988 {
10989 tree fn;
10990 const char *p1, *p2;
10991
10992 p2 = c_getstr (s2);
10993 if (p2 == NULL)
10994 return NULL_TREE;
10995
10996 p1 = c_getstr (s1);
10997 if (p1 != NULL)
10998 {
10999 const char *r = strpbrk (p1, p2);
11000 tree tem;
11001
11002 if (r == NULL)
11003 return build_int_cst (TREE_TYPE (s1), 0);
11004
11005 /* Return an offset into the constant string argument. */
11006 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11007 return fold_convert_loc (loc, type, tem);
11008 }
11009
11010 if (p2[0] == '\0')
11011 /* strpbrk(x, "") == NULL.
11012 Evaluate and ignore s1 in case it had side-effects. */
11013 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11014
11015 if (p2[1] != '\0')
11016 return NULL_TREE; /* Really call strpbrk. */
11017
11018 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11019 if (!fn)
11020 return NULL_TREE;
11021
11022 /* New argument list transforming strpbrk(s1, s2) to
11023 strchr(s1, s2[0]). */
11024 return build_call_expr_loc (loc, fn, 2, s1,
11025 build_int_cst (integer_type_node, p2[0]));
11026 }
11027 }
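/* Worked examples (editor's sketch):

     strpbrk ("hello", "lo")  ->  "hello" + 2      (both constant)
     strpbrk (x, "")          ->  (char *) 0, still evaluating x
     strpbrk (s, "a")         ->  strchr (s, 'a')  (one-char set)  */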
11028
11029 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11030 to the call.
11031
11032 Return NULL_TREE if no simplification was possible, otherwise return the
11033 simplified form of the call as a tree.
11034
11035 The simplified form may be a constant or other expression which
11036 computes the same value, but in a more efficient manner (including
11037 calls to other builtin functions).
11038
11039 The call may contain arguments which need to be evaluated, but
11040 which are not useful to determine the result of the call. In
11041 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11042 COMPOUND_EXPR will be an argument which must be evaluated.
11043 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11044 COMPOUND_EXPR in the chain will contain the tree for the simplified
11045 form of the builtin function call. */
11046
11047 static tree
11048 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11049 {
11050 if (!validate_arg (s1, POINTER_TYPE)
11051 || !validate_arg (s2, POINTER_TYPE))
11052 return NULL_TREE;
11053 else
11054 {
11055 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11056
11057 /* If both arguments are constants, evaluate at compile-time. */
11058 if (p1 && p2)
11059 {
11060 const size_t r = strspn (p1, p2);
11061 return build_int_cst (size_type_node, r);
11062 }
11063
11064 /* If either argument is "", return NULL_TREE. */
11065 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11066 /* Evaluate and ignore both arguments in case either one has
11067 side-effects. */
11068 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11069 s1, s2);
11070 return NULL_TREE;
11071 }
11072 }
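/* Worked examples (editor's sketch):

     strspn ("aab", "a")  ->  2    (both constant)
     strspn ("", s)       ->  0, evaluating s for side effects
     strspn (s, "")       ->  0, evaluating s for side effects  */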
11073
11074 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11075 to the call.
11076
11077 Return NULL_TREE if no simplification was possible, otherwise return the
11078 simplified form of the call as a tree.
11079
11080 The simplified form may be a constant or other expression which
11081 computes the same value, but in a more efficient manner (including
11082 calls to other builtin functions).
11083
11084 The call may contain arguments which need to be evaluated, but
11085 which are not useful to determine the result of the call. In
11086 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11087 COMPOUND_EXPR will be an argument which must be evaluated.
11088 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11089 COMPOUND_EXPR in the chain will contain the tree for the simplified
11090 form of the builtin function call. */
11091
11092 static tree
11093 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11094 {
11095 if (!validate_arg (s1, POINTER_TYPE)
11096 || !validate_arg (s2, POINTER_TYPE))
11097 return NULL_TREE;
11098 else
11099 {
11100 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11101
11102 /* If both arguments are constants, evaluate at compile-time. */
11103 if (p1 && p2)
11104 {
11105 const size_t r = strcspn (p1, p2);
11106 return build_int_cst (size_type_node, r);
11107 }
11108
11109 /* If the first argument is "", return NULL_TREE. */
11110 if (p1 && *p1 == '\0')
11111 {
11112 /* Evaluate and ignore argument s2 in case it has
11113 side-effects. */
11114 return omit_one_operand_loc (loc, size_type_node,
11115 size_zero_node, s2);
11116 }
11117
11118 /* If the second argument is "", return __builtin_strlen(s1). */
11119 if (p2 && *p2 == '\0')
11120 {
11121 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11122
11123 /* If the replacement _DECL isn't initialized, don't do the
11124 transformation. */
11125 if (!fn)
11126 return NULL_TREE;
11127
11128 return build_call_expr_loc (loc, fn, 1, s1);
11129 }
11130 return NULL_TREE;
11131 }
11132 }
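/* Worked examples (editor's sketch):

     strcspn ("hello", "l")  ->  2    (both constant)
     strcspn ("", s)         ->  0, evaluating s for side effects
     strcspn (s, "")         ->  strlen (s)  */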
11133
11134 /* Fold the next_arg or va_start call EXP.  Returns true if an error
11135 was produced, false otherwise.  This is done so that we don't output
11136 the error or warning more than once. */
11137
11138 bool
11139 fold_builtin_next_arg (tree exp, bool va_start_p)
11140 {
11141 tree fntype = TREE_TYPE (current_function_decl);
11142 int nargs = call_expr_nargs (exp);
11143 tree arg;
11144 /* There is a good chance the current input_location points inside the
11145 definition of the va_start macro (perhaps on the token for the
11146 builtin) in a system header, so warnings will not be emitted.
11147 Use the location in real source code. */
11148 source_location current_location =
11149 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11150 NULL);
11151
11152 if (!stdarg_p (fntype))
11153 {
11154 error ("%<va_start%> used in function with fixed args");
11155 return true;
11156 }
11157
11158 if (va_start_p)
11159 {
11160 if (nargs != 2)
11161 {
11162 error ("wrong number of arguments to function %<va_start%>");
11163 return true;
11164 }
11165 arg = CALL_EXPR_ARG (exp, 1);
11166 }
11167 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
11168 once we have checked the arguments and, if needed, issued a warning. */
11169 else
11170 {
11171 if (nargs == 0)
11172 {
11173 /* Evidently an out of date version of <stdarg.h>; can't validate
11174 va_start's second argument, but can still work as intended. */
11175 warning_at (current_location,
11176 OPT_Wvarargs,
11177 "%<__builtin_next_arg%> called without an argument");
11178 return true;
11179 }
11180 else if (nargs > 1)
11181 {
11182 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11183 return true;
11184 }
11185 arg = CALL_EXPR_ARG (exp, 0);
11186 }
11187
11188 if (TREE_CODE (arg) == SSA_NAME)
11189 arg = SSA_NAME_VAR (arg);
11190
11191 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11192 or __builtin_next_arg (0) the first time we see it, after checking
11193 the arguments and if needed issuing a warning. */
11194 if (!integer_zerop (arg))
11195 {
11196 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11197
11198 /* Strip off all nops for the sake of the comparison. This
11199 is not quite the same as STRIP_NOPS. It does more.
11200 We must also strip off INDIRECT_EXPR for C++ reference
11201 parameters. */
11202 while (CONVERT_EXPR_P (arg)
11203 || TREE_CODE (arg) == INDIRECT_REF)
11204 arg = TREE_OPERAND (arg, 0);
11205 if (arg != last_parm)
11206 {
11207 /* FIXME: Sometimes the tree optimizers give us something other
11208 than the last named argument even though the user used the last
11209 argument.  We just warn and continue, so wrong code may be
11210 generated because of it. */
11212 warning_at (current_location,
11213 OPT_Wvarargs,
11214 "second parameter of %<va_start%> not last named argument");
11215 }
11216
11217 /* Undefined by C99 7.15.1.4p4 (va_start):
11218 "If the parameter parmN is declared with the register storage
11219 class, with a function or array type, or with a type that is
11220 not compatible with the type that results after application of
11221 the default argument promotions, the behavior is undefined."
11222 */
11223 else if (DECL_REGISTER (arg))
11224 {
11225 warning_at (current_location,
11226 OPT_Wvarargs,
11227 "undefined behaviour when second parameter of "
11228 "%<va_start%> is declared with %<register%> storage");
11229 }
11230
11231 /* We want to verify the second parameter just once before the tree
11232 optimizers are run and then avoid keeping it in the tree,
11233 as otherwise we could warn even for correct code like:
11234 void foo (int i, ...)
11235 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11236 if (va_start_p)
11237 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11238 else
11239 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11240 }
11241 return false;
11242 }
11243
11244
11245 /* Expand a call EXP to __builtin_object_size. */
11246
11247 static rtx
11248 expand_builtin_object_size (tree exp)
11249 {
11250 tree ost;
11251 int object_size_type;
11252 tree fndecl = get_callee_fndecl (exp);
11253
11254 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11255 {
11256 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11257 exp, fndecl);
11258 expand_builtin_trap ();
11259 return const0_rtx;
11260 }
11261
11262 ost = CALL_EXPR_ARG (exp, 1);
11263 STRIP_NOPS (ost);
11264
11265 if (TREE_CODE (ost) != INTEGER_CST
11266 || tree_int_cst_sgn (ost) < 0
11267 || compare_tree_int (ost, 3) > 0)
11268 {
11269 error ("%Klast argument of %D is not integer constant between 0 and 3",
11270 exp, fndecl);
11271 expand_builtin_trap ();
11272 return const0_rtx;
11273 }
11274
11275 object_size_type = tree_to_shwi (ost);
11276
11277 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11278 }
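/* Illustrative fallback behaviour (editor's note): if earlier passes
   could not determine the size, e.g. for an arbitrary incoming
   pointer p,

     __builtin_object_size (p, 0)  ->  (size_t) -1   (maximum unknown)
     __builtin_object_size (p, 2)  ->  (size_t) 0    (minimum unknown)

   matching the object_size_type < 2 test above.  */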
11279
11280 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11281 FCODE is the BUILT_IN_* to use.
11282 Return NULL_RTX if we failed; the caller should emit a normal call,
11283 otherwise try to get the result in TARGET, if convenient (and in
11284 mode MODE if that's convenient). */
11285
11286 static rtx
11287 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11288 enum built_in_function fcode)
11289 {
11290 tree dest, src, len, size;
11291
11292 if (!validate_arglist (exp,
11293 POINTER_TYPE,
11294 fcode == BUILT_IN_MEMSET_CHK
11295 ? INTEGER_TYPE : POINTER_TYPE,
11296 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11297 return NULL_RTX;
11298
11299 dest = CALL_EXPR_ARG (exp, 0);
11300 src = CALL_EXPR_ARG (exp, 1);
11301 len = CALL_EXPR_ARG (exp, 2);
11302 size = CALL_EXPR_ARG (exp, 3);
11303
11304 if (! tree_fits_uhwi_p (size))
11305 return NULL_RTX;
11306
11307 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11308 {
11309 tree fn;
11310
11311 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11312 {
11313 warning_at (tree_nonartificial_location (exp),
11314 0, "%Kcall to %D will always overflow destination buffer",
11315 exp, get_callee_fndecl (exp));
11316 return NULL_RTX;
11317 }
11318
11319 fn = NULL_TREE;
11320 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11321 mem{cpy,pcpy,move,set} is available. */
11322 switch (fcode)
11323 {
11324 case BUILT_IN_MEMCPY_CHK:
11325 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11326 break;
11327 case BUILT_IN_MEMPCPY_CHK:
11328 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11329 break;
11330 case BUILT_IN_MEMMOVE_CHK:
11331 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11332 break;
11333 case BUILT_IN_MEMSET_CHK:
11334 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11335 break;
11336 default:
11337 break;
11338 }
11339
11340 if (! fn)
11341 return NULL_RTX;
11342
11343 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11344 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11345 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11346 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11347 }
11348 else if (fcode == BUILT_IN_MEMSET_CHK)
11349 return NULL_RTX;
11350 else
11351 {
11352 unsigned int dest_align = get_pointer_alignment (dest);
11353
11354 /* If DEST is not a pointer type, call the normal function. */
11355 if (dest_align == 0)
11356 return NULL_RTX;
11357
11358 /* If SRC and DEST are the same (and not volatile), do nothing. */
11359 if (operand_equal_p (src, dest, 0))
11360 {
11361 tree expr;
11362
11363 if (fcode != BUILT_IN_MEMPCPY_CHK)
11364 {
11365 /* Evaluate and ignore LEN in case it has side-effects. */
11366 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11367 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11368 }
11369
11370 expr = fold_build_pointer_plus (dest, len);
11371 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11372 }
11373
11374 /* __memmove_chk special case. */
11375 if (fcode == BUILT_IN_MEMMOVE_CHK)
11376 {
11377 unsigned int src_align = get_pointer_alignment (src);
11378
11379 if (src_align == 0)
11380 return NULL_RTX;
11381
11382 /* If src is categorized for a readonly section we can use
11383 normal __memcpy_chk. */
11384 if (readonly_data_expr (src))
11385 {
11386 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11387 if (!fn)
11388 return NULL_RTX;
11389 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11390 dest, src, len, size);
11391 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11392 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11393 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11394 }
11395 }
11396 return NULL_RTX;
11397 }
11398 }
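/* Worked example (editor's sketch): for

     __builtin___memcpy_chk (d, s, 16, 32);

   the constant length 16 cannot exceed the known object size 32, so
   the call is expanded as a plain memcpy (d, s, 16).  With the two
   constants swapped, the "will always overflow" warning above fires
   and the checked library call is emitted instead.  */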
11399
11400 /* Emit warning if a buffer overflow is detected at compile time. */
11401
11402 static void
11403 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11404 {
11405 int is_strlen = 0;
11406 tree len, size;
11407 location_t loc = tree_nonartificial_location (exp);
11408
11409 switch (fcode)
11410 {
11411 case BUILT_IN_STRCPY_CHK:
11412 case BUILT_IN_STPCPY_CHK:
11413 /* For __strcat_chk the warning will be emitted only if overflowing
11414 by at least strlen (dest) + 1 bytes. */
11415 case BUILT_IN_STRCAT_CHK:
11416 len = CALL_EXPR_ARG (exp, 1);
11417 size = CALL_EXPR_ARG (exp, 2);
11418 is_strlen = 1;
11419 break;
11420 case BUILT_IN_STRNCAT_CHK:
11421 case BUILT_IN_STRNCPY_CHK:
11422 case BUILT_IN_STPNCPY_CHK:
11423 len = CALL_EXPR_ARG (exp, 2);
11424 size = CALL_EXPR_ARG (exp, 3);
11425 break;
11426 case BUILT_IN_SNPRINTF_CHK:
11427 case BUILT_IN_VSNPRINTF_CHK:
11428 len = CALL_EXPR_ARG (exp, 1);
11429 size = CALL_EXPR_ARG (exp, 3);
11430 break;
11431 default:
11432 gcc_unreachable ();
11433 }
11434
11435 if (!len || !size)
11436 return;
11437
11438 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11439 return;
11440
11441 if (is_strlen)
11442 {
11443 len = c_strlen (len, 1);
11444 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11445 return;
11446 }
11447 else if (fcode == BUILT_IN_STRNCAT_CHK)
11448 {
11449 tree src = CALL_EXPR_ARG (exp, 1);
11450 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11451 return;
11452 src = c_strlen (src, 1);
11453 if (! src || ! tree_fits_uhwi_p (src))
11454 {
11455 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11456 exp, get_callee_fndecl (exp));
11457 return;
11458 }
11459 else if (tree_int_cst_lt (src, size))
11460 return;
11461 }
11462 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11463 return;
11464
11465 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11466 exp, get_callee_fndecl (exp));
11467 }
11468
11469 /* Emit warning if a buffer overflow is detected at compile time
11470 in __sprintf_chk/__vsprintf_chk calls. */
11471
11472 static void
11473 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11474 {
11475 tree size, len, fmt;
11476 const char *fmt_str;
11477 int nargs = call_expr_nargs (exp);
11478
11479 /* Verify the required arguments in the original call. */
11480
11481 if (nargs < 4)
11482 return;
11483 size = CALL_EXPR_ARG (exp, 2);
11484 fmt = CALL_EXPR_ARG (exp, 3);
11485
11486 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11487 return;
11488
11489 /* Check whether the format is a literal string constant. */
11490 fmt_str = c_getstr (fmt);
11491 if (fmt_str == NULL)
11492 return;
11493
11494 if (!init_target_chars ())
11495 return;
11496
11497 /* If the format doesn't contain % args or %%, we know its size. */
11498 if (strchr (fmt_str, target_percent) == 0)
11499 len = build_int_cstu (size_type_node, strlen (fmt_str));
11500 /* If the format is "%s" and first ... argument is a string literal,
11501 we know it too. */
11502 else if (fcode == BUILT_IN_SPRINTF_CHK
11503 && strcmp (fmt_str, target_percent_s) == 0)
11504 {
11505 tree arg;
11506
11507 if (nargs < 5)
11508 return;
11509 arg = CALL_EXPR_ARG (exp, 4);
11510 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11511 return;
11512
11513 len = c_strlen (arg, 1);
11514 if (!len || ! tree_fits_uhwi_p (len))
11515 return;
11516 }
11517 else
11518 return;
11519
11520 if (! tree_int_cst_lt (len, size))
11521 warning_at (tree_nonartificial_location (exp),
11522 0, "%Kcall to %D will always overflow destination buffer",
11523 exp, get_callee_fndecl (exp));
11524 }
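/* Illustrative trigger (editor's note):

     __sprintf_chk (buf, 0, 2, "abc");

   writes four bytes including the terminating NUL into an object of
   size 2; len (3) is not < size (2), so the overflow warning is
   emitted.  */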
11525
11526 /* Emit a warning if free is called with the address of a variable. */
11527
11528 static void
11529 maybe_emit_free_warning (tree exp)
11530 {
11531 tree arg = CALL_EXPR_ARG (exp, 0);
11532
11533 STRIP_NOPS (arg);
11534 if (TREE_CODE (arg) != ADDR_EXPR)
11535 return;
11536
11537 arg = get_base_address (TREE_OPERAND (arg, 0));
11538 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11539 return;
11540
11541 if (SSA_VAR_P (arg))
11542 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11543 "%Kattempt to free a non-heap object %qD", exp, arg);
11544 else
11545 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11546 "%Kattempt to free a non-heap object", exp);
11547 }
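/* Illustrative trigger (editor's note):

     int x;
     free (&x);   ->  "attempt to free a non-heap object 'x'"

   Addresses reached through a dereference (INDIRECT_REF or MEM_REF)
   are skipped above, since they may legitimately point into the
   heap.  */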
11548
11549 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11550 if possible. */
11551
11552 static tree
11553 fold_builtin_object_size (tree ptr, tree ost)
11554 {
11555 unsigned HOST_WIDE_INT bytes;
11556 int object_size_type;
11557
11558 if (!validate_arg (ptr, POINTER_TYPE)
11559 || !validate_arg (ost, INTEGER_TYPE))
11560 return NULL_TREE;
11561
11562 STRIP_NOPS (ost);
11563
11564 if (TREE_CODE (ost) != INTEGER_CST
11565 || tree_int_cst_sgn (ost) < 0
11566 || compare_tree_int (ost, 3) > 0)
11567 return NULL_TREE;
11568
11569 object_size_type = tree_to_shwi (ost);
11570
11571 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11572 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11573 and (size_t) 0 for types 2 and 3. */
11574 if (TREE_SIDE_EFFECTS (ptr))
11575 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11576
11577 if (TREE_CODE (ptr) == ADDR_EXPR)
11578 {
11579 bytes = compute_builtin_object_size (ptr, object_size_type);
11580 if (wi::fits_to_tree_p (bytes, size_type_node))
11581 return build_int_cstu (size_type_node, bytes);
11582 }
11583 else if (TREE_CODE (ptr) == SSA_NAME)
11584 {
11585 /* If object size is not known yet, delay folding until
11586 later. Maybe subsequent passes will help determining
11587 it. */
11588 bytes = compute_builtin_object_size (ptr, object_size_type);
11589 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11590 && wi::fits_to_tree_p (bytes, size_type_node))
11591 return build_int_cstu (size_type_node, bytes);
11592 }
11593
11594 return NULL_TREE;
11595 }
11596
11597 /* Builtins with folding operations that operate on "..." arguments
11598 need special handling; we need to store the arguments in a convenient
11599 data structure before attempting any folding. Fortunately there are
11600 only a few builtins that fall into this category. FNDECL is the
11601 function, EXP is the CALL_EXPR for the call. */
11602
11603 static tree
11604 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11605 {
11606 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11607 tree ret = NULL_TREE;
11608
11609 switch (fcode)
11610 {
11611 case BUILT_IN_FPCLASSIFY:
11612 ret = fold_builtin_fpclassify (loc, args, nargs);
11613 break;
11614
11615 default:
11616 break;
11617 }
11618 if (ret)
11619 {
11620 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11621 SET_EXPR_LOCATION (ret, loc);
11622 TREE_NO_WARNING (ret) = 1;
11623 return ret;
11624 }
11625 return NULL_TREE;
11626 }
11627
11628 /* Initialize format string characters in the target charset. */
11629
11630 bool
11631 init_target_chars (void)
11632 {
11633 static bool init;
11634 if (!init)
11635 {
11636 target_newline = lang_hooks.to_target_charset ('\n');
11637 target_percent = lang_hooks.to_target_charset ('%');
11638 target_c = lang_hooks.to_target_charset ('c');
11639 target_s = lang_hooks.to_target_charset ('s');
11640 if (target_newline == 0 || target_percent == 0 || target_c == 0
11641 || target_s == 0)
11642 return false;
11643
11644 target_percent_c[0] = target_percent;
11645 target_percent_c[1] = target_c;
11646 target_percent_c[2] = '\0';
11647
11648 target_percent_s[0] = target_percent;
11649 target_percent_s[1] = target_s;
11650 target_percent_s[2] = '\0';
11651
11652 target_percent_s_newline[0] = target_percent;
11653 target_percent_s_newline[1] = target_s;
11654 target_percent_s_newline[2] = target_newline;
11655 target_percent_s_newline[3] = '\0';
11656
11657 init = true;
11658 }
11659 return true;
11660 }
11661
11662 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11663 and no overflow/underflow occurred. INEXACT is true if M was not
11664 exactly calculated. TYPE is the tree type for the result. This
11665 function assumes that you cleared the MPFR flags and then
11666 calculated M to see if anything subsequently set a flag prior to
11667 entering this function. Return NULL_TREE if any checks fail. */
11668
11669 static tree
11670 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11671 {
11672 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11673 overflow/underflow occurred. If -frounding-math, proceed iff the
11674 result of calling FUNC was exact. */
11675 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11676 && (!flag_rounding_math || !inexact))
11677 {
11678 REAL_VALUE_TYPE rr;
11679
11680 real_from_mpfr (&rr, m, type, GMP_RNDN);
11681 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11682 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11683 but the mpfr_t is not, then we underflowed in the
11684 conversion. */
11685 if (real_isfinite (&rr)
11686 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11687 {
11688 REAL_VALUE_TYPE rmode;
11689
11690 real_convert (&rmode, TYPE_MODE (type), &rr);
11691 /* Proceed iff the specified mode can hold the value. */
11692 if (real_identical (&rmode, &rr))
11693 return build_real (type, rmode);
11694 }
11695 }
11696 return NULL_TREE;
11697 }
11698
11699 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11700 number and no overflow/underflow occurred. INEXACT is true if M
11701 was not exactly calculated. TYPE is the tree type for the result.
11702 This function assumes that you cleared the MPFR flags and then
11703 calculated M to see if anything subsequently set a flag prior to
11704 entering this function. Return NULL_TREE if any checks fail; if
11705 FORCE_CONVERT is true, bypass the checks. */
11706
11707 static tree
11708 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11709 {
11710 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11711 overflow/underflow occurred. If -frounding-math, proceed iff the
11712 result of calling FUNC was exact. */
11713 if (force_convert
11714 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11715 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11716 && (!flag_rounding_math || !inexact)))
11717 {
11718 REAL_VALUE_TYPE re, im;
11719
11720 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11721 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11722 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11723 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11724 but the mpfr_t is not, then we underflowed in the
11725 conversion. */
11726 if (force_convert
11727 || (real_isfinite (&re) && real_isfinite (&im)
11728 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11729 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11730 {
11731 REAL_VALUE_TYPE re_mode, im_mode;
11732
11733 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11734 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11735 /* Proceed iff the specified mode can hold the value. */
11736 if (force_convert
11737 || (real_identical (&re_mode, &re)
11738 && real_identical (&im_mode, &im)))
11739 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11740 build_real (TREE_TYPE (type), im_mode));
11741 }
11742 }
11743 return NULL_TREE;
11744 }
11745
11746 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11747 FUNC on it and return the resulting value as a tree with type TYPE.
11748 If MIN and/or MAX are not NULL, then the supplied ARG must be
11749 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11750 acceptable values, otherwise they are not. The mpfr precision is
11751 set to the precision of TYPE. We assume that function FUNC returns
11752 zero if the result could be calculated exactly within the requested
11753 precision. */
11754
11755 static tree
11756 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11757 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11758 bool inclusive)
11759 {
11760 tree result = NULL_TREE;
11761
11762 STRIP_NOPS (arg);
11763
11764 /* To proceed, MPFR must exactly represent the target floating point
11765 format, which only happens when the target base equals two. */
11766 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11767 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11768 {
11769 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11770
11771 if (real_isfinite (ra)
10772 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
10773 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
10774 {
10775 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10776 const int prec = fmt->p;
10777 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11778 int inexact;
11779 mpfr_t m;
11780
11781 mpfr_init2 (m, prec);
11782 mpfr_from_real (m, ra, GMP_RNDN);
11783 mpfr_clear_flags ();
11784 inexact = func (m, m, rnd);
11785 result = do_mpfr_ckconv (m, type, inexact);
11786 mpfr_clear (m);
11787 }
11788 }
11789
11790 return result;
11791 }
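/* Typical calls from the one-argument folders (editor's recap of
   patterns used earlier in this file):

     do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, 0);
     do_mpfr_arg1 (arg, type, mpfr_asin, &dconstm1, &dconst1, true);

   the first folds sin of any finite constant, the second restricts
   asin to its domain [-1, 1].  */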
11792
11793 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11794 FUNC on it and return the resulting value as a tree with type TYPE.
11795 The mpfr precision is set to the precision of TYPE. We assume that
11796 function FUNC returns zero if the result could be calculated
11797 exactly within the requested precision. */
11798
11799 static tree
11800 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11801 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11802 {
11803 tree result = NULL_TREE;
11804
11805 STRIP_NOPS (arg1);
11806 STRIP_NOPS (arg2);
11807
11808 /* To proceed, MPFR must exactly represent the target floating point
11809 format, which only happens when the target base equals two. */
11810 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11811 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11812 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11813 {
11814 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11815 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11816
11817 if (real_isfinite (ra1) && real_isfinite (ra2))
11818 {
11819 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11820 const int prec = fmt->p;
10821 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11822 int inexact;
11823 mpfr_t m1, m2;
11824
11825 mpfr_inits2 (prec, m1, m2, NULL);
11826 mpfr_from_real (m1, ra1, GMP_RNDN);
11827 mpfr_from_real (m2, ra2, GMP_RNDN);
11828 mpfr_clear_flags ();
11829 inexact = func (m1, m1, m2, rnd);
11830 result = do_mpfr_ckconv (m1, type, inexact);
11831 mpfr_clears (m1, m2, NULL);
11832 }
11833 }
11834
11835 return result;
11836 }
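/* Illustrative use (editor's note): the two-argument folders call
   e.g.

     do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2)

   so a constant call such as atan2 (1.0, 1.0) folds to a REAL_CST
   holding pi/4, computed to the precision of TYPE.  */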
11837
11838 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11839 FUNC on it and return the resulting value as a tree with type TYPE.
11840 The mpfr precision is set to the precision of TYPE. We assume that
11841 function FUNC returns zero if the result could be calculated
11842 exactly within the requested precision. */
11843
11844 static tree
11845 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11846 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11847 {
11848 tree result = NULL_TREE;
11849
11850 STRIP_NOPS (arg1);
11851 STRIP_NOPS (arg2);
11852 STRIP_NOPS (arg3);
11853
11854 /* To proceed, MPFR must exactly represent the target floating point
11855 format, which only happens when the target base equals two. */
11856 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11857 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11858 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11859 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11860 {
11861 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11862 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11863 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11864
11865 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11866 {
11867 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11868 const int prec = fmt->p;
11869 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11870 int inexact;
11871 mpfr_t m1, m2, m3;
11872
11873 mpfr_inits2 (prec, m1, m2, m3, NULL);
11874 mpfr_from_real (m1, ra1, GMP_RNDN);
11875 mpfr_from_real (m2, ra2, GMP_RNDN);
11876 mpfr_from_real (m3, ra3, GMP_RNDN);
11877 mpfr_clear_flags ();
11878 inexact = func (m1, m1, m2, m3, rnd);
11879 result = do_mpfr_ckconv (m1, type, inexact);
11880 mpfr_clears (m1, m2, m3, NULL);
11881 }
11882 }
11883
11884 return result;
11885 }
11886
11887 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11888 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11889 If ARG_SINP and ARG_COSP are NULL then the result is returned
11890 as a complex value.
11891 The type is taken from the type of ARG and is used for setting the
11892 precision of the calculation and results. */
11893
11894 static tree
11895 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11896 {
11897 tree const type = TREE_TYPE (arg);
11898 tree result = NULL_TREE;
11899
11900 STRIP_NOPS (arg);
11901
11902 /* To proceed, MPFR must exactly represent the target floating point
11903 format, which only happens when the target base equals two. */
11904 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11905 && TREE_CODE (arg) == REAL_CST
11906 && !TREE_OVERFLOW (arg))
11907 {
11908 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11909
11910 if (real_isfinite (ra))
11911 {
11912 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11913 const int prec = fmt->p;
11914 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11915 tree result_s, result_c;
11916 int inexact;
11917 mpfr_t m, ms, mc;
11918
11919 mpfr_inits2 (prec, m, ms, mc, NULL);
11920 mpfr_from_real (m, ra, GMP_RNDN);
11921 mpfr_clear_flags ();
11922 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11923 result_s = do_mpfr_ckconv (ms, type, inexact);
11924 result_c = do_mpfr_ckconv (mc, type, inexact);
11925 mpfr_clears (m, ms, mc, NULL);
11926 if (result_s && result_c)
11927 {
11928 /* If we are to return the result as a complex value, do so. */
11929 if (!arg_sinp && !arg_cosp)
11930 return build_complex (build_complex_type (type),
11931 result_c, result_s);
11932
11933 /* Dereference the sin/cos pointer arguments. */
11934 arg_sinp = build_fold_indirect_ref (arg_sinp);
11935 arg_cosp = build_fold_indirect_ref (arg_cosp);
11936 /* Proceed iff valid pointer types were passed in. */
11937 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11938 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11939 {
11940 /* Set the values. */
11941 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
11942 result_s);
11943 TREE_SIDE_EFFECTS (result_s) = 1;
11944 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
11945 result_c);
11946 TREE_SIDE_EFFECTS (result_c) = 1;
11947 /* Combine the assignments into a compound expr. */
11948 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11949 result_s, result_c));
11950 }
11951 }
11952 }
11953 }
11954 return result;
11955 }
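/* Illustrative result shapes (editor's note): for constant 0.0,

     sincos (0.0, &s, &c)  ->  (s = 0.0, c = 1.0)

   a COMPOUND_EXPR of the two assignments built above, while with
   NULL pointer arguments the pair comes back as the complex constant
   1.0 + 0.0i (cos as the real part, sin as the imaginary part),
   which is how cexpi is folded.  */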
11956
11957 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11958 two-argument mpfr order N Bessel function FUNC on them and return
11959 the resulting value as a tree with type TYPE. The mpfr precision
11960 is set to the precision of TYPE. We assume that function FUNC
11961 returns zero if the result could be calculated exactly within the
11962 requested precision. */
11963 static tree
11964 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
11965 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
11966 const REAL_VALUE_TYPE *min, bool inclusive)
11967 {
11968 tree result = NULL_TREE;
11969
11970 STRIP_NOPS (arg1);
11971 STRIP_NOPS (arg2);
11972
11973 /* To proceed, MPFR must exactly represent the target floating point
11974 format, which only happens when the target base equals two. */
11975 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11976 && tree_fits_shwi_p (arg1)
11977 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11978 {
11979 const HOST_WIDE_INT n = tree_to_shwi (arg1);
11980 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
11981
11982 if (n == (long)n
11983 && real_isfinite (ra)
10984 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
10985 {
10986 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10987 const int prec = fmt->p;
10988 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11989 int inexact;
11990 mpfr_t m;
11991
11992 mpfr_init2 (m, prec);
11993 mpfr_from_real (m, ra, GMP_RNDN);
11994 mpfr_clear_flags ();
11995 inexact = func (m, n, m, rnd);
11996 result = do_mpfr_ckconv (m, type, inexact);
11997 mpfr_clear (m);
11998 }
11999 }
12000
12001 return result;
12002 }
12003
12004 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12005 the pointer *(ARG_QUO) and return the result. The type is taken
12006 from the type of ARG0 and is used for setting the precision of the
12007 calculation and results. */
12008
12009 static tree
12010 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12011 {
12012 tree const type = TREE_TYPE (arg0);
12013 tree result = NULL_TREE;
12014
12015 STRIP_NOPS (arg0);
12016 STRIP_NOPS (arg1);
12017
12018 /* To proceed, MPFR must exactly represent the target floating point
12019 format, which only happens when the target base equals two. */
12020 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12021 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12022 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12023 {
12024 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12025 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12026
12027 if (real_isfinite (ra0) && real_isfinite (ra1))
12028 {
12029 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12030 const int prec = fmt->p;
12031 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12032 tree result_rem;
12033 long integer_quo;
12034 mpfr_t m0, m1;
12035
12036 mpfr_inits2 (prec, m0, m1, NULL);
12037 mpfr_from_real (m0, ra0, GMP_RNDN);
12038 mpfr_from_real (m1, ra1, GMP_RNDN);
12039 mpfr_clear_flags ();
12040 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12041 /* Remquo is independent of the rounding mode, so pass
12042 inexact=0 to do_mpfr_ckconv(). */
12043 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12044 mpfr_clears (m0, m1, NULL);
12045 if (result_rem)
12046 {
12047 /* MPFR calculates quo in the host's long so it may
12048 return more bits in quo than the target int can hold
12049 if sizeof(host long) > sizeof(target int). This can
12050 happen even for native compilers in LP64 mode. In
12051 these cases, reduce quo modulo the largest value the
12052 target int can hold while leaving one bit for the
12053 sign. */
12054 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12055 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12056
12057 /* Dereference the quo pointer argument. */
12058 arg_quo = build_fold_indirect_ref (arg_quo);
12059 /* Proceed iff a valid pointer type was passed in. */
12060 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12061 {
12062 /* Set the value. */
12063 tree result_quo
12064 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12065 build_int_cst (TREE_TYPE (arg_quo),
12066 integer_quo));
12067 TREE_SIDE_EFFECTS (result_quo) = 1;
12068 /* Combine the quo assignment with the rem. */
12069 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12070 result_quo, result_rem));
12071 }
12072 }
12073 }
12074 }
12075 return result;
12076 }
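/* Illustrative fold (editor's note): with constant operands,

     remquo (5.0, 2.0, &q)  ->  (q = 2, 1.0)

   the rounded quotient is stored through the pointer and the IEEE
   remainder becomes the value of the whole expression.  */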
12077
12078 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12079 resulting value as a tree with type TYPE. The mpfr precision is
12080 set to the precision of TYPE. We assume that this mpfr function
12081 returns zero if the result could be calculated exactly within the
12082 requested precision. In addition, the integer pointer represented
12083 by ARG_SG will be dereferenced and set to the appropriate signgam
12084 (-1,1) value. */
12085
static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}

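/* Illustrative sketch (compiled out, not part of the compiler): the
   MPFR call underlying do_mpfr_lgamma_r.  For x == -0.5 we have
   Gamma(-0.5) == -2*sqrt(pi), so mpfr_lgamma returns
   log(2*sqrt(pi)) ~= 1.2655 and sets the sign output to -1, exactly
   the pair stored into the result and *ARG_SG above.  */
#if 0
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t x;
  int sg;

  mpfr_init2 (x, 53);
  mpfr_set_d (x, -0.5, GMP_RNDN);
  mpfr_lgamma (x, &sg, x, GMP_RNDN);  /* x ~= 1.2655, sg == -1.  */
  printf ("lgamma = %g, signgam = %d\n", mpfr_get_d (x, GMP_RNDN), sg);
  mpfr_clear (x);
  return 0;
}
#endif
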
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}

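/* Illustrative sketch (compiled out, not part of the compiler): a
   one-argument mpc call of the kind do_mpc_arg1 wraps, here mpc_cos.
   cos(i) == cosh(1) ~= 1.5431, so folding ccos(I) at compile time
   would produce that real constant with a zero imaginary part.  */
#if 0
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t z;

  mpc_init2 (z, 53);
  mpc_set_d_d (z, 0.0, 1.0, MPC_RNDNN);  /* z = 0 + 1i.  */
  mpc_cos (z, z, MPC_RNDNN);             /* z ~= 1.5431 + 0i.  */
  printf ("re = %g, im = %g\n",
          mpfr_get_d (mpc_realref (z), GMP_RNDN),
          mpfr_get_d (mpc_imagref (z), GMP_RNDN));
  mpc_clear (z);
  return 0;
}
#endif
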
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}

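/* Illustrative sketch (compiled out, not part of the compiler): a
   two-argument mpc call of the kind do_mpc_arg2 wraps, here mpc_pow.
   (0 + 1i)^2 == -1, so folding cpow(I, 2) yields the constant
   -1 + 0i.  */
#if 0
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t z, e;

  mpc_init2 (z, 53);
  mpc_init2 (e, 53);
  mpc_set_d_d (z, 0.0, 1.0, MPC_RNDNN);  /* z = i.  */
  mpc_set_d_d (e, 2.0, 0.0, MPC_RNDNN);  /* e = 2.  */
  mpc_pow (z, z, e, MPC_RNDNN);          /* z == -1 + 0i.  */
  printf ("re = %g, im = %g\n",
          mpfr_get_d (mpc_realref (z), GMP_RNDN),
          mpfr_get_d (mpc_imagref (z), GMP_RNDN));
  mpc_clear (z);
  mpc_clear (e);
  return 0;
}
#endif
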
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, which would otherwise be
   caused by removing the call node before the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from the original call
                 to the expansion of the builtin.  Otherwise things like
                 maybe_emit_chk_warning, which operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}

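/* Example of the effect (illustrative only): for a statement such as

     double d = __builtin_sqrt (4.0);

   fold_call_stmt () dispatches to fold_builtin_n (), which folds the
   call to the REAL_CST 2.0 while preserving the call's location, so
   any later diagnostics still point at the original source line.  */
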
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}

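/* Example of what triggers this (illustrative only): a declaration
   that renames a builtin at the assembler level, e.g.

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("__my_memcpy");

   must redirect not just explicit memcpy calls but also the block-move
   libfunc that the expander emits for struct copies, which is why the
   BUILT_IN_MEMCPY case above resets memcpy_libfunc as well.  */
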
/* Return true if DECL is a builtin that expands to a constant or
   similarly simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one that is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
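
/* Illustrative sketch (compiled out, not part of the compiler): how a
   caller might consult these predicates when costing a call; the
   inliner heuristics use them in this spirit.  */
#if 0
  {
    tree fn = builtin_decl_explicit (BUILT_IN_POPCOUNT);
    if (is_inexpensive_builtin (fn))
      ; /* __builtin_popcount usually expands to an instruction or two,
	   so a call to it can be treated as cheap.  */
  }
#endif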