/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "hash-set.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "predict.h"
#include "hashtab.h"
#include "hard-reg-set.h"
#include "function.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "ipa-ref.h"
#include "lto-streamer.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees, making sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or
   names one of the Cilk Plus runtime entry points recognized when
   -fcilkplus is enabled.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
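
/* Illustrative examples: is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__sync_fetch_and_add") return true, while
   is_builtin_name ("memcpy") returns false.  */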


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
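
/* Illustrative (hypothetical) numbers: if the address of EXP is known
   to be of the form 16 * k + 4 bytes for some k, then M is 16 bytes,
   stored as 128 bits in *ALIGNP, and N is 4 bytes, stored as 32 bits
   in *BITPOSP.  */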

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
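
/* Worked example (hypothetical numbers): with align == 128 and
   bitpos == 32 from get_object_alignment_1, the code above returns
   32 & -32 == 32 bits, i.e. only 4-byte alignment is guaranteed.  */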

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, BITS_PER_UNIT is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
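
/* Illustrative example: if SRC folds to the string constant "hello"
   with a constant offset of 2, the code above returns ssize_int (3),
   the length of the tail "llo".  */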

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
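
/* Illustrative example: on a little-endian target with 8-bit units,
   c_readstr ("abc", SImode) produces the constant 0x00636261: 'a' ends
   up in the low byte, and the high byte is zero because CH stays zero
   once the terminating NUL has been read.  */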

/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and store it in the variable pointed to
   by P.  Otherwise return one.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
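
/* Illustrative example: for an INTEGER_CST with value 65 this stores
   'A' in *P and returns zero; it returns one when CST is not an
   INTEGER_CST or when the target character value cannot be represented
   in a host char.  */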

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */
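
/* A sketch of the jmp_buf layout assumed by the code below, in words
   of Pmode size:
     word 0: the caller's frame pointer (targetm.builtin_setjmp_frame_value)
     word 1: the address of RECEIVER_LABEL
     word 2 and up: the machine-dependent stack save area.  */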

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
         previously set to the start of the virtual area corresponding to
         the stacked variables when we branched here and now needs to be
         adjusted to the actual hardware fp value.

         Assignments to virtual registers are converted by
         instantiate_virtual_regs into the corresponding assignment
         to the underlying register (fp in this case) that makes
         the original assignment true.
         So the following insn will actually be decrementing fp by
         STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
         Mark it used (so that the previous assignment remains live once
         the frame pointer is eliminated) and clobbered (to represent the
         implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}

/* Return true if more arguments remain to be iterated by ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
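
/* Example use: validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
   VOID_TYPE) accepts exactly one pointer argument followed by one
   integer argument; writing 0 instead of the trailing VOID_TYPE would
   additionally accept any number of further arguments.  */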

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */
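
/* For example, a source-level call __builtin_prefetch (p, 1, 3)
   requests a prefetch of *p for writing (second argument 1) with
   maximum temporal locality (third argument 3); both optional
   arguments must be compile-time constants, as checked below.  */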

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}
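
/* The block laid out above therefore looks like
     [arg pointer]
     [structure value address, if not passed as an invisible argument]
     [each argument register, padded to its natural alignment]
   and the same layout is walked again by expand_builtin_apply_args_1
   and expand_builtin_apply below.  */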

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed it to us, not
     as we might have pretended it was passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1586
1587 /* Perform an untyped call and save the state required to perform an
1588 untyped return of whatever value was returned by the given function. */
1589
1590 static rtx
1591 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1592 {
1593 int size, align, regno;
1594 machine_mode mode;
1595 rtx incoming_args, result, reg, dest, src;
1596 rtx_call_insn *call_insn;
1597 rtx old_stack_level = 0;
1598 rtx call_fusage = 0;
1599 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1600
1601 arguments = convert_memory_address (Pmode, arguments);
1602
1603 /* Create a block where the return registers can be saved. */
1604 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1605
1606 /* Fetch the arg pointer from the ARGUMENTS block. */
1607 incoming_args = gen_reg_rtx (Pmode);
1608 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1609 #ifndef STACK_GROWS_DOWNWARD
1610 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1611 incoming_args, 0, OPTAB_LIB_WIDEN);
1612 #endif
1613
1614 /* Push a new argument block and copy the arguments. Do not allow
1615 the (potential) memcpy call below to interfere with our stack
1616 manipulations. */
1617 do_pending_stack_adjust ();
1618 NO_DEFER_POP;
1619
1620 /* Save the stack with nonlocal if available. */
1621 #ifdef HAVE_save_stack_nonlocal
1622 if (HAVE_save_stack_nonlocal)
1623 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1624 else
1625 #endif
1626 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1627
1628 /* Allocate a block of memory on the stack and copy the memory
1629 arguments to the outgoing arguments address. We can pass TRUE
1630 as the 4th argument because we just saved the stack pointer
1631 and will restore it right after the call. */
1632 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1633
1634 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1635 may have already set current_function_calls_alloca to true.
1636 current_function_calls_alloca won't be set if argsize is zero,
1637 so we have to guarantee need_drap is true here. */
1638 if (SUPPORTS_STACK_ALIGNMENT)
1639 crtl->need_drap = true;
1640
1641 dest = virtual_outgoing_args_rtx;
1642 #ifndef STACK_GROWS_DOWNWARD
1643 if (CONST_INT_P (argsize))
1644 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1645 else
1646 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1647 #endif
1648 dest = gen_rtx_MEM (BLKmode, dest);
1649 set_mem_align (dest, PARM_BOUNDARY);
1650 src = gen_rtx_MEM (BLKmode, incoming_args);
1651 set_mem_align (src, PARM_BOUNDARY);
1652 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1653
1654 /* Refer to the argument block. */
1655 apply_args_size ();
1656 arguments = gen_rtx_MEM (BLKmode, arguments);
1657 set_mem_align (arguments, PARM_BOUNDARY);
1658
1659 /* Walk past the arg-pointer and structure value address. */
1660 size = GET_MODE_SIZE (Pmode);
1661 if (struct_value)
1662 size += GET_MODE_SIZE (Pmode);
1663
1664 /* Restore each of the registers previously saved. Make USE insns
1665 for each of these registers for use in making the call. */
1666 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1667 if ((mode = apply_args_mode[regno]) != VOIDmode)
1668 {
1669 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1670 if (size % align != 0)
1671 size = CEIL (size, align) * align;
1672 reg = gen_rtx_REG (mode, regno);
1673 emit_move_insn (reg, adjust_address (arguments, mode, size));
1674 use_reg (&call_fusage, reg);
1675 size += GET_MODE_SIZE (mode);
1676 }
1677
1678 /* Restore the structure value address unless this is passed as an
1679 "invisible" first argument. */
1680 size = GET_MODE_SIZE (Pmode);
1681 if (struct_value)
1682 {
1683 rtx value = gen_reg_rtx (Pmode);
1684 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1685 emit_move_insn (struct_value, value);
1686 if (REG_P (struct_value))
1687 use_reg (&call_fusage, struct_value);
1688 size += GET_MODE_SIZE (Pmode);
1689 }
1690
1691 /* All arguments and registers used for the call are set up by now! */
1692 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1693
1694 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no work
1695 is needed, and we don't want to load it into a register as an optimization,
1696 because prepare_call_address already did that if it should be done. */
1697 if (GET_CODE (function) != SYMBOL_REF)
1698 function = memory_address (FUNCTION_MODE, function);
1699
1700 /* Generate the actual call instruction and save the return value. */
1701 #ifdef HAVE_untyped_call
1702 if (HAVE_untyped_call)
1703 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1704 result, result_vector (1, result)));
1705 else
1706 #endif
1707 #ifdef HAVE_call_value
1708 if (HAVE_call_value)
1709 {
1710 rtx valreg = 0;
1711
1712 /* Locate the unique return register. It is not possible to
1713 express a call that sets more than one return register using
1714 call_value; use untyped_call for that. In fact, untyped_call
1715 only needs to save the return registers in the given block. */
1716 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1717 if ((mode = apply_result_mode[regno]) != VOIDmode)
1718 {
1719 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1720
1721 valreg = gen_rtx_REG (mode, regno);
1722 }
1723
1724 emit_call_insn (GEN_CALL_VALUE (valreg,
1725 gen_rtx_MEM (FUNCTION_MODE, function),
1726 const0_rtx, NULL_RTX, const0_rtx));
1727
1728 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1729 }
1730 else
1731 #endif
1732 gcc_unreachable ();
1733
1734 /* Find the CALL insn we just emitted, and attach the register usage
1735 information. */
1736 call_insn = last_call_insn ();
1737 add_function_usage_to (call_insn, call_fusage);
1738
1739 /* Restore the stack. */
1740 #ifdef HAVE_save_stack_nonlocal
1741 if (HAVE_save_stack_nonlocal)
1742 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1743 else
1744 #endif
1745 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1746 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1747
1748 OK_DEFER_POP;
1749
1750 /* Return the address of the result block. */
1751 result = copy_addr_to_reg (XEXP (result, 0));
1752 return convert_memory_address (ptr_mode, result);
1753 }
1754
1755 /* Perform an untyped return. */
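/* This expands __builtin_return (RESULT), where RESULT is the return
   value block produced by a prior __builtin_apply.  */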
1756
1757 static void
1758 expand_builtin_return (rtx result)
1759 {
1760 int size, align, regno;
1761 machine_mode mode;
1762 rtx reg;
1763 rtx_insn *call_fusage = 0;
1764
1765 result = convert_memory_address (Pmode, result);
1766
1767 apply_result_size ();
1768 result = gen_rtx_MEM (BLKmode, result);
1769
1770 #ifdef HAVE_untyped_return
1771 if (HAVE_untyped_return)
1772 {
1773 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1774 emit_barrier ();
1775 return;
1776 }
1777 #endif
1778
1779 /* Restore the return value and note that each value is used. */
1780 size = 0;
1781 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1782 if ((mode = apply_result_mode[regno]) != VOIDmode)
1783 {
1784 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1785 if (size % align != 0)
1786 size = CEIL (size, align) * align;
1787 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1788 emit_move_insn (reg, adjust_address (result, mode, size));
1789
1790 push_to_sequence (call_fusage);
1791 emit_use (reg);
1792 call_fusage = get_insns ();
1793 end_sequence ();
1794 size += GET_MODE_SIZE (mode);
1795 }
1796
1797 /* Put the USE insns before the return. */
1798 emit_insn (call_fusage);
1799
1800 /* Return whatever value was restored by jumping directly to the end
1801 of the function. */
1802 expand_naked_return ();
1803 }
1804
1805 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1806
1807 static enum type_class
1808 type_to_class (tree type)
1809 {
1810 switch (TREE_CODE (type))
1811 {
1812 case VOID_TYPE: return void_type_class;
1813 case INTEGER_TYPE: return integer_type_class;
1814 case ENUMERAL_TYPE: return enumeral_type_class;
1815 case BOOLEAN_TYPE: return boolean_type_class;
1816 case POINTER_TYPE: return pointer_type_class;
1817 case REFERENCE_TYPE: return reference_type_class;
1818 case OFFSET_TYPE: return offset_type_class;
1819 case REAL_TYPE: return real_type_class;
1820 case COMPLEX_TYPE: return complex_type_class;
1821 case FUNCTION_TYPE: return function_type_class;
1822 case METHOD_TYPE: return method_type_class;
1823 case RECORD_TYPE: return record_type_class;
1824 case UNION_TYPE:
1825 case QUAL_UNION_TYPE: return union_type_class;
1826 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1827 ? string_type_class : array_type_class);
1828 case LANG_TYPE: return lang_type_class;
1829 default: return no_type_class;
1830 }
1831 }
1832
1833 /* Expand a call EXP to __builtin_classify_type. */
1834
1835 static rtx
1836 expand_builtin_classify_type (tree exp)
1837 {
1838 if (call_expr_nargs (exp))
1839 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1840 return GEN_INT (no_type_class);
1841 }
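
/* For example (a sketch; values follow the type_class mapping above):

     __builtin_classify_type (1)    evaluates to integer_type_class
     __builtin_classify_type (1.0)  evaluates to real_type_class
     __builtin_classify_type ("")   evaluates to pointer_type_class,
                                    because the array argument decays
                                    to a pointer.  */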
1842
1843 /* This helper macro, meant to be used in mathfn_built_in_1 below,
1844 determines which among a set of three builtin math functions is
1845 appropriate for a given type mode. The `F' and `L' cases are
1846 automatically generated from the `double' case. */
1847 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1848 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1849 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1850 fcodel = BUILT_IN_MATHFN##L ; break;
1851 /* Similar to above, but appends _R after any F/L suffix. */
1852 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1853 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1854 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1855 fcodel = BUILT_IN_MATHFN##L_R ; break;
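
/* As an illustration, CASE_MATHFN (BUILT_IN_SQRT) expands to:

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;  */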
1856
1857 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1858 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1859 otherwise use the explicit declaration. If we can't do the conversion,
1860 return zero. */
1861
1862 static tree
1863 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1864 {
1865 enum built_in_function fcode, fcodef, fcodel, fcode2;
1866
1867 switch (fn)
1868 {
1869 CASE_MATHFN (BUILT_IN_ACOS)
1870 CASE_MATHFN (BUILT_IN_ACOSH)
1871 CASE_MATHFN (BUILT_IN_ASIN)
1872 CASE_MATHFN (BUILT_IN_ASINH)
1873 CASE_MATHFN (BUILT_IN_ATAN)
1874 CASE_MATHFN (BUILT_IN_ATAN2)
1875 CASE_MATHFN (BUILT_IN_ATANH)
1876 CASE_MATHFN (BUILT_IN_CBRT)
1877 CASE_MATHFN (BUILT_IN_CEIL)
1878 CASE_MATHFN (BUILT_IN_CEXPI)
1879 CASE_MATHFN (BUILT_IN_COPYSIGN)
1880 CASE_MATHFN (BUILT_IN_COS)
1881 CASE_MATHFN (BUILT_IN_COSH)
1882 CASE_MATHFN (BUILT_IN_DREM)
1883 CASE_MATHFN (BUILT_IN_ERF)
1884 CASE_MATHFN (BUILT_IN_ERFC)
1885 CASE_MATHFN (BUILT_IN_EXP)
1886 CASE_MATHFN (BUILT_IN_EXP10)
1887 CASE_MATHFN (BUILT_IN_EXP2)
1888 CASE_MATHFN (BUILT_IN_EXPM1)
1889 CASE_MATHFN (BUILT_IN_FABS)
1890 CASE_MATHFN (BUILT_IN_FDIM)
1891 CASE_MATHFN (BUILT_IN_FLOOR)
1892 CASE_MATHFN (BUILT_IN_FMA)
1893 CASE_MATHFN (BUILT_IN_FMAX)
1894 CASE_MATHFN (BUILT_IN_FMIN)
1895 CASE_MATHFN (BUILT_IN_FMOD)
1896 CASE_MATHFN (BUILT_IN_FREXP)
1897 CASE_MATHFN (BUILT_IN_GAMMA)
1898 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1899 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1900 CASE_MATHFN (BUILT_IN_HYPOT)
1901 CASE_MATHFN (BUILT_IN_ILOGB)
1902 CASE_MATHFN (BUILT_IN_ICEIL)
1903 CASE_MATHFN (BUILT_IN_IFLOOR)
1904 CASE_MATHFN (BUILT_IN_INF)
1905 CASE_MATHFN (BUILT_IN_IRINT)
1906 CASE_MATHFN (BUILT_IN_IROUND)
1907 CASE_MATHFN (BUILT_IN_ISINF)
1908 CASE_MATHFN (BUILT_IN_J0)
1909 CASE_MATHFN (BUILT_IN_J1)
1910 CASE_MATHFN (BUILT_IN_JN)
1911 CASE_MATHFN (BUILT_IN_LCEIL)
1912 CASE_MATHFN (BUILT_IN_LDEXP)
1913 CASE_MATHFN (BUILT_IN_LFLOOR)
1914 CASE_MATHFN (BUILT_IN_LGAMMA)
1915 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1916 CASE_MATHFN (BUILT_IN_LLCEIL)
1917 CASE_MATHFN (BUILT_IN_LLFLOOR)
1918 CASE_MATHFN (BUILT_IN_LLRINT)
1919 CASE_MATHFN (BUILT_IN_LLROUND)
1920 CASE_MATHFN (BUILT_IN_LOG)
1921 CASE_MATHFN (BUILT_IN_LOG10)
1922 CASE_MATHFN (BUILT_IN_LOG1P)
1923 CASE_MATHFN (BUILT_IN_LOG2)
1924 CASE_MATHFN (BUILT_IN_LOGB)
1925 CASE_MATHFN (BUILT_IN_LRINT)
1926 CASE_MATHFN (BUILT_IN_LROUND)
1927 CASE_MATHFN (BUILT_IN_MODF)
1928 CASE_MATHFN (BUILT_IN_NAN)
1929 CASE_MATHFN (BUILT_IN_NANS)
1930 CASE_MATHFN (BUILT_IN_NEARBYINT)
1931 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1932 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1933 CASE_MATHFN (BUILT_IN_POW)
1934 CASE_MATHFN (BUILT_IN_POWI)
1935 CASE_MATHFN (BUILT_IN_POW10)
1936 CASE_MATHFN (BUILT_IN_REMAINDER)
1937 CASE_MATHFN (BUILT_IN_REMQUO)
1938 CASE_MATHFN (BUILT_IN_RINT)
1939 CASE_MATHFN (BUILT_IN_ROUND)
1940 CASE_MATHFN (BUILT_IN_SCALB)
1941 CASE_MATHFN (BUILT_IN_SCALBLN)
1942 CASE_MATHFN (BUILT_IN_SCALBN)
1943 CASE_MATHFN (BUILT_IN_SIGNBIT)
1944 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1945 CASE_MATHFN (BUILT_IN_SIN)
1946 CASE_MATHFN (BUILT_IN_SINCOS)
1947 CASE_MATHFN (BUILT_IN_SINH)
1948 CASE_MATHFN (BUILT_IN_SQRT)
1949 CASE_MATHFN (BUILT_IN_TAN)
1950 CASE_MATHFN (BUILT_IN_TANH)
1951 CASE_MATHFN (BUILT_IN_TGAMMA)
1952 CASE_MATHFN (BUILT_IN_TRUNC)
1953 CASE_MATHFN (BUILT_IN_Y0)
1954 CASE_MATHFN (BUILT_IN_Y1)
1955 CASE_MATHFN (BUILT_IN_YN)
1956
1957 default:
1958 return NULL_TREE;
1959 }
1960
1961 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1962 fcode2 = fcode;
1963 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1964 fcode2 = fcodef;
1965 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1966 fcode2 = fcodel;
1967 else
1968 return NULL_TREE;
1969
1970 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1971 return NULL_TREE;
1972
1973 return builtin_decl_explicit (fcode2);
1974 }
1975
1976 /* Like mathfn_built_in_1 (), but always use the implicit builtin declarations. */
1977
1978 tree
1979 mathfn_built_in (tree type, enum built_in_function fn)
1980 {
1981 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1982 }
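
/* For instance, mathfn_built_in (float_type_node, BUILT_IN_SIN) yields
   the declaration of sinf, provided the implicit declaration is
   available; availability depends on the target and language.  */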
1983
1984 /* If errno must be maintained, expand the RTL to check if the result,
1985 TARGET, of a built-in function call, EXP, is NaN, and if so set
1986 errno to EDOM. */
1987
1988 static void
1989 expand_errno_check (tree exp, rtx target)
1990 {
1991 rtx_code_label *lab = gen_label_rtx ();
1992
1993 /* Test the result; if it is NaN, set errno=EDOM because the argument was
1994 not in the domain (only a NaN compares unequal to itself). */
1995 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1996 NULL_RTX, NULL_RTX, lab,
1997 /* The jump is very likely. */
1998 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1999
2000 #ifdef TARGET_EDOM
2001 /* If this built-in doesn't throw an exception, set errno directly. */
2002 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
2003 {
2004 #ifdef GEN_ERRNO_RTX
2005 rtx errno_rtx = GEN_ERRNO_RTX;
2006 #else
2007 rtx errno_rtx
2008 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2009 #endif
2010 emit_move_insn (errno_rtx,
2011 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2012 emit_label (lab);
2013 return;
2014 }
2015 #endif
2016
2017 /* Make sure the library call isn't expanded as a tail call. */
2018 CALL_EXPR_TAILCALL (exp) = 0;
2019
2020 /* We can't set errno=EDOM directly; let the library call do it.
2021 Pop the arguments right away in case the call gets deleted. */
2022 NO_DEFER_POP;
2023 expand_call (exp, target, 0);
2024 OK_DEFER_POP;
2025 emit_label (lab);
2026 }
2027
2028 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2029 Return NULL_RTX if a normal call should be emitted rather than expanding
2030 the function in-line. EXP is the expression that is a call to the builtin
2031 function; if convenient, the result should be placed in TARGET.
2032 SUBTARGET may be used as the target for computing one of EXP's operands. */
2033
2034 static rtx
2035 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2036 {
2037 optab builtin_optab;
2038 rtx op0;
2039 rtx_insn *insns;
2040 tree fndecl = get_callee_fndecl (exp);
2041 machine_mode mode;
2042 bool errno_set = false;
2043 bool try_widening = false;
2044 tree arg;
2045
2046 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2047 return NULL_RTX;
2048
2049 arg = CALL_EXPR_ARG (exp, 0);
2050
2051 switch (DECL_FUNCTION_CODE (fndecl))
2052 {
2053 CASE_FLT_FN (BUILT_IN_SQRT):
2054 errno_set = ! tree_expr_nonnegative_p (arg);
2055 try_widening = true;
2056 builtin_optab = sqrt_optab;
2057 break;
2058 CASE_FLT_FN (BUILT_IN_EXP):
2059 errno_set = true; builtin_optab = exp_optab; break;
2060 CASE_FLT_FN (BUILT_IN_EXP10):
2061 CASE_FLT_FN (BUILT_IN_POW10):
2062 errno_set = true; builtin_optab = exp10_optab; break;
2063 CASE_FLT_FN (BUILT_IN_EXP2):
2064 errno_set = true; builtin_optab = exp2_optab; break;
2065 CASE_FLT_FN (BUILT_IN_EXPM1):
2066 errno_set = true; builtin_optab = expm1_optab; break;
2067 CASE_FLT_FN (BUILT_IN_LOGB):
2068 errno_set = true; builtin_optab = logb_optab; break;
2069 CASE_FLT_FN (BUILT_IN_LOG):
2070 errno_set = true; builtin_optab = log_optab; break;
2071 CASE_FLT_FN (BUILT_IN_LOG10):
2072 errno_set = true; builtin_optab = log10_optab; break;
2073 CASE_FLT_FN (BUILT_IN_LOG2):
2074 errno_set = true; builtin_optab = log2_optab; break;
2075 CASE_FLT_FN (BUILT_IN_LOG1P):
2076 errno_set = true; builtin_optab = log1p_optab; break;
2077 CASE_FLT_FN (BUILT_IN_ASIN):
2078 builtin_optab = asin_optab; break;
2079 CASE_FLT_FN (BUILT_IN_ACOS):
2080 builtin_optab = acos_optab; break;
2081 CASE_FLT_FN (BUILT_IN_TAN):
2082 builtin_optab = tan_optab; break;
2083 CASE_FLT_FN (BUILT_IN_ATAN):
2084 builtin_optab = atan_optab; break;
2085 CASE_FLT_FN (BUILT_IN_FLOOR):
2086 builtin_optab = floor_optab; break;
2087 CASE_FLT_FN (BUILT_IN_CEIL):
2088 builtin_optab = ceil_optab; break;
2089 CASE_FLT_FN (BUILT_IN_TRUNC):
2090 builtin_optab = btrunc_optab; break;
2091 CASE_FLT_FN (BUILT_IN_ROUND):
2092 builtin_optab = round_optab; break;
2093 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2094 builtin_optab = nearbyint_optab;
2095 if (flag_trapping_math)
2096 break;
2097 /* Else fall through and expand as rint. */
2098 CASE_FLT_FN (BUILT_IN_RINT):
2099 builtin_optab = rint_optab; break;
2100 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2101 builtin_optab = significand_optab; break;
2102 default:
2103 gcc_unreachable ();
2104 }
2105
2106 /* Make a suitable register to place result in. */
2107 mode = TYPE_MODE (TREE_TYPE (exp));
2108
2109 if (! flag_errno_math || ! HONOR_NANS (mode))
2110 errno_set = false;
2111
2112 /* Before working hard, check whether the instruction is available, but try
2113 to widen the mode for specific operations. */
2114 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2115 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2116 && (!errno_set || !optimize_insn_for_size_p ()))
2117 {
2118 rtx result = gen_reg_rtx (mode);
2119
2120 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2121 need to expand the argument again. This way, we will not perform
2122 side-effects more than once. */
2123 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2124
2125 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2126
2127 start_sequence ();
2128
2129 /* Compute into RESULT.
2130 Set RESULT to wherever the result comes back. */
2131 result = expand_unop (mode, builtin_optab, op0, result, 0);
2132
2133 if (result != 0)
2134 {
2135 if (errno_set)
2136 expand_errno_check (exp, result);
2137
2138 /* Output the entire sequence. */
2139 insns = get_insns ();
2140 end_sequence ();
2141 emit_insn (insns);
2142 return result;
2143 }
2144
2145 /* If we were unable to expand via the builtin, stop the sequence
2146 (without outputting the insns) and call the library function
2147 with the stabilized argument list. */
2148 end_sequence ();
2149 }
2150
2151 return expand_call (exp, target, target == const0_rtx);
2152 }
2153
2154 /* Expand a call to the builtin binary math functions (pow and atan2).
2155 Return NULL_RTX if a normal call should be emitted rather than expanding the
2156 function in-line. EXP is the expression that is a call to the builtin
2157 function; if convenient, the result should be placed in TARGET.
2158 SUBTARGET may be used as the target for computing one of EXP's
2159 operands. */
2160
2161 static rtx
2162 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2163 {
2164 optab builtin_optab;
2165 rtx op0, op1, result;
2166 rtx_insn *insns;
2167 int op1_type = REAL_TYPE;
2168 tree fndecl = get_callee_fndecl (exp);
2169 tree arg0, arg1;
2170 machine_mode mode;
2171 bool errno_set = true;
2172
2173 switch (DECL_FUNCTION_CODE (fndecl))
2174 {
2175 CASE_FLT_FN (BUILT_IN_SCALBN):
2176 CASE_FLT_FN (BUILT_IN_SCALBLN):
2177 CASE_FLT_FN (BUILT_IN_LDEXP):
2178 op1_type = INTEGER_TYPE; /* FALLTHRU */
2179 default:
2180 break;
2181 }
2182
2183 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2184 return NULL_RTX;
2185
2186 arg0 = CALL_EXPR_ARG (exp, 0);
2187 arg1 = CALL_EXPR_ARG (exp, 1);
2188
2189 switch (DECL_FUNCTION_CODE (fndecl))
2190 {
2191 CASE_FLT_FN (BUILT_IN_POW):
2192 builtin_optab = pow_optab; break;
2193 CASE_FLT_FN (BUILT_IN_ATAN2):
2194 builtin_optab = atan2_optab; break;
2195 CASE_FLT_FN (BUILT_IN_SCALB):
2196 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2197 return 0;
2198 builtin_optab = scalb_optab; break;
2199 CASE_FLT_FN (BUILT_IN_SCALBN):
2200 CASE_FLT_FN (BUILT_IN_SCALBLN):
2201 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2202 return 0;
2203 /* Fall through... */
2204 CASE_FLT_FN (BUILT_IN_LDEXP):
2205 builtin_optab = ldexp_optab; break;
2206 CASE_FLT_FN (BUILT_IN_FMOD):
2207 builtin_optab = fmod_optab; break;
2208 CASE_FLT_FN (BUILT_IN_REMAINDER):
2209 CASE_FLT_FN (BUILT_IN_DREM):
2210 builtin_optab = remainder_optab; break;
2211 default:
2212 gcc_unreachable ();
2213 }
2214
2215 /* Make a suitable register to place result in. */
2216 mode = TYPE_MODE (TREE_TYPE (exp));
2217
2218 /* Before working hard, check whether the instruction is available. */
2219 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2220 return NULL_RTX;
2221
2222 result = gen_reg_rtx (mode);
2223
2224 if (! flag_errno_math || ! HONOR_NANS (mode))
2225 errno_set = false;
2226
2227 if (errno_set && optimize_insn_for_size_p ())
2228 return 0;
2229
2230 /* Always stabilize the argument list. */
2231 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2232 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2233
2234 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2235 op1 = expand_normal (arg1);
2236
2237 start_sequence ();
2238
2239 /* Compute into RESULT.
2240 Set RESULT to wherever the result comes back. */
2241 result = expand_binop (mode, builtin_optab, op0, op1,
2242 result, 0, OPTAB_DIRECT);
2243
2244 /* If we were unable to expand via the builtin, stop the sequence
2245 (without outputting the insns) and call the library function
2246 with the stabilized argument list. */
2247 if (result == 0)
2248 {
2249 end_sequence ();
2250 return expand_call (exp, target, target == const0_rtx);
2251 }
2252
2253 if (errno_set)
2254 expand_errno_check (exp, result);
2255
2256 /* Output the entire sequence. */
2257 insns = get_insns ();
2258 end_sequence ();
2259 emit_insn (insns);
2260
2261 return result;
2262 }
2263
2264 /* Expand a call to the builtin ternary math functions (fma).
2265 Return NULL_RTX if a normal call should be emitted rather than expanding the
2266 function in-line. EXP is the expression that is a call to the builtin
2267 function; if convenient, the result should be placed in TARGET.
2268 SUBTARGET may be used as the target for computing one of EXP's
2269 operands. */
2270
2271 static rtx
2272 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2273 {
2274 optab builtin_optab;
2275 rtx op0, op1, op2, result;
2276 rtx_insn *insns;
2277 tree fndecl = get_callee_fndecl (exp);
2278 tree arg0, arg1, arg2;
2279 machine_mode mode;
2280
2281 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2282 return NULL_RTX;
2283
2284 arg0 = CALL_EXPR_ARG (exp, 0);
2285 arg1 = CALL_EXPR_ARG (exp, 1);
2286 arg2 = CALL_EXPR_ARG (exp, 2);
2287
2288 switch (DECL_FUNCTION_CODE (fndecl))
2289 {
2290 CASE_FLT_FN (BUILT_IN_FMA):
2291 builtin_optab = fma_optab; break;
2292 default:
2293 gcc_unreachable ();
2294 }
2295
2296 /* Make a suitable register to place result in. */
2297 mode = TYPE_MODE (TREE_TYPE (exp));
2298
2299 /* Before working hard, check whether the instruction is available. */
2300 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2301 return NULL_RTX;
2302
2303 result = gen_reg_rtx (mode);
2304
2305 /* Always stabilize the argument list. */
2306 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2307 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2308 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2309
2310 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2311 op1 = expand_normal (arg1);
2312 op2 = expand_normal (arg2);
2313
2314 start_sequence ();
2315
2316 /* Compute into RESULT.
2317 Set RESULT to wherever the result comes back. */
2318 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2319 result, 0);
2320
2321 /* If we were unable to expand via the builtin, stop the sequence
2322 (without outputting the insns) and call the library function
2323 with the stabilized argument list. */
2324 if (result == 0)
2325 {
2326 end_sequence ();
2327 return expand_call (exp, target, target == const0_rtx);
2328 }
2329
2330 /* Output the entire sequence. */
2331 insns = get_insns ();
2332 end_sequence ();
2333 emit_insn (insns);
2334
2335 return result;
2336 }
2337
2338 /* Expand a call to the builtin sin and cos math functions.
2339 Return NULL_RTX if a normal call should be emitted rather than expanding the
2340 function in-line. EXP is the expression that is a call to the builtin
2341 function; if convenient, the result should be placed in TARGET.
2342 SUBTARGET may be used as the target for computing one of EXP's
2343 operands. */
2344
2345 static rtx
2346 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2347 {
2348 optab builtin_optab;
2349 rtx op0;
2350 rtx_insn *insns;
2351 tree fndecl = get_callee_fndecl (exp);
2352 machine_mode mode;
2353 tree arg;
2354
2355 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2356 return NULL_RTX;
2357
2358 arg = CALL_EXPR_ARG (exp, 0);
2359
2360 switch (DECL_FUNCTION_CODE (fndecl))
2361 {
2362 CASE_FLT_FN (BUILT_IN_SIN):
2363 CASE_FLT_FN (BUILT_IN_COS):
2364 builtin_optab = sincos_optab; break;
2365 default:
2366 gcc_unreachable ();
2367 }
2368
2369 /* Make a suitable register to place result in. */
2370 mode = TYPE_MODE (TREE_TYPE (exp));
2371
2372 /* Check if the sincos insn is available; otherwise fall back
2373 to the sin or cos insn. */
2374 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2375 switch (DECL_FUNCTION_CODE (fndecl))
2376 {
2377 CASE_FLT_FN (BUILT_IN_SIN):
2378 builtin_optab = sin_optab; break;
2379 CASE_FLT_FN (BUILT_IN_COS):
2380 builtin_optab = cos_optab; break;
2381 default:
2382 gcc_unreachable ();
2383 }
2384
2385 /* Before working hard, check whether the instruction is available. */
2386 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2387 {
2388 rtx result = gen_reg_rtx (mode);
2389
2390 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2391 need to expand the argument again. This way, we will not perform
2392 side-effects more than once. */
2393 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2394
2395 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2396
2397 start_sequence ();
2398
2399 /* Compute into RESULT.
2400 Set RESULT to wherever the result comes back. */
2401 if (builtin_optab == sincos_optab)
2402 {
2403 int ok;
2404
2405 switch (DECL_FUNCTION_CODE (fndecl))
2406 {
2407 CASE_FLT_FN (BUILT_IN_SIN):
2408 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2409 break;
2410 CASE_FLT_FN (BUILT_IN_COS):
2411 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2412 break;
2413 default:
2414 gcc_unreachable ();
2415 }
2416 gcc_assert (ok);
2417 }
2418 else
2419 result = expand_unop (mode, builtin_optab, op0, result, 0);
2420
2421 if (result != 0)
2422 {
2423 /* Output the entire sequence. */
2424 insns = get_insns ();
2425 end_sequence ();
2426 emit_insn (insns);
2427 return result;
2428 }
2429
2430 /* If we were unable to expand via the builtin, stop the sequence
2431 (without outputting the insns) and call the library function
2432 with the stabilized argument list. */
2433 end_sequence ();
2434 }
2435
2436 return expand_call (exp, target, target == const0_rtx);
2437 }
2438
2439 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2440 return an RTL instruction code that implements the functionality.
2441 If that isn't possible or available, return CODE_FOR_nothing. */
2442
2443 static enum insn_code
2444 interclass_mathfn_icode (tree arg, tree fndecl)
2445 {
2446 bool errno_set = false;
2447 optab builtin_optab = unknown_optab;
2448 machine_mode mode;
2449
2450 switch (DECL_FUNCTION_CODE (fndecl))
2451 {
2452 CASE_FLT_FN (BUILT_IN_ILOGB):
2453 errno_set = true; builtin_optab = ilogb_optab; break;
2454 CASE_FLT_FN (BUILT_IN_ISINF):
2455 builtin_optab = isinf_optab; break;
2456 case BUILT_IN_ISNORMAL:
2457 case BUILT_IN_ISFINITE:
2458 CASE_FLT_FN (BUILT_IN_FINITE):
2459 case BUILT_IN_FINITED32:
2460 case BUILT_IN_FINITED64:
2461 case BUILT_IN_FINITED128:
2462 case BUILT_IN_ISINFD32:
2463 case BUILT_IN_ISINFD64:
2464 case BUILT_IN_ISINFD128:
2465 /* These builtins have no optabs (yet). */
2466 break;
2467 default:
2468 gcc_unreachable ();
2469 }
2470
2471 /* There's no easy way to detect the case we need to set EDOM. */
2472 if (flag_errno_math && errno_set)
2473 return CODE_FOR_nothing;
2474
2475 /* Optab mode depends on the mode of the input argument. */
2476 mode = TYPE_MODE (TREE_TYPE (arg));
2477
2478 if (builtin_optab)
2479 return optab_handler (builtin_optab, mode);
2480 return CODE_FOR_nothing;
2481 }
2482
2483 /* Expand a call to one of the builtin math functions that operate on
2484 a floating point argument and produce an integer result (ilogb, isinf,
2485 isnan, etc).
2486 Return 0 if a normal call should be emitted rather than expanding the
2487 function in-line. EXP is the expression that is a call to the builtin
2488 function; if convenient, the result should be placed in TARGET. */
2489
2490 static rtx
2491 expand_builtin_interclass_mathfn (tree exp, rtx target)
2492 {
2493 enum insn_code icode = CODE_FOR_nothing;
2494 rtx op0;
2495 tree fndecl = get_callee_fndecl (exp);
2496 machine_mode mode;
2497 tree arg;
2498
2499 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2500 return NULL_RTX;
2501
2502 arg = CALL_EXPR_ARG (exp, 0);
2503 icode = interclass_mathfn_icode (arg, fndecl);
2504 mode = TYPE_MODE (TREE_TYPE (arg));
2505
2506 if (icode != CODE_FOR_nothing)
2507 {
2508 struct expand_operand ops[1];
2509 rtx_insn *last = get_last_insn ();
2510 tree orig_arg = arg;
2511
2512 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2513 need to expand the argument again. This way, we will not perform
2514 side-effects more than once. */
2515 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2516
2517 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2518
2519 if (mode != GET_MODE (op0))
2520 op0 = convert_to_mode (mode, op0, 0);
2521
2522 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2523 if (maybe_legitimize_operands (icode, 0, 1, ops)
2524 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2525 return ops[0].value;
2526
2527 delete_insns_since (last);
2528 CALL_EXPR_ARG (exp, 0) = orig_arg;
2529 }
2530
2531 return NULL_RTX;
2532 }
2533
2534 /* Expand a call to the builtin sincos math function.
2535 Return NULL_RTX if a normal call should be emitted rather than expanding the
2536 function in-line. EXP is the expression that is a call to the builtin
2537 function. */
2538
2539 static rtx
2540 expand_builtin_sincos (tree exp)
2541 {
2542 rtx op0, op1, op2, target1, target2;
2543 machine_mode mode;
2544 tree arg, sinp, cosp;
2545 int result;
2546 location_t loc = EXPR_LOCATION (exp);
2547 tree alias_type, alias_off;
2548
2549 if (!validate_arglist (exp, REAL_TYPE,
2550 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2551 return NULL_RTX;
2552
2553 arg = CALL_EXPR_ARG (exp, 0);
2554 sinp = CALL_EXPR_ARG (exp, 1);
2555 cosp = CALL_EXPR_ARG (exp, 2);
2556
2557 /* Make a suitable register to place result in. */
2558 mode = TYPE_MODE (TREE_TYPE (arg));
2559
2560 /* Check if the sincos insn is available; otherwise emit the call. */
2561 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2562 return NULL_RTX;
2563
2564 target1 = gen_reg_rtx (mode);
2565 target2 = gen_reg_rtx (mode);
2566
2567 op0 = expand_normal (arg);
2568 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2569 alias_off = build_int_cst (alias_type, 0);
2570 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2571 sinp, alias_off));
2572 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2573 cosp, alias_off));
2574
2575 /* Compute into target1 and target2.
2576 Set TARGET to wherever the result comes back. */
2577 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2578 gcc_assert (result);
2579
2580 /* Move target1 and target2 to the memory locations indicated
2581 by op1 and op2. */
2582 emit_move_insn (op1, target1);
2583 emit_move_insn (op2, target2);
2584
2585 return const0_rtx;
2586 }
2587
2588 /* Expand a call to the internal cexpi builtin to the sincos math function.
2589 EXP is the expression that is a call to the builtin function; if convenient,
2590 the result should be placed in TARGET. */
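
/* Recall cexpi (x) == cos (x) + i*sin (x); that identity is why the
   three strategies below (sincos optab, sincos libcall, cexp libcall)
   are interchangeable.  */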
2591
2592 static rtx
2593 expand_builtin_cexpi (tree exp, rtx target)
2594 {
2595 tree fndecl = get_callee_fndecl (exp);
2596 tree arg, type;
2597 machine_mode mode;
2598 rtx op0, op1, op2;
2599 location_t loc = EXPR_LOCATION (exp);
2600
2601 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2602 return NULL_RTX;
2603
2604 arg = CALL_EXPR_ARG (exp, 0);
2605 type = TREE_TYPE (arg);
2606 mode = TYPE_MODE (TREE_TYPE (arg));
2607
2608 /* Try expanding via the sincos optab, falling back to emitting a libcall
2609 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2610 is only generated from sincos or cexp, or when either of them is available. */
2611 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2612 {
2613 op1 = gen_reg_rtx (mode);
2614 op2 = gen_reg_rtx (mode);
2615
2616 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2617
2618 /* Compute into op1 and op2. */
2619 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2620 }
2621 else if (targetm.libc_has_function (function_sincos))
2622 {
2623 tree call, fn = NULL_TREE;
2624 tree top1, top2;
2625 rtx op1a, op2a;
2626
2627 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2628 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2629 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2630 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2631 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2632 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2633 else
2634 gcc_unreachable ();
2635
2636 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2637 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2638 op1a = copy_addr_to_reg (XEXP (op1, 0));
2639 op2a = copy_addr_to_reg (XEXP (op2, 0));
2640 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2641 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2642
2643 /* Make sure not to fold the sincos call again. */
2644 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2645 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2646 call, 3, arg, top1, top2));
2647 }
2648 else
2649 {
2650 tree call, fn = NULL_TREE, narg;
2651 tree ctype = build_complex_type (type);
2652
2653 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2654 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2655 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2656 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2657 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2658 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2659 else
2660 gcc_unreachable ();
2661
2662 /* If we don't have a decl for cexp, create one. This is the
2663 friendliest fallback if the user calls __builtin_cexpi
2664 on a target without full C99 function support. */
2665 if (fn == NULL_TREE)
2666 {
2667 tree fntype;
2668 const char *name = NULL;
2669
2670 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2671 name = "cexpf";
2672 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2673 name = "cexp";
2674 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2675 name = "cexpl";
2676
2677 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2678 fn = build_fn_decl (name, fntype);
2679 }
2680
2681 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2682 build_real (type, dconst0), arg);
2683
2684 /* Make sure not to fold the cexp call again. */
2685 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2686 return expand_expr (build_call_nary (ctype, call, 1, narg),
2687 target, VOIDmode, EXPAND_NORMAL);
2688 }
2689
2690 /* Now build the proper return type. */
2691 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2692 make_tree (TREE_TYPE (arg), op2),
2693 make_tree (TREE_TYPE (arg), op1)),
2694 target, VOIDmode, EXPAND_NORMAL);
2695 }
2696
2697 /* Conveniently construct a function call expression. FNDECL names the
2698 function to be called, N is the number of arguments, and the "..."
2699 parameters are the argument expressions. Unlike build_call_expr,
2700 this doesn't fold the call; hence it will always return a CALL_EXPR. */
2701
2702 static tree
2703 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2704 {
2705 va_list ap;
2706 tree fntype = TREE_TYPE (fndecl);
2707 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2708
2709 va_start (ap, n);
2710 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2711 va_end (ap);
2712 SET_EXPR_LOCATION (fn, loc);
2713 return fn;
2714 }
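
/* E.g. the mempcpy-to-memcpy transformation below rebuilds the call as

     build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3, dest, src, len);

   deliberately bypassing the folders so the freshly built CALL_EXPR is
   expanded as-is.  */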
2715
2716 /* Expand a call to one of the builtin rounding functions gcc defines
2717 as an extension (lfloor and lceil). As these are gcc extensions, we
2718 do not need to worry about setting errno to EDOM.
2719 If expanding via optab fails, lower expression to (int)(floor(x)).
2720 EXP is the expression that is a call to the builtin function;
2721 if convenient, the result should be placed in TARGET. */
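
/* E.g. when no lceil/lfloor pattern is available for the mode,
   __builtin_lfloor (x) is lowered through the fallback path below to
   the equivalent of (long int) floor (x).  */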
2722
2723 static rtx
2724 expand_builtin_int_roundingfn (tree exp, rtx target)
2725 {
2726 convert_optab builtin_optab;
2727 rtx op0, tmp;
2728 rtx_insn *insns;
2729 tree fndecl = get_callee_fndecl (exp);
2730 enum built_in_function fallback_fn;
2731 tree fallback_fndecl;
2732 machine_mode mode;
2733 tree arg;
2734
2735 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2736 gcc_unreachable ();
2737
2738 arg = CALL_EXPR_ARG (exp, 0);
2739
2740 switch (DECL_FUNCTION_CODE (fndecl))
2741 {
2742 CASE_FLT_FN (BUILT_IN_ICEIL):
2743 CASE_FLT_FN (BUILT_IN_LCEIL):
2744 CASE_FLT_FN (BUILT_IN_LLCEIL):
2745 builtin_optab = lceil_optab;
2746 fallback_fn = BUILT_IN_CEIL;
2747 break;
2748
2749 CASE_FLT_FN (BUILT_IN_IFLOOR):
2750 CASE_FLT_FN (BUILT_IN_LFLOOR):
2751 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2752 builtin_optab = lfloor_optab;
2753 fallback_fn = BUILT_IN_FLOOR;
2754 break;
2755
2756 default:
2757 gcc_unreachable ();
2758 }
2759
2760 /* Make a suitable register to place result in. */
2761 mode = TYPE_MODE (TREE_TYPE (exp));
2762
2763 target = gen_reg_rtx (mode);
2764
2765 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2766 need to expand the argument again. This way, we will not perform
2767 side-effects more than once. */
2768 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2769
2770 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2771
2772 start_sequence ();
2773
2774 /* Compute into TARGET. */
2775 if (expand_sfix_optab (target, op0, builtin_optab))
2776 {
2777 /* Output the entire sequence. */
2778 insns = get_insns ();
2779 end_sequence ();
2780 emit_insn (insns);
2781 return target;
2782 }
2783
2784 /* If we were unable to expand via the builtin, stop the sequence
2785 (without outputting the insns). */
2786 end_sequence ();
2787
2788 /* Fall back to floating point rounding optab. */
2789 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2790
2791 /* For non-C99 targets we may end up without a fallback fndecl here
2792 if the user called __builtin_lfloor directly. In this case emit
2793 a call to the floor/ceil variants nevertheless. This should result
2794 in the best user experience on targets lacking full C99 support. */
2795 if (fallback_fndecl == NULL_TREE)
2796 {
2797 tree fntype;
2798 const char *name = NULL;
2799
2800 switch (DECL_FUNCTION_CODE (fndecl))
2801 {
2802 case BUILT_IN_ICEIL:
2803 case BUILT_IN_LCEIL:
2804 case BUILT_IN_LLCEIL:
2805 name = "ceil";
2806 break;
2807 case BUILT_IN_ICEILF:
2808 case BUILT_IN_LCEILF:
2809 case BUILT_IN_LLCEILF:
2810 name = "ceilf";
2811 break;
2812 case BUILT_IN_ICEILL:
2813 case BUILT_IN_LCEILL:
2814 case BUILT_IN_LLCEILL:
2815 name = "ceill";
2816 break;
2817 case BUILT_IN_IFLOOR:
2818 case BUILT_IN_LFLOOR:
2819 case BUILT_IN_LLFLOOR:
2820 name = "floor";
2821 break;
2822 case BUILT_IN_IFLOORF:
2823 case BUILT_IN_LFLOORF:
2824 case BUILT_IN_LLFLOORF:
2825 name = "floorf";
2826 break;
2827 case BUILT_IN_IFLOORL:
2828 case BUILT_IN_LFLOORL:
2829 case BUILT_IN_LLFLOORL:
2830 name = "floorl";
2831 break;
2832 default:
2833 gcc_unreachable ();
2834 }
2835
2836 fntype = build_function_type_list (TREE_TYPE (arg),
2837 TREE_TYPE (arg), NULL_TREE);
2838 fallback_fndecl = build_fn_decl (name, fntype);
2839 }
2840
2841 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2842
2843 tmp = expand_normal (exp);
2844 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2845
2846 /* Truncate the result of floating point optab to integer
2847 via expand_fix (). */
2848 target = gen_reg_rtx (mode);
2849 expand_fix (target, tmp, 0);
2850
2851 return target;
2852 }
2853
2854 /* Expand a call to one of the builtin math functions doing integer
2855 conversion (lrint).
2856 Return 0 if a normal call should be emitted rather than expanding the
2857 function in-line. EXP is the expression that is a call to the builtin
2858 function; if convenient, the result should be placed in TARGET. */
2859
2860 static rtx
2861 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2862 {
2863 convert_optab builtin_optab;
2864 rtx op0;
2865 rtx_insn *insns;
2866 tree fndecl = get_callee_fndecl (exp);
2867 tree arg;
2868 machine_mode mode;
2869 enum built_in_function fallback_fn = BUILT_IN_NONE;
2870
2871 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2872 gcc_unreachable ();
2873
2874 arg = CALL_EXPR_ARG (exp, 0);
2875
2876 switch (DECL_FUNCTION_CODE (fndecl))
2877 {
2878 CASE_FLT_FN (BUILT_IN_IRINT):
2879 fallback_fn = BUILT_IN_LRINT;
2880 /* FALLTHRU */
2881 CASE_FLT_FN (BUILT_IN_LRINT):
2882 CASE_FLT_FN (BUILT_IN_LLRINT):
2883 builtin_optab = lrint_optab;
2884 break;
2885
2886 CASE_FLT_FN (BUILT_IN_IROUND):
2887 fallback_fn = BUILT_IN_LROUND;
2888 /* FALLTHRU */
2889 CASE_FLT_FN (BUILT_IN_LROUND):
2890 CASE_FLT_FN (BUILT_IN_LLROUND):
2891 builtin_optab = lround_optab;
2892 break;
2893
2894 default:
2895 gcc_unreachable ();
2896 }
2897
2898 /* There's no easy way to detect the case we need to set EDOM. */
2899 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2900 return NULL_RTX;
2901
2902 /* Make a suitable register to place result in. */
2903 mode = TYPE_MODE (TREE_TYPE (exp));
2904
2905 /* If errno need not be maintained, try expanding via the optab directly. */
2906 if (!flag_errno_math)
2907 {
2908 rtx result = gen_reg_rtx (mode);
2909
2910 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2911 need to expand the argument again. This way, we will not perform
2912 side-effects more than once. */
2913 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2914
2915 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2916
2917 start_sequence ();
2918
2919 if (expand_sfix_optab (result, op0, builtin_optab))
2920 {
2921 /* Output the entire sequence. */
2922 insns = get_insns ();
2923 end_sequence ();
2924 emit_insn (insns);
2925 return result;
2926 }
2927
2928 /* If we were unable to expand via the builtin, stop the sequence
2929 (without outputting the insns) and call the library function
2930 with the stabilized argument list. */
2931 end_sequence ();
2932 }
2933
2934 if (fallback_fn != BUILT_IN_NONE)
2935 {
2936 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2937 targets, (int) round (x) should never be transformed into
2938 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2939 a call to lround in the hope that the target provides at least some
2940 C99 functions. This should give the best user experience on
2941 targets lacking full C99 support. */
2942 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2943 fallback_fn, 0);
2944
2945 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2946 fallback_fndecl, 1, arg);
2947
2948 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2949 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2950 return convert_to_mode (mode, target, 0);
2951 }
2952
2953 return expand_call (exp, target, target == const0_rtx);
2954 }
2955
2956 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2957 a normal call should be emitted rather than expanding the function
2958 in-line. EXP is the expression that is a call to the builtin
2959 function; if convenient, the result should be placed in TARGET. */
2960
2961 static rtx
2962 expand_builtin_powi (tree exp, rtx target)
2963 {
2964 tree arg0, arg1;
2965 rtx op0, op1;
2966 machine_mode mode;
2967 machine_mode mode2;
2968
2969 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2970 return NULL_RTX;
2971
2972 arg0 = CALL_EXPR_ARG (exp, 0);
2973 arg1 = CALL_EXPR_ARG (exp, 1);
2974 mode = TYPE_MODE (TREE_TYPE (exp));
2975
2976 /* Emit a libcall to libgcc. */
2977
2978 /* Mode of the 2nd argument must match that of an int. */
2979 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2980
2981 if (target == NULL_RTX)
2982 target = gen_reg_rtx (mode);
2983
2984 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2985 if (GET_MODE (op0) != mode)
2986 op0 = convert_to_mode (mode, op0, 0);
2987 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2988 if (GET_MODE (op1) != mode2)
2989 op1 = convert_to_mode (mode2, op1, 0);
2990
2991 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2992 target, LCT_CONST, mode, 2,
2993 op0, mode, op1, mode2);
2994
2995 return target;
2996 }
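
/* So, e.g., __builtin_powi (x, n) with double X becomes a call to the
   libgcc routine registered for powi_optab in DFmode (conventionally
   __powidf2); the exact name is whatever optab_libfunc reports.  */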
2997
2998 /* Expand expression EXP which is a call to the strlen builtin. Return
2999 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3000 try to get the result in TARGET, if convenient. */
3001
3002 static rtx
3003 expand_builtin_strlen (tree exp, rtx target,
3004 machine_mode target_mode)
3005 {
3006 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3007 return NULL_RTX;
3008 else
3009 {
3010 struct expand_operand ops[4];
3011 rtx pat;
3012 tree len;
3013 tree src = CALL_EXPR_ARG (exp, 0);
3014 rtx src_reg;
3015 rtx_insn *before_strlen;
3016 machine_mode insn_mode = target_mode;
3017 enum insn_code icode = CODE_FOR_nothing;
3018 unsigned int align;
3019
3020 /* If the length can be computed at compile-time, return it. */
3021 len = c_strlen (src, 0);
3022 if (len)
3023 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3024
3025 /* If the length can be computed at compile-time and is constant
3026 integer, but there are side-effects in src, evaluate
3027 src for side-effects, then return len.
3028 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3029 can be optimized into: i++; x = 3; */
3030 len = c_strlen (src, 1);
3031 if (len && TREE_CODE (len) == INTEGER_CST)
3032 {
3033 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3034 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3035 }
3036
3037 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3038
3039 /* If SRC is not a pointer type, don't do this operation inline. */
3040 if (align == 0)
3041 return NULL_RTX;
3042
3043 /* Bail out if we can't compute strlen in the right mode. */
3044 while (insn_mode != VOIDmode)
3045 {
3046 icode = optab_handler (strlen_optab, insn_mode);
3047 if (icode != CODE_FOR_nothing)
3048 break;
3049
3050 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3051 }
3052 if (insn_mode == VOIDmode)
3053 return NULL_RTX;
3054
3055 /* Make a place to hold the source address. We will not expand
3056 the actual source until we are sure that the expansion will
3057 not fail -- there are trees that cannot be expanded twice. */
3058 src_reg = gen_reg_rtx (Pmode);
3059
3060 /* Mark the beginning of the strlen sequence so we can emit the
3061 source operand later. */
3062 before_strlen = get_last_insn ();
3063
3064 create_output_operand (&ops[0], target, insn_mode);
3065 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3066 create_integer_operand (&ops[2], 0);
3067 create_integer_operand (&ops[3], align);
3068 if (!maybe_expand_insn (icode, 4, ops))
3069 return NULL_RTX;
3070
3071 /* Now that we are assured of success, expand the source. */
3072 start_sequence ();
3073 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3074 if (pat != src_reg)
3075 {
3076 #ifdef POINTERS_EXTEND_UNSIGNED
3077 if (GET_MODE (pat) != Pmode)
3078 pat = convert_to_mode (Pmode, pat,
3079 POINTERS_EXTEND_UNSIGNED);
3080 #endif
3081 emit_move_insn (src_reg, pat);
3082 }
3083 pat = get_insns ();
3084 end_sequence ();
3085
3086 if (before_strlen)
3087 emit_insn_after (pat, before_strlen);
3088 else
3089 emit_insn_before (pat, get_insns ());
3090
3091 /* Return the value in the proper mode for this function. */
3092 if (GET_MODE (ops[0].value) == target_mode)
3093 target = ops[0].value;
3094 else if (target != 0)
3095 convert_move (target, ops[0].value, 0);
3096 else
3097 target = convert_to_mode (target_mode, ops[0].value, 0);
3098
3099 return target;
3100 }
3101 }
3102
3103 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3104 bytes from constant string DATA + OFFSET and return it as target
3105 constant. */
3106
3107 static rtx
3108 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3109 machine_mode mode)
3110 {
3111 const char *str = (const char *) data;
3112
3113 gcc_assert (offset >= 0
3114 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3115 <= strlen (str) + 1));
3116
3117 return c_readstr (str + offset, mode);
3118 }
3119
3120 /* LEN specifies the length of the block for a memcpy/memset operation.
3121 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3122 In some cases we can make a very likely guess about the maximum size,
3123 which we then store in PROBABLE_MAX_SIZE. */
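
/* For example (a sketch): for memcpy (a, b, n) where value range
   propagation has shown the unsigned variable N to lie in [4, 32],
   this sets *MIN_SIZE to 4 and both *MAX_SIZE and *PROBABLE_MAX_SIZE
   to 32.  */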
3124
3125 static void
3126 determine_block_size (tree len, rtx len_rtx,
3127 unsigned HOST_WIDE_INT *min_size,
3128 unsigned HOST_WIDE_INT *max_size,
3129 unsigned HOST_WIDE_INT *probable_max_size)
3130 {
3131 if (CONST_INT_P (len_rtx))
3132 {
3133 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3134 return;
3135 }
3136 else
3137 {
3138 wide_int min, max;
3139 enum value_range_type range_type = VR_UNDEFINED;
3140
3141 /* Determine bounds from the type. */
3142 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3143 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3144 else
3145 *min_size = 0;
3146 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3147 *probable_max_size = *max_size
3148 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3149 else
3150 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3151
3152 if (TREE_CODE (len) == SSA_NAME)
3153 range_type = get_range_info (len, &min, &max);
3154 if (range_type == VR_RANGE)
3155 {
3156 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3157 *min_size = min.to_uhwi ();
3158 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3159 *probable_max_size = *max_size = max.to_uhwi ();
3160 }
3161 else if (range_type == VR_ANTI_RANGE)
3162 {
3163 /* An anti-range 0...N lets us determine the minimal size to be N+1. */
3164 if (min == 0)
3165 {
3166 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3167 *min_size = max.to_uhwi () + 1;
3168 }
3169 /* Code like
3170
3171 int n;
3172 if (n < 100)
3173 memcpy (a, b, n)
3174
3175 produces an anti-range allowing negative values of N. We can
3176 still use that information and guess that N is not negative.
3177 */
3178 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3179 *probable_max_size = min.to_uhwi () - 1;
3180 }
3181 }
3182 gcc_checking_assert (*max_size <=
3183 (unsigned HOST_WIDE_INT)
3184 GET_MODE_MASK (GET_MODE (len_rtx)));
3185 }
3186
3187 /* Helper function to do the actual work for expand_builtin_memcpy. */
3188
3189 static rtx
3190 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3191 {
3192 const char *src_str;
3193 unsigned int src_align = get_pointer_alignment (src);
3194 unsigned int dest_align = get_pointer_alignment (dest);
3195 rtx dest_mem, src_mem, dest_addr, len_rtx;
3196 HOST_WIDE_INT expected_size = -1;
3197 unsigned int expected_align = 0;
3198 unsigned HOST_WIDE_INT min_size;
3199 unsigned HOST_WIDE_INT max_size;
3200 unsigned HOST_WIDE_INT probable_max_size;
3201
3202 /* If DEST is not a pointer type, call the normal function. */
3203 if (dest_align == 0)
3204 return NULL_RTX;
3205
3206 /* If SRC is not a pointer type, don't do this
3207 operation in-line. */
3208 if (src_align == 0)
3209 return NULL_RTX;
3210
3211 if (currently_expanding_gimple_stmt)
3212 stringop_block_profile (currently_expanding_gimple_stmt,
3213 &expected_align, &expected_size);
3214
3215 if (expected_align < dest_align)
3216 expected_align = dest_align;
3217 dest_mem = get_memory_rtx (dest, len);
3218 set_mem_align (dest_mem, dest_align);
3219 len_rtx = expand_normal (len);
3220 determine_block_size (len, len_rtx, &min_size, &max_size,
3221 &probable_max_size);
3222 src_str = c_getstr (src);
3223
3224 /* If SRC is a string constant and block move would be done
3225 by pieces, we can avoid loading the string from memory
3226 and instead store only the computed constants. */
3227 if (src_str
3228 && CONST_INT_P (len_rtx)
3229 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3230 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3231 CONST_CAST (char *, src_str),
3232 dest_align, false))
3233 {
3234 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3235 builtin_memcpy_read_str,
3236 CONST_CAST (char *, src_str),
3237 dest_align, false, 0);
3238 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3239 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3240 return dest_mem;
3241 }
3242
3243 src_mem = get_memory_rtx (src, len);
3244 set_mem_align (src_mem, src_align);
3245
3246 /* Copy the block of memory in the most expedient way. */
3247 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3248 CALL_EXPR_TAILCALL (exp)
3249 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3250 expected_align, expected_size,
3251 min_size, max_size, probable_max_size);
3252
3253 if (dest_addr == 0)
3254 {
3255 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3256 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3257 }
3258
3259 return dest_addr;
3260 }
3261
3262 /* Expand a call EXP to the memcpy builtin.
3263 Return NULL_RTX if we failed; the caller should emit a normal call,
3264 otherwise try to get the result in TARGET, if convenient (and in
3265 mode MODE if that's convenient). */
3266
3267 static rtx
3268 expand_builtin_memcpy (tree exp, rtx target)
3269 {
3270 if (!validate_arglist (exp,
3271 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3272 return NULL_RTX;
3273 else
3274 {
3275 tree dest = CALL_EXPR_ARG (exp, 0);
3276 tree src = CALL_EXPR_ARG (exp, 1);
3277 tree len = CALL_EXPR_ARG (exp, 2);
3278 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3279 }
3280 }
3281
3282 /* Expand an instrumented call EXP to the memcpy builtin.
3283 Return NULL_RTX if we failed; the caller should emit a normal call,
3284 otherwise try to get the result in TARGET, if convenient (and in
3285 mode MODE if that's convenient). */
3286
3287 static rtx
3288 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3289 {
3290 if (!validate_arglist (exp,
3291 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3292 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3293 INTEGER_TYPE, VOID_TYPE))
3294 return NULL_RTX;
3295 else
3296 {
3297 tree dest = CALL_EXPR_ARG (exp, 0);
3298 tree src = CALL_EXPR_ARG (exp, 2);
3299 tree len = CALL_EXPR_ARG (exp, 4);
3300 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3301
3302 /* Return src bounds with the result. */
3303 if (res)
3304 {
3305 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3306 expand_normal (CALL_EXPR_ARG (exp, 1)));
3307 res = chkp_join_splitted_slot (res, bnd);
3308 }
3309 return res;
3310 }
3311 }
3312
3313 /* Expand a call EXP to the mempcpy builtin.
3314 Return NULL_RTX if we failed; the caller should emit a normal call,
3315 otherwise try to get the result in TARGET, if convenient (and in
3316 mode MODE if that's convenient). If ENDP is 0 return the
3317 destination pointer, if ENDP is 1 return the end pointer ala
3318 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3319 stpcpy. */
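
/* E.g. with ENDP == 1 the expansion of mempcpy (d, s, n) yields d + n,
   while ENDP == 2 (stpcpy-style) yields d + n - 1.  */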
3320
3321 static rtx
3322 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3323 {
3324 if (!validate_arglist (exp,
3325 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3326 return NULL_RTX;
3327 else
3328 {
3329 tree dest = CALL_EXPR_ARG (exp, 0);
3330 tree src = CALL_EXPR_ARG (exp, 1);
3331 tree len = CALL_EXPR_ARG (exp, 2);
3332 return expand_builtin_mempcpy_args (dest, src, len,
3333 target, mode, /*endp=*/ 1,
3334 exp);
3335 }
3336 }
3337
3338 /* Expand an instrumented call EXP to the mempcpy builtin.
3339 Return NULL_RTX if we failed; the caller should emit a normal call.
3340 Otherwise try to get the result in TARGET, if convenient (and in
3341 mode MODE if that's convenient). */
3342
3343 static rtx
3344 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3345 {
3346 if (!validate_arglist (exp,
3347 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3348 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3349 INTEGER_TYPE, VOID_TYPE))
3350 return NULL_RTX;
3351 else
3352 {
3353 tree dest = CALL_EXPR_ARG (exp, 0);
3354 tree src = CALL_EXPR_ARG (exp, 2);
3355 tree len = CALL_EXPR_ARG (exp, 4);
3356 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3357 mode, 1, exp);
3358
3359 /* Return src bounds with the result. */
3360 if (res)
3361 {
3362 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3363 expand_normal (CALL_EXPR_ARG (exp, 1)));
3364 res = chkp_join_splitted_slot (res, bnd);
3365 }
3366 return res;
3367 }
3368 }
3369
3370 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3371 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3372 so that this can also be called without constructing an actual CALL_EXPR.
3373 The other arguments and return value are the same as for
3374 expand_builtin_mempcpy. */
3375
3376 static rtx
3377 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3378 rtx target, machine_mode mode, int endp,
3379 tree orig_exp)
3380 {
3381 tree fndecl = get_callee_fndecl (orig_exp);
3382
3383 /* If return value is ignored, transform mempcpy into memcpy. */
3384 if (target == const0_rtx
3385 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3386 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3387 {
3388 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3389 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3390 dest, src, len);
3391 return expand_expr (result, target, mode, EXPAND_NORMAL);
3392 }
3393 else if (target == const0_rtx
3394 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3395 {
3396 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3397 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3398 dest, src, len);
3399 return expand_expr (result, target, mode, EXPAND_NORMAL);
3400 }
3401 else
3402 {
3403 const char *src_str;
3404 unsigned int src_align = get_pointer_alignment (src);
3405 unsigned int dest_align = get_pointer_alignment (dest);
3406 rtx dest_mem, src_mem, len_rtx;
3407
3408 /* If either SRC or DEST is not a pointer type, don't do this
3409 operation in-line. */
3410 if (dest_align == 0 || src_align == 0)
3411 return NULL_RTX;
3412
3413 /* If LEN is not constant, call the normal function. */
3414 if (! tree_fits_uhwi_p (len))
3415 return NULL_RTX;
3416
3417 len_rtx = expand_normal (len);
3418 src_str = c_getstr (src);
3419
3420 /* If SRC is a string constant and block move would be done
3421 by pieces, we can avoid loading the string from memory
3422 and need only store the computed constants. */
3423 if (src_str
3424 && CONST_INT_P (len_rtx)
3425 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3426 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3427 CONST_CAST (char *, src_str),
3428 dest_align, false))
3429 {
3430 dest_mem = get_memory_rtx (dest, len);
3431 set_mem_align (dest_mem, dest_align);
3432 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3433 builtin_memcpy_read_str,
3434 CONST_CAST (char *, src_str),
3435 dest_align, false, endp);
3436 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3437 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3438 return dest_mem;
3439 }
3440
3441 if (CONST_INT_P (len_rtx)
3442 && can_move_by_pieces (INTVAL (len_rtx),
3443 MIN (dest_align, src_align)))
3444 {
3445 dest_mem = get_memory_rtx (dest, len);
3446 set_mem_align (dest_mem, dest_align);
3447 src_mem = get_memory_rtx (src, len);
3448 set_mem_align (src_mem, src_align);
3449 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3450 MIN (dest_align, src_align), endp);
3451 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3452 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3453 return dest_mem;
3454 }
3455
3456 return NULL_RTX;
3457 }
3458 }
3459
3460 #ifndef HAVE_movstr
3461 # define HAVE_movstr 0
3462 # define CODE_FOR_movstr CODE_FOR_nothing
3463 #endif
3464
3465 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3466 we failed; the caller should emit a normal call. Otherwise try to
3467 get the result in TARGET, if convenient. If ENDP is 0 return the
3468 destination pointer, if ENDP is 1 return the end pointer ala
3469 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3470 stpcpy. */
3471
3472 static rtx
3473 expand_movstr (tree dest, tree src, rtx target, int endp)
3474 {
3475 struct expand_operand ops[3];
3476 rtx dest_mem;
3477 rtx src_mem;
3478
3479 if (!HAVE_movstr)
3480 return NULL_RTX;
3481
3482 dest_mem = get_memory_rtx (dest, NULL);
3483 src_mem = get_memory_rtx (src, NULL);
3484 if (!endp)
3485 {
3486 target = force_reg (Pmode, XEXP (dest_mem, 0));
3487 dest_mem = replace_equiv_address (dest_mem, target);
3488 }
3489
3490 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3491 create_fixed_operand (&ops[1], dest_mem);
3492 create_fixed_operand (&ops[2], src_mem);
3493 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3494 return NULL_RTX;
3495
3496 if (endp && target != const0_rtx)
3497 {
3498 target = ops[0].value;
3499 /* movstr is supposed to set end to the address of the NUL
3500 terminator. If the caller requested a mempcpy-like return value,
3501 adjust it. */
3502 if (endp == 1)
3503 {
3504 rtx tem = plus_constant (GET_MODE (target),
3505 gen_lowpart (GET_MODE (target), target), 1);
3506 emit_move_insn (target, force_operand (tem, NULL_RTX));
3507 }
3508 }
3509 return target;
3510 }
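/* Worked example (illustrative): copying "hi" to address D places
   the NUL at D + 2, which is what movstr leaves in the output
   operand.  ENDP == 2 (stpcpy) can return that directly, while
   ENDP == 1 (mempcpy) wants one past the NUL, hence the
   plus_constant adjustment by 1 above, giving D + 3.  */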
3511
3512 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3513 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3514 try to get the result in TARGET, if convenient (and in mode MODE if that's
3515 convenient). */
3516
3517 static rtx
3518 expand_builtin_strcpy (tree exp, rtx target)
3519 {
3520 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3521 {
3522 tree dest = CALL_EXPR_ARG (exp, 0);
3523 tree src = CALL_EXPR_ARG (exp, 1);
3524 return expand_builtin_strcpy_args (dest, src, target);
3525 }
3526 return NULL_RTX;
3527 }
3528
3529 /* Helper function to do the actual work for expand_builtin_strcpy. The
3530 arguments to the builtin_strcpy call DEST and SRC are broken out
3531 so that this can also be called without constructing an actual CALL_EXPR.
3532 The other arguments and return value are the same as for
3533 expand_builtin_strcpy. */
3534
3535 static rtx
3536 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3537 {
3538 return expand_movstr (dest, src, target, /*endp=*/0);
3539 }
3540
3541 /* Expand a call EXP to the stpcpy builtin.
3542 Return NULL_RTX if we failed; the caller should emit a normal call.
3543 Otherwise try to get the result in TARGET, if convenient (and in
3544 mode MODE if that's convenient). */
3545
3546 static rtx
3547 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3548 {
3549 tree dst, src;
3550 location_t loc = EXPR_LOCATION (exp);
3551
3552 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3553 return NULL_RTX;
3554
3555 dst = CALL_EXPR_ARG (exp, 0);
3556 src = CALL_EXPR_ARG (exp, 1);
3557
3558 /* If return value is ignored, transform stpcpy into strcpy. */
3559 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3560 {
3561 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3562 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3563 return expand_expr (result, target, mode, EXPAND_NORMAL);
3564 }
3565 else
3566 {
3567 tree len, lenp1;
3568 rtx ret;
3569
3570 /* Ensure we get an actual string whose length can be evaluated at
3571 compile-time, not an expression containing a string. This is
3572 because the latter will potentially produce pessimized code
3573 when used to compute the return value. */
3574 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3575 return expand_movstr (dst, src, target, /*endp=*/2);
3576
3577 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3578 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3579 target, mode, /*endp=*/2,
3580 exp);
3581
3582 if (ret)
3583 return ret;
3584
3585 if (TREE_CODE (len) == INTEGER_CST)
3586 {
3587 rtx len_rtx = expand_normal (len);
3588
3589 if (CONST_INT_P (len_rtx))
3590 {
3591 ret = expand_builtin_strcpy_args (dst, src, target);
3592
3593 if (ret)
3594 {
3595 if (! target)
3596 {
3597 if (mode != VOIDmode)
3598 target = gen_reg_rtx (mode);
3599 else
3600 target = gen_reg_rtx (GET_MODE (ret));
3601 }
3602 if (GET_MODE (target) != GET_MODE (ret))
3603 ret = gen_lowpart (GET_MODE (target), ret);
3604
3605 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3606 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3607 gcc_assert (ret);
3608
3609 return target;
3610 }
3611 }
3612 }
3613
3614 return expand_movstr (dst, src, target, /*endp=*/2);
3615 }
3616 }
3617
3618 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3619 bytes from constant string DATA + OFFSET and return it as target
3620 constant. */
3621
3622 rtx
3623 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3624 machine_mode mode)
3625 {
3626 const char *str = (const char *) data;
3627
3628 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3629 return const0_rtx;
3630
3631 return c_readstr (str + offset, mode);
3632 }
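/* Illustrative behavior: for DATA == "ab", an offset of 0..2 reads
   from the string (offset 2 picks up the terminating NUL), and any
   offset beyond the string yields const0_rtx.  The zero padding
   that strncpy requires thus falls out of this callback for free.  */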
3633
3634 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3635 NULL_RTX if we failed; the caller should emit a normal call. */
3636
3637 static rtx
3638 expand_builtin_strncpy (tree exp, rtx target)
3639 {
3640 location_t loc = EXPR_LOCATION (exp);
3641
3642 if (validate_arglist (exp,
3643 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3644 {
3645 tree dest = CALL_EXPR_ARG (exp, 0);
3646 tree src = CALL_EXPR_ARG (exp, 1);
3647 tree len = CALL_EXPR_ARG (exp, 2);
3648 tree slen = c_strlen (src, 1);
3649
3650 /* We need a constant LEN, and a SRC whose length is a known constant. */
3651 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3652 return NULL_RTX;
3653
3654 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3655
3656 /* We're required to pad with trailing zeros if the requested
3657 len is greater than strlen(s2)+1. In that case try to
3658 use store_by_pieces; if it fails, punt. */
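/* Concrete case (illustrative): strncpy (d, "ab", 5) has
   SLEN + 1 == 3 and LEN == 5, so bytes 2 through 4 of the
   destination must be zero filled by the store below.  */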
3659 if (tree_int_cst_lt (slen, len))
3660 {
3661 unsigned int dest_align = get_pointer_alignment (dest);
3662 const char *p = c_getstr (src);
3663 rtx dest_mem;
3664
3665 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3666 || !can_store_by_pieces (tree_to_uhwi (len),
3667 builtin_strncpy_read_str,
3668 CONST_CAST (char *, p),
3669 dest_align, false))
3670 return NULL_RTX;
3671
3672 dest_mem = get_memory_rtx (dest, len);
3673 store_by_pieces (dest_mem, tree_to_uhwi (len),
3674 builtin_strncpy_read_str,
3675 CONST_CAST (char *, p), dest_align, false, 0);
3676 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3677 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3678 return dest_mem;
3679 }
3680 }
3681 return NULL_RTX;
3682 }
3683
3684 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3685 bytes from constant string DATA + OFFSET and return it as target
3686 constant. */
3687
3688 rtx
3689 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3690 machine_mode mode)
3691 {
3692 const char *c = (const char *) data;
3693 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3694
3695 memset (p, *c, GET_MODE_SIZE (mode));
3696
3697 return c_readstr (p, mode);
3698 }
3699
3700 /* Callback routine for store_by_pieces. Return the RTL of a register
3701 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3702 char value given in the RTL register data. For example, if mode is
3703 4 bytes wide, return the RTL for 0x01010101*data. */
3704
3705 static rtx
3706 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3707 machine_mode mode)
3708 {
3709 rtx target, coeff;
3710 size_t size;
3711 char *p;
3712
3713 size = GET_MODE_SIZE (mode);
3714 if (size == 1)
3715 return (rtx) data;
3716
3717 p = XALLOCAVEC (char, size);
3718 memset (p, 1, size);
3719 coeff = c_readstr (p, mode);
3720
3721 target = convert_to_mode (mode, (rtx) data, 1);
3722 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3723 return force_reg (mode, target);
3724 }
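/* Worked example (illustrative): for the byte value 0xab and a
   4-byte MODE, COEFF reads as 0x01010101, so the multiplication
   yields 0xabababab -- four consecutive copies of the byte.  */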
3725
3726 /* Expand expression EXP, which is a call to the memset builtin. Return
3727 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3728 try to get the result in TARGET, if convenient (and in mode MODE if that's
3729 convenient). */
3730
3731 static rtx
3732 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3733 {
3734 if (!validate_arglist (exp,
3735 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3736 return NULL_RTX;
3737 else
3738 {
3739 tree dest = CALL_EXPR_ARG (exp, 0);
3740 tree val = CALL_EXPR_ARG (exp, 1);
3741 tree len = CALL_EXPR_ARG (exp, 2);
3742 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3743 }
3744 }
3745
3746 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3747 Return NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3748 try to get the result in TARGET, if convenient (and in mode MODE if that's
3749 convenient). */
3750
3751 static rtx
3752 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3753 {
3754 if (!validate_arglist (exp,
3755 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3756 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3757 return NULL_RTX;
3758 else
3759 {
3760 tree dest = CALL_EXPR_ARG (exp, 0);
3761 tree val = CALL_EXPR_ARG (exp, 2);
3762 tree len = CALL_EXPR_ARG (exp, 3);
3763 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3764
3765 /* Return src bounds with the result. */
3766 if (res)
3767 {
3768 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3769 expand_normal (CALL_EXPR_ARG (exp, 1)));
3770 res = chkp_join_splitted_slot (res, bnd);
3771 }
3772 return res;
3773 }
3774 }
3775
3776 /* Helper function to do the actual work for expand_builtin_memset. The
3777 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3778 so that this can also be called without constructing an actual CALL_EXPR.
3779 The other arguments and return value are the same as for
3780 expand_builtin_memset. */
3781
3782 static rtx
3783 expand_builtin_memset_args (tree dest, tree val, tree len,
3784 rtx target, machine_mode mode, tree orig_exp)
3785 {
3786 tree fndecl, fn;
3787 enum built_in_function fcode;
3788 machine_mode val_mode;
3789 char c;
3790 unsigned int dest_align;
3791 rtx dest_mem, dest_addr, len_rtx;
3792 HOST_WIDE_INT expected_size = -1;
3793 unsigned int expected_align = 0;
3794 unsigned HOST_WIDE_INT min_size;
3795 unsigned HOST_WIDE_INT max_size;
3796 unsigned HOST_WIDE_INT probable_max_size;
3797
3798 dest_align = get_pointer_alignment (dest);
3799
3800 /* If DEST is not a pointer type, don't do this operation in-line. */
3801 if (dest_align == 0)
3802 return NULL_RTX;
3803
3804 if (currently_expanding_gimple_stmt)
3805 stringop_block_profile (currently_expanding_gimple_stmt,
3806 &expected_align, &expected_size);
3807
3808 if (expected_align < dest_align)
3809 expected_align = dest_align;
3810
3811 /* If the LEN parameter is zero, return DEST. */
3812 if (integer_zerop (len))
3813 {
3814 /* Evaluate and ignore VAL in case it has side-effects. */
3815 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3816 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3817 }
3818
3819 /* Stabilize the arguments in case we fail. */
3820 dest = builtin_save_expr (dest);
3821 val = builtin_save_expr (val);
3822 len = builtin_save_expr (len);
3823
3824 len_rtx = expand_normal (len);
3825 determine_block_size (len, len_rtx, &min_size, &max_size,
3826 &probable_max_size);
3827 dest_mem = get_memory_rtx (dest, len);
3828 val_mode = TYPE_MODE (unsigned_char_type_node);
3829
3830 if (TREE_CODE (val) != INTEGER_CST)
3831 {
3832 rtx val_rtx;
3833
3834 val_rtx = expand_normal (val);
3835 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3836
3837 /* Assume that we can memset by pieces if we can store
3838 the coefficients by pieces (in the required modes).
3839 We can't pass builtin_memset_gen_str as that emits RTL. */
3840 c = 1;
3841 if (tree_fits_uhwi_p (len)
3842 && can_store_by_pieces (tree_to_uhwi (len),
3843 builtin_memset_read_str, &c, dest_align,
3844 true))
3845 {
3846 val_rtx = force_reg (val_mode, val_rtx);
3847 store_by_pieces (dest_mem, tree_to_uhwi (len),
3848 builtin_memset_gen_str, val_rtx, dest_align,
3849 true, 0);
3850 }
3851 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3852 dest_align, expected_align,
3853 expected_size, min_size, max_size,
3854 probable_max_size))
3855 goto do_libcall;
3856
3857 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3858 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3859 return dest_mem;
3860 }
3861
3862 if (target_char_cast (val, &c))
3863 goto do_libcall;
3864
3865 if (c)
3866 {
3867 if (tree_fits_uhwi_p (len)
3868 && can_store_by_pieces (tree_to_uhwi (len),
3869 builtin_memset_read_str, &c, dest_align,
3870 true))
3871 store_by_pieces (dest_mem, tree_to_uhwi (len),
3872 builtin_memset_read_str, &c, dest_align, true, 0);
3873 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3874 gen_int_mode (c, val_mode),
3875 dest_align, expected_align,
3876 expected_size, min_size, max_size,
3877 probable_max_size))
3878 goto do_libcall;
3879
3880 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3881 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3882 return dest_mem;
3883 }
3884
3885 set_mem_align (dest_mem, dest_align);
3886 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3887 CALL_EXPR_TAILCALL (orig_exp)
3888 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3889 expected_align, expected_size,
3890 min_size, max_size,
3891 probable_max_size);
3892
3893 if (dest_addr == 0)
3894 {
3895 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3896 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3897 }
3898
3899 return dest_addr;
3900
3901 do_libcall:
3902 fndecl = get_callee_fndecl (orig_exp);
3903 fcode = DECL_FUNCTION_CODE (fndecl);
3904 if (fcode == BUILT_IN_MEMSET
3905 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3906 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3907 dest, val, len);
3908 else if (fcode == BUILT_IN_BZERO)
3909 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3910 dest, len);
3911 else
3912 gcc_unreachable ();
3913 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3914 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3915 return expand_call (fn, target, target == const0_rtx);
3916 }
3917
3918 /* Expand expression EXP, which is a call to the bzero builtin. Return
3919 NULL_RTX if we failed; the caller should emit a normal call. */
3920
3921 static rtx
3922 expand_builtin_bzero (tree exp)
3923 {
3924 tree dest, size;
3925 location_t loc = EXPR_LOCATION (exp);
3926
3927 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3928 return NULL_RTX;
3929
3930 dest = CALL_EXPR_ARG (exp, 0);
3931 size = CALL_EXPR_ARG (exp, 1);
3932
3933 /* Build a new argument list transforming bzero (ptr x, int y) into
3934 memset (ptr x, int 0, size_t y). It is done this way so that
3935 if the call isn't expanded inline, we fall back to
3936 calling bzero instead of memset. */
3937
3938 return expand_builtin_memset_args (dest, integer_zero_node,
3939 fold_convert_loc (loc,
3940 size_type_node, size),
3941 const0_rtx, VOIDmode, exp);
3942 }
3943
3944 /* Expand expression EXP, which is a call to the memcmp built-in function.
3945 Return NULL_RTX if we failed and the caller should emit a normal call,
3946 otherwise try to get the result in TARGET, if convenient (and in mode
3947 MODE, if that's convenient). */
3948
3949 static rtx
3950 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3951 ATTRIBUTE_UNUSED machine_mode mode)
3952 {
3953 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3954
3955 if (!validate_arglist (exp,
3956 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3957 return NULL_RTX;
3958
3959 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3960 implementing memcmp because it will stop if it encounters two
3961 zero bytes. */
3962 #if defined HAVE_cmpmemsi
3963 {
3964 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3965 rtx result;
3966 rtx insn;
3967 tree arg1 = CALL_EXPR_ARG (exp, 0);
3968 tree arg2 = CALL_EXPR_ARG (exp, 1);
3969 tree len = CALL_EXPR_ARG (exp, 2);
3970
3971 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3972 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3973 machine_mode insn_mode;
3974
3975 if (HAVE_cmpmemsi)
3976 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3977 else
3978 return NULL_RTX;
3979
3980 /* If we don't have POINTER_TYPE arguments, i.e. the alignments
3980 are unknown, call the function. */
3981 if (arg1_align == 0 || arg2_align == 0)
3982 return NULL_RTX;
3983
3984 /* Make a place to write the result of the instruction. */
3985 result = target;
3986 if (! (result != 0
3987 && REG_P (result) && GET_MODE (result) == insn_mode
3988 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3989 result = gen_reg_rtx (insn_mode);
3990
3991 arg1_rtx = get_memory_rtx (arg1, len);
3992 arg2_rtx = get_memory_rtx (arg2, len);
3993 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3994
3995 /* Set MEM_SIZE as appropriate. */
3996 if (CONST_INT_P (arg3_rtx))
3997 {
3998 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3999 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
4000 }
4001
4002 if (HAVE_cmpmemsi)
4003 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4004 GEN_INT (MIN (arg1_align, arg2_align)));
4005 else
4006 gcc_unreachable ();
4007
4008 if (insn)
4009 emit_insn (insn);
4010 else
4011 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4012 TYPE_MODE (integer_type_node), 3,
4013 XEXP (arg1_rtx, 0), Pmode,
4014 XEXP (arg2_rtx, 0), Pmode,
4015 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4016 TYPE_UNSIGNED (sizetype)),
4017 TYPE_MODE (sizetype));
4018
4019 /* Return the value in the proper mode for this function. */
4020 mode = TYPE_MODE (TREE_TYPE (exp));
4021 if (GET_MODE (result) == mode)
4022 return result;
4023 else if (target != 0)
4024 {
4025 convert_move (target, result, 0);
4026 return target;
4027 }
4028 else
4029 return convert_to_mode (mode, result, 0);
4030 }
4031 #endif /* HAVE_cmpmemsi. */
4032
4033 return NULL_RTX;
4034 }
4035
4036 /* Expand expression EXP, which is a call to the strcmp builtin. Return
4037 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4038 try to get the result in TARGET, if convenient. */
4039
4040 static rtx
4041 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4042 {
4043 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4044 return NULL_RTX;
4045
4046 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4047 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4048 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4049 {
4050 rtx arg1_rtx, arg2_rtx;
4051 rtx result, insn = NULL_RTX;
4052 tree fndecl, fn;
4053 tree arg1 = CALL_EXPR_ARG (exp, 0);
4054 tree arg2 = CALL_EXPR_ARG (exp, 1);
4055
4056 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4057 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4058
4059 /* If we don't have POINTER_TYPE arguments, i.e. the alignments
4059 are unknown, call the function. */
4060 if (arg1_align == 0 || arg2_align == 0)
4061 return NULL_RTX;
4062
4063 /* Stabilize the arguments in case gen_cmpstr(n)si fails. */
4064 arg1 = builtin_save_expr (arg1);
4065 arg2 = builtin_save_expr (arg2);
4066
4067 arg1_rtx = get_memory_rtx (arg1, NULL);
4068 arg2_rtx = get_memory_rtx (arg2, NULL);
4069
4070 #ifdef HAVE_cmpstrsi
4071 /* Try to call cmpstrsi. */
4072 if (HAVE_cmpstrsi)
4073 {
4074 machine_mode insn_mode
4075 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4076
4077 /* Make a place to write the result of the instruction. */
4078 result = target;
4079 if (! (result != 0
4080 && REG_P (result) && GET_MODE (result) == insn_mode
4081 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4082 result = gen_reg_rtx (insn_mode);
4083
4084 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4085 GEN_INT (MIN (arg1_align, arg2_align)));
4086 }
4087 #endif
4088 #ifdef HAVE_cmpstrnsi
4089 /* Try to determine at least one length and call cmpstrnsi. */
4090 if (!insn && HAVE_cmpstrnsi)
4091 {
4092 tree len;
4093 rtx arg3_rtx;
4094
4095 machine_mode insn_mode
4096 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4097 tree len1 = c_strlen (arg1, 1);
4098 tree len2 = c_strlen (arg2, 1);
4099
4100 if (len1)
4101 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4102 if (len2)
4103 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4104
4105 /* If we don't have a constant length for the first, use the length
4106 of the second, if we know it. We don't require a constant for
4107 this case; some cost analysis could be done if both are available
4108 but neither is constant. For now, assume they're equally cheap,
4109 unless one has side effects. If both strings have constant lengths,
4110 use the smaller. */
4111
4112 if (!len1)
4113 len = len2;
4114 else if (!len2)
4115 len = len1;
4116 else if (TREE_SIDE_EFFECTS (len1))
4117 len = len2;
4118 else if (TREE_SIDE_EFFECTS (len2))
4119 len = len1;
4120 else if (TREE_CODE (len1) != INTEGER_CST)
4121 len = len2;
4122 else if (TREE_CODE (len2) != INTEGER_CST)
4123 len = len1;
4124 else if (tree_int_cst_lt (len1, len2))
4125 len = len1;
4126 else
4127 len = len2;
4128
4129 /* If both arguments have side effects, we cannot optimize. */
4130 if (!len || TREE_SIDE_EFFECTS (len))
4131 goto do_libcall;
4132
4133 arg3_rtx = expand_normal (len);
4134
4135 /* Make a place to write the result of the instruction. */
4136 result = target;
4137 if (! (result != 0
4138 && REG_P (result) && GET_MODE (result) == insn_mode
4139 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4140 result = gen_reg_rtx (insn_mode);
4141
4142 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4143 GEN_INT (MIN (arg1_align, arg2_align)));
4144 }
4145 #endif
4146
4147 if (insn)
4148 {
4149 machine_mode mode;
4150 emit_insn (insn);
4151
4152 /* Return the value in the proper mode for this function. */
4153 mode = TYPE_MODE (TREE_TYPE (exp));
4154 if (GET_MODE (result) == mode)
4155 return result;
4156 if (target == 0)
4157 return convert_to_mode (mode, result, 0);
4158 convert_move (target, result, 0);
4159 return target;
4160 }
4161
4162 /* Expand the library call ourselves using a stabilized argument
4163 list to avoid re-evaluating the function's arguments twice. */
4164 #ifdef HAVE_cmpstrnsi
4165 do_libcall:
4166 #endif
4167 fndecl = get_callee_fndecl (exp);
4168 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4169 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4170 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4171 return expand_call (fn, target, target == const0_rtx);
4172 }
4173 #endif
4174 return NULL_RTX;
4175 }
4176
4177 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4178 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4179 try to get the result in TARGET, if convenient. */
4180
4181 static rtx
4182 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4183 ATTRIBUTE_UNUSED machine_mode mode)
4184 {
4185 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4186
4187 if (!validate_arglist (exp,
4188 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4189 return NULL_RTX;
4190
4191 /* If c_strlen can determine an expression for one of the string
4192 lengths, and it doesn't have side effects, then emit cmpstrnsi
4193 using length MIN(strlen(string)+1, arg3). */
4194 #ifdef HAVE_cmpstrnsi
4195 if (HAVE_cmpstrnsi)
4196 {
4197 tree len, len1, len2;
4198 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4199 rtx result, insn;
4200 tree fndecl, fn;
4201 tree arg1 = CALL_EXPR_ARG (exp, 0);
4202 tree arg2 = CALL_EXPR_ARG (exp, 1);
4203 tree arg3 = CALL_EXPR_ARG (exp, 2);
4204
4205 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4206 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4207 machine_mode insn_mode
4208 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4209
4210 len1 = c_strlen (arg1, 1);
4211 len2 = c_strlen (arg2, 1);
4212
4213 if (len1)
4214 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4215 if (len2)
4216 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4217
4218 /* If we don't have a constant length for the first, use the length
4219 of the second, if we know it. We don't require a constant for
4220 this case; some cost analysis could be done if both are available
4221 but neither is constant. For now, assume they're equally cheap,
4222 unless one has side effects. If both strings have constant lengths,
4223 use the smaller. */
4224
4225 if (!len1)
4226 len = len2;
4227 else if (!len2)
4228 len = len1;
4229 else if (TREE_SIDE_EFFECTS (len1))
4230 len = len2;
4231 else if (TREE_SIDE_EFFECTS (len2))
4232 len = len1;
4233 else if (TREE_CODE (len1) != INTEGER_CST)
4234 len = len2;
4235 else if (TREE_CODE (len2) != INTEGER_CST)
4236 len = len1;
4237 else if (tree_int_cst_lt (len1, len2))
4238 len = len1;
4239 else
4240 len = len2;
4241
4242 /* If both arguments have side effects, we cannot optimize. */
4243 if (!len || TREE_SIDE_EFFECTS (len))
4244 return NULL_RTX;
4245
4246 /* The actual new length parameter is MIN(len,arg3). */
4247 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4248 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4249
4250 /* If we don't have POINTER_TYPE arguments, i.e. the alignments
4250 are unknown, call the function. */
4251 if (arg1_align == 0 || arg2_align == 0)
4252 return NULL_RTX;
4253
4254 /* Make a place to write the result of the instruction. */
4255 result = target;
4256 if (! (result != 0
4257 && REG_P (result) && GET_MODE (result) == insn_mode
4258 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4259 result = gen_reg_rtx (insn_mode);
4260
4261 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4262 arg1 = builtin_save_expr (arg1);
4263 arg2 = builtin_save_expr (arg2);
4264 len = builtin_save_expr (len);
4265
4266 arg1_rtx = get_memory_rtx (arg1, len);
4267 arg2_rtx = get_memory_rtx (arg2, len);
4268 arg3_rtx = expand_normal (len);
4269 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4270 GEN_INT (MIN (arg1_align, arg2_align)));
4271 if (insn)
4272 {
4273 emit_insn (insn);
4274
4275 /* Return the value in the proper mode for this function. */
4276 mode = TYPE_MODE (TREE_TYPE (exp));
4277 if (GET_MODE (result) == mode)
4278 return result;
4279 if (target == 0)
4280 return convert_to_mode (mode, result, 0);
4281 convert_move (target, result, 0);
4282 return target;
4283 }
4284
4285 /* Expand the library call ourselves using a stabilized argument
4286 list to avoid re-evaluating the function's arguments twice. */
4287 fndecl = get_callee_fndecl (exp);
4288 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4289 arg1, arg2, len);
4290 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4291 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4292 return expand_call (fn, target, target == const0_rtx);
4293 }
4294 #endif
4295 return NULL_RTX;
4296 }
4297
4298 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4299 if that's convenient. */
4300
4301 rtx
4302 expand_builtin_saveregs (void)
4303 {
4304 rtx val;
4305 rtx_insn *seq;
4306
4307 /* Don't do __builtin_saveregs more than once in a function.
4308 Save the result of the first call and reuse it. */
4309 if (saveregs_value != 0)
4310 return saveregs_value;
4311
4312 /* When this function is called, it means that registers must be
4313 saved on entry to this function. So we migrate the call to the
4314 first insn of this function. */
4315
4316 start_sequence ();
4317
4318 /* Do whatever the machine needs done in this case. */
4319 val = targetm.calls.expand_builtin_saveregs ();
4320
4321 seq = get_insns ();
4322 end_sequence ();
4323
4324 saveregs_value = val;
4325
4326 /* Put the insns after the NOTE that starts the function. If this
4327 is inside a start_sequence, make the outer-level insn chain current, so
4328 the code is placed at the start of the function. */
4329 push_topmost_sequence ();
4330 emit_insn_after (seq, entry_of_function ());
4331 pop_topmost_sequence ();
4332
4333 return val;
4334 }
4335
4336 /* Expand a call to __builtin_next_arg. */
4337
4338 static rtx
4339 expand_builtin_next_arg (void)
4340 {
4341 /* Checking arguments is already done in fold_builtin_next_arg
4342 that must be called before this function. */
4343 return expand_binop (ptr_mode, add_optab,
4344 crtl->args.internal_arg_pointer,
4345 crtl->args.arg_offset_rtx,
4346 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4347 }
4348
4349 /* Make it easier for the backends by protecting the valist argument
4350 from multiple evaluations. */
4351
4352 static tree
4353 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4354 {
4355 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4356
4357 /* The current way of determining the type of valist is completely
4358 bogus. We should have the information on the va builtin instead. */
4359 if (!vatype)
4360 vatype = targetm.fn_abi_va_list (cfun->decl);
4361
4362 if (TREE_CODE (vatype) == ARRAY_TYPE)
4363 {
4364 if (TREE_SIDE_EFFECTS (valist))
4365 valist = save_expr (valist);
4366
4367 /* For this case, the backends will be expecting a pointer to
4368 vatype, but it's possible we've actually been given an array
4369 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4370 So fix it. */
4371 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4372 {
4373 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4374 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4375 }
4376 }
4377 else
4378 {
4379 tree pt = build_pointer_type (vatype);
4380
4381 if (! needs_lvalue)
4382 {
4383 if (! TREE_SIDE_EFFECTS (valist))
4384 return valist;
4385
4386 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4387 TREE_SIDE_EFFECTS (valist) = 1;
4388 }
4389
4390 if (TREE_SIDE_EFFECTS (valist))
4391 valist = save_expr (valist);
4392 valist = fold_build2_loc (loc, MEM_REF,
4393 vatype, valist, build_int_cst (pt, 0));
4394 }
4395
4396 return valist;
4397 }
4398
4399 /* The "standard" definition of va_list is void*. */
4400
4401 tree
4402 std_build_builtin_va_list (void)
4403 {
4404 return ptr_type_node;
4405 }
4406
4407 /* The "standard" abi va_list is va_list_type_node. */
4408
4409 tree
4410 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4411 {
4412 return va_list_type_node;
4413 }
4414
4415 /* The "standard" type of va_list is va_list_type_node. */
4416
4417 tree
4418 std_canonical_va_list_type (tree type)
4419 {
4420 tree wtype, htype;
4421
4422 if (INDIRECT_REF_P (type))
4423 type = TREE_TYPE (type);
4424 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4425 type = TREE_TYPE (type);
4426 wtype = va_list_type_node;
4427 htype = type;
4428 /* Treat structure va_list types. */
4429 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4430 htype = TREE_TYPE (htype);
4431 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4432 {
4433 /* If va_list is an array type, the argument may have decayed
4434 to a pointer type, e.g. by being passed to another function.
4435 In that case, unwrap both types so that we can compare the
4436 underlying records. */
4437 if (TREE_CODE (htype) == ARRAY_TYPE
4438 || POINTER_TYPE_P (htype))
4439 {
4440 wtype = TREE_TYPE (wtype);
4441 htype = TREE_TYPE (htype);
4442 }
4443 }
4444 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4445 return va_list_type_node;
4446
4447 return NULL_TREE;
4448 }
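/* Illustrative case (type names per the common x86-64 ABI usage):
   where va_list is the array type "struct __va_list_tag [1]", a
   va_list argument passed to another function decays to
   "struct __va_list_tag *"; unwrapping both types above lets the
   TYPE_MAIN_VARIANT comparison still recognize it.  */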
4449
4450 /* The "standard" implementation of va_start: just assign `nextarg' to
4451 the variable. */
4452
4453 void
4454 std_expand_builtin_va_start (tree valist, rtx nextarg)
4455 {
4456 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4457 convert_move (va_r, nextarg, 0);
4458
4459 /* We do not have any valid bounds for the pointer, so
4460 just store zero bounds for it. */
4461 if (chkp_function_instrumented_p (current_function_decl))
4462 chkp_expand_bounds_reset_for_mem (valist,
4463 make_tree (TREE_TYPE (valist),
4464 nextarg));
4465 }
4466
4467 /* Expand EXP, a call to __builtin_va_start. */
4468
4469 static rtx
4470 expand_builtin_va_start (tree exp)
4471 {
4472 rtx nextarg;
4473 tree valist;
4474 location_t loc = EXPR_LOCATION (exp);
4475
4476 if (call_expr_nargs (exp) < 2)
4477 {
4478 error_at (loc, "too few arguments to function %<va_start%>");
4479 return const0_rtx;
4480 }
4481
4482 if (fold_builtin_next_arg (exp, true))
4483 return const0_rtx;
4484
4485 nextarg = expand_builtin_next_arg ();
4486 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4487
4488 if (targetm.expand_builtin_va_start)
4489 targetm.expand_builtin_va_start (valist, nextarg);
4490 else
4491 std_expand_builtin_va_start (valist, nextarg);
4492
4493 return const0_rtx;
4494 }
4495
4496 /* Expand EXP, a call to __builtin_va_end. */
4497
4498 static rtx
4499 expand_builtin_va_end (tree exp)
4500 {
4501 tree valist = CALL_EXPR_ARG (exp, 0);
4502
4503 /* Evaluate for side effects, if needed. I hate macros that don't
4504 do that. */
4505 if (TREE_SIDE_EFFECTS (valist))
4506 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4507
4508 return const0_rtx;
4509 }
4510
4511 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4512 builtin rather than just as an assignment in stdarg.h because of the
4513 nastiness of array-type va_list types. */
4514
4515 static rtx
4516 expand_builtin_va_copy (tree exp)
4517 {
4518 tree dst, src, t;
4519 location_t loc = EXPR_LOCATION (exp);
4520
4521 dst = CALL_EXPR_ARG (exp, 0);
4522 src = CALL_EXPR_ARG (exp, 1);
4523
4524 dst = stabilize_va_list_loc (loc, dst, 1);
4525 src = stabilize_va_list_loc (loc, src, 0);
4526
4527 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4528
4529 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4530 {
4531 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4532 TREE_SIDE_EFFECTS (t) = 1;
4533 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4534 }
4535 else
4536 {
4537 rtx dstb, srcb, size;
4538
4539 /* Evaluate to pointers. */
4540 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4541 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4542 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4543 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4544
4545 dstb = convert_memory_address (Pmode, dstb);
4546 srcb = convert_memory_address (Pmode, srcb);
4547
4548 /* "Dereference" to BLKmode memories. */
4549 dstb = gen_rtx_MEM (BLKmode, dstb);
4550 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4551 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4552 srcb = gen_rtx_MEM (BLKmode, srcb);
4553 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4554 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4555
4556 /* Copy. */
4557 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4558 }
4559
4560 return const0_rtx;
4561 }
4562
4563 /* Expand a call to one of the builtin functions __builtin_frame_address or
4564 __builtin_return_address. */
4565
4566 static rtx
4567 expand_builtin_frame_address (tree fndecl, tree exp)
4568 {
4569 /* The argument must be a nonnegative integer constant.
4570 It counts the number of frames to scan up the stack.
4571 The value is the return address saved in that frame. */
4572 if (call_expr_nargs (exp) == 0)
4573 /* Warning about missing arg was already issued. */
4574 return const0_rtx;
4575 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4576 {
4577 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4578 error ("invalid argument to %<__builtin_frame_address%>");
4579 else
4580 error ("invalid argument to %<__builtin_return_address%>");
4581 return const0_rtx;
4582 }
4583 else
4584 {
4585 rtx tem
4586 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4587 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4588
4589 /* Some ports cannot access arbitrary stack frames. */
4590 if (tem == NULL)
4591 {
4592 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4593 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4594 else
4595 warning (0, "unsupported argument to %<__builtin_return_address%>");
4596 return const0_rtx;
4597 }
4598
4599 /* For __builtin_frame_address, return what we've got. */
4600 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4601 return tem;
4602
4603 if (!REG_P (tem)
4604 && ! CONSTANT_P (tem))
4605 tem = copy_addr_to_reg (tem);
4606 return tem;
4607 }
4608 }
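/* Usage sketch (illustrative): __builtin_return_address (0) yields
   the current function's return address, and larger constants walk
   outward through the callers; a non-constant argument is rejected
   by the tree_fits_uhwi_p check above.  */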
4609
4610 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4611 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4612 is the same as for allocate_dynamic_stack_space. */
4613
4614 static rtx
4615 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4616 {
4617 rtx op0;
4618 rtx result;
4619 bool valid_arglist;
4620 unsigned int align;
4621 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4622 == BUILT_IN_ALLOCA_WITH_ALIGN);
4623
4624 valid_arglist
4625 = (alloca_with_align
4626 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4627 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4628
4629 if (!valid_arglist)
4630 return NULL_RTX;
4631
4632 /* Compute the argument. */
4633 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4634
4635 /* Compute the alignment. */
4636 align = (alloca_with_align
4637 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4638 : BIGGEST_ALIGNMENT);
4639
4640 /* Allocate the desired space. */
4641 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4642 result = convert_memory_address (ptr_mode, result);
4643
4644 return result;
4645 }
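/* Illustrative call: __builtin_alloca_with_align (n, 128) supplies
   the requested alignment -- assumed here to be expressed in bits --
   as its constant second argument, whereas plain alloca defaults to
   BIGGEST_ALIGNMENT.  */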
4646
4647 /* Expand a call to bswap builtin in EXP.
4648 Return NULL_RTX if a normal call should be emitted rather than expanding the
4649 function in-line. If convenient, the result should be placed in TARGET.
4650 SUBTARGET may be used as the target for computing one of EXP's operands. */
4651
4652 static rtx
4653 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4654 rtx subtarget)
4655 {
4656 tree arg;
4657 rtx op0;
4658
4659 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4660 return NULL_RTX;
4661
4662 arg = CALL_EXPR_ARG (exp, 0);
4663 op0 = expand_expr (arg,
4664 subtarget && GET_MODE (subtarget) == target_mode
4665 ? subtarget : NULL_RTX,
4666 target_mode, EXPAND_NORMAL);
4667 if (GET_MODE (op0) != target_mode)
4668 op0 = convert_to_mode (target_mode, op0, 1);
4669
4670 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4671
4672 gcc_assert (target);
4673
4674 return convert_to_mode (target_mode, target, 1);
4675 }
4676
4677 /* Expand a call to a unary builtin in EXP.
4678 Return NULL_RTX if a normal call should be emitted rather than expanding the
4679 function in-line. If convenient, the result should be placed in TARGET.
4680 SUBTARGET may be used as the target for computing one of EXP's operands. */
4681
4682 static rtx
4683 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4684 rtx subtarget, optab op_optab)
4685 {
4686 rtx op0;
4687
4688 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4689 return NULL_RTX;
4690
4691 /* Compute the argument. */
4692 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4693 (subtarget
4694 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4695 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4696 VOIDmode, EXPAND_NORMAL);
4697 /* Compute op, into TARGET if possible.
4698 Set TARGET to wherever the result comes back. */
4699 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4700 op_optab, op0, target, op_optab != clrsb_optab);
4701 gcc_assert (target);
4702
4703 return convert_to_mode (target_mode, target, 0);
4704 }
4705
4706 /* Expand a call to __builtin_expect. We just return our argument
4707 as the builtin_expect semantics should already have been applied by
4708 the tree branch prediction pass. */
4709
4710 static rtx
4711 expand_builtin_expect (tree exp, rtx target)
4712 {
4713 tree arg;
4714
4715 if (call_expr_nargs (exp) < 2)
4716 return const0_rtx;
4717 arg = CALL_EXPR_ARG (exp, 0);
4718
4719 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4720 /* When guessing was done, the hints should be already stripped away. */
4721 gcc_assert (!flag_guess_branch_prob
4722 || optimize == 0 || seen_error ());
4723 return target;
4724 }
4725
4726 /* Expand a call to __builtin_assume_aligned. We just return our first
4727 argument as the builtin_assume_aligned semantics should already have
4728 been applied by CCP. */
4729
4730 static rtx
4731 expand_builtin_assume_aligned (tree exp, rtx target)
4732 {
4733 if (call_expr_nargs (exp) < 2)
4734 return const0_rtx;
4735 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4736 EXPAND_NORMAL);
4737 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4738 && (call_expr_nargs (exp) < 3
4739 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4740 return target;
4741 }
4742
4743 void
4744 expand_builtin_trap (void)
4745 {
4746 #ifdef HAVE_trap
4747 if (HAVE_trap)
4748 {
4749 rtx insn = emit_insn (gen_trap ());
4750 /* For trap insns when not accumulating outgoing args force
4751 REG_ARGS_SIZE note to prevent crossjumping of calls with
4752 different args sizes. */
4753 if (!ACCUMULATE_OUTGOING_ARGS)
4754 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4755 }
4756 else
4757 #endif
4758 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4759 emit_barrier ();
4760 }
4761
4762 /* Expand a call to __builtin_unreachable. We do nothing except emit
4763 a barrier saying that control flow will not pass here.
4764
4765 It is the responsibility of the program being compiled to ensure
4766 that control flow never reaches __builtin_unreachable. */
4767 static void
4768 expand_builtin_unreachable (void)
4769 {
4770 emit_barrier ();
4771 }
4772
4773 /* Expand EXP, a call to fabs, fabsf or fabsl.
4774 Return NULL_RTX if a normal call should be emitted rather than expanding
4775 the function inline. If convenient, the result should be placed
4776 in TARGET. SUBTARGET may be used as the target for computing
4777 the operand. */
4778
4779 static rtx
4780 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4781 {
4782 machine_mode mode;
4783 tree arg;
4784 rtx op0;
4785
4786 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4787 return NULL_RTX;
4788
4789 arg = CALL_EXPR_ARG (exp, 0);
4790 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4791 mode = TYPE_MODE (TREE_TYPE (arg));
4792 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4793 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4794 }
4795
4796 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4797 Return NULL_RTX if a normal call should be emitted rather than expanding the
4798 function inline. If convenient, the result should be placed in TARGET.
4799 SUBTARGET may be used as the target for computing the operand. */
4800
4801 static rtx
4802 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4803 {
4804 rtx op0, op1;
4805 tree arg;
4806
4807 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4808 return NULL_RTX;
4809
4810 arg = CALL_EXPR_ARG (exp, 0);
4811 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4812
4813 arg = CALL_EXPR_ARG (exp, 1);
4814 op1 = expand_normal (arg);
4815
4816 return expand_copysign (op0, op1, target);
4817 }
4818
4819 /* Expand a call to __builtin___clear_cache. */
4820
4821 static rtx
4822 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4823 {
4824 #ifndef HAVE_clear_cache
4825 #ifdef CLEAR_INSN_CACHE
4826 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4827 does something. Just do the default expansion to a call to
4828 __clear_cache(). */
4829 return NULL_RTX;
4830 #else
4831 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4832 does nothing. There is no need to call it. Do nothing. */
4833 return const0_rtx;
4834 #endif /* CLEAR_INSN_CACHE */
4835 #else
4836 /* We have a "clear_cache" insn, and it will handle everything. */
4837 tree begin, end;
4838 rtx begin_rtx, end_rtx;
4839
4840 /* We must not expand to a library call. If we did, any
4841 fallback library function in libgcc that might contain a call to
4842 __builtin___clear_cache() would recurse infinitely. */
4843 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4844 {
4845 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4846 return const0_rtx;
4847 }
4848
4849 if (HAVE_clear_cache)
4850 {
4851 struct expand_operand ops[2];
4852
4853 begin = CALL_EXPR_ARG (exp, 0);
4854 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4855
4856 end = CALL_EXPR_ARG (exp, 1);
4857 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4858
4859 create_address_operand (&ops[0], begin_rtx);
4860 create_address_operand (&ops[1], end_rtx);
4861 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4862 return const0_rtx;
4863 }
4864 return const0_rtx;
4865 #endif /* HAVE_clear_cache */
4866 }
4867
4868 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4869
4870 static rtx
4871 round_trampoline_addr (rtx tramp)
4872 {
4873 rtx temp, addend, mask;
4874
4875 /* If we don't need too much alignment, we'll have been guaranteed
4876 proper alignment by get_trampoline_type. */
4877 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4878 return tramp;
4879
4880 /* Round address up to desired boundary. */
4881 temp = gen_reg_rtx (Pmode);
4882 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4883 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4884
4885 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4886 temp, 0, OPTAB_LIB_WIDEN);
4887 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4888 temp, 0, OPTAB_LIB_WIDEN);
4889
4890 return tramp;
4891 }
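/* Worked example (illustrative): with TRAMPOLINE_ALIGNMENT of 64
   bits (8 bytes) this computes (tramp + 7) & -8, so e.g. 0x1005
   rounds up to 0x1008 -- the usual power-of-two round-up idiom.  */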
4892
4893 static rtx
4894 expand_builtin_init_trampoline (tree exp, bool onstack)
4895 {
4896 tree t_tramp, t_func, t_chain;
4897 rtx m_tramp, r_tramp, r_chain, tmp;
4898
4899 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4900 POINTER_TYPE, VOID_TYPE))
4901 return NULL_RTX;
4902
4903 t_tramp = CALL_EXPR_ARG (exp, 0);
4904 t_func = CALL_EXPR_ARG (exp, 1);
4905 t_chain = CALL_EXPR_ARG (exp, 2);
4906
4907 r_tramp = expand_normal (t_tramp);
4908 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4909 MEM_NOTRAP_P (m_tramp) = 1;
4910
4911 /* If ONSTACK, the TRAMP argument should be the address of a field
4912 within the local function's FRAME decl. Either way, let's see if
4913 we can fill in the MEM_ATTRs for this memory. */
4914 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4915 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4916
4917 /* Creator of a heap trampoline is responsible for making sure the
4918 address is aligned to at least STACK_BOUNDARY. Normally malloc
4919 will ensure this anyhow. */
4920 tmp = round_trampoline_addr (r_tramp);
4921 if (tmp != r_tramp)
4922 {
4923 m_tramp = change_address (m_tramp, BLKmode, tmp);
4924 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4925 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4926 }
4927
4928 /* The FUNC argument should be the address of the nested function.
4929 Extract the actual function decl to pass to the hook. */
4930 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4931 t_func = TREE_OPERAND (t_func, 0);
4932 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4933
4934 r_chain = expand_normal (t_chain);
4935
4936 /* Generate insns to initialize the trampoline. */
4937 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4938
4939 if (onstack)
4940 {
4941 trampolines_created = 1;
4942
4943 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4944 "trampoline generated for nested function %qD", t_func);
4945 }
4946
4947 return const0_rtx;
4948 }
4949
4950 static rtx
4951 expand_builtin_adjust_trampoline (tree exp)
4952 {
4953 rtx tramp;
4954
4955 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4956 return NULL_RTX;
4957
4958 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4959 tramp = round_trampoline_addr (tramp);
4960 if (targetm.calls.trampoline_adjust_address)
4961 tramp = targetm.calls.trampoline_adjust_address (tramp);
4962
4963 return tramp;
4964 }
4965
4966 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4967 function. The function first checks whether the back end provides
4968 an insn to implement signbit for the respective mode. If not, it
4969 checks whether the floating point format of the value is such that
4970 the sign bit can be extracted. If that is not the case, the
4971 function returns NULL_RTX to indicate that a normal call should be
4972 emitted rather than expanding the function in-line. EXP is the
4973 expression that is a call to the builtin function; if convenient,
4974 the result should be placed in TARGET. */
4975 static rtx
4976 expand_builtin_signbit (tree exp, rtx target)
4977 {
4978 const struct real_format *fmt;
4979 machine_mode fmode, imode, rmode;
4980 tree arg;
4981 int word, bitpos;
4982 enum insn_code icode;
4983 rtx temp;
4984 location_t loc = EXPR_LOCATION (exp);
4985
4986 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4987 return NULL_RTX;
4988
4989 arg = CALL_EXPR_ARG (exp, 0);
4990 fmode = TYPE_MODE (TREE_TYPE (arg));
4991 rmode = TYPE_MODE (TREE_TYPE (exp));
4992 fmt = REAL_MODE_FORMAT (fmode);
4993
4994 arg = builtin_save_expr (arg);
4995
4996 /* Expand the argument yielding a RTX expression. */
4997 temp = expand_normal (arg);
4998
4999 /* Check if the back end provides an insn that handles signbit for the
5000 argument's mode. */
5001 icode = optab_handler (signbit_optab, fmode);
5002 if (icode != CODE_FOR_nothing)
5003 {
5004 rtx_insn *last = get_last_insn ();
5005 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5006 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5007 return target;
5008 delete_insns_since (last);
5009 }
5010
5011 /* For floating point formats without a sign bit, implement signbit
5012 as "ARG < 0.0". */
5013 bitpos = fmt->signbit_ro;
5014 if (bitpos < 0)
5015 {
5016 /* But we can't do this if the format supports signed zero. */
5017 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5018 return NULL_RTX;
5019
5020 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5021 build_real (TREE_TYPE (arg), dconst0));
5022 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5023 }
5024
5025 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5026 {
5027 imode = int_mode_for_mode (fmode);
5028 if (imode == BLKmode)
5029 return NULL_RTX;
5030 temp = gen_lowpart (imode, temp);
5031 }
5032 else
5033 {
5034 imode = word_mode;
5035 /* Handle targets with different FP word orders. */
5036 if (FLOAT_WORDS_BIG_ENDIAN)
5037 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5038 else
5039 word = bitpos / BITS_PER_WORD;
5040 temp = operand_subword_force (temp, word, fmode);
5041 bitpos = bitpos % BITS_PER_WORD;
5042 }
5043
5044 /* Force the intermediate word_mode (or narrower) result into a
5045 register. This avoids attempting to create paradoxical SUBREGs
5046 of floating point modes below. */
5047 temp = force_reg (imode, temp);
5048
5049 /* If the bitpos is within the "result mode" lowpart, the operation
5050 can be implemented with a single bitwise AND. Otherwise, we need
5051 a right shift and an AND. */
5052
5053 if (bitpos < GET_MODE_BITSIZE (rmode))
5054 {
5055 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5056
5057 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5058 temp = gen_lowpart (rmode, temp);
5059 temp = expand_binop (rmode, and_optab, temp,
5060 immed_wide_int_const (mask, rmode),
5061 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5062 }
5063 else
5064 {
5065 /* Perform a logical right shift to place the signbit in the least
5066 significant bit, then truncate the result to the desired mode
5067 and mask just this bit. */
5068 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5069 temp = gen_lowpart (rmode, temp);
5070 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5071 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5072 }
5073
5074 return temp;
5075 }
5076
5077 /* Expand fork or exec calls. TARGET is the desired target of the
5078 call. EXP is the call. FN is the
5079 identifier of the actual function. IGNORE is nonzero if the
5080 value is to be ignored. */
5081
5082 static rtx
5083 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5084 {
5085 tree id, decl;
5086 tree call;
5087
5088 /* If we are not profiling, just call the function. */
5089 if (!profile_arc_flag)
5090 return NULL_RTX;
5091
5092 /* Otherwise call the wrapper. This should be equivalent for the rest of
5093 the compiler, so the code does not diverge, and the wrapper may run the
5094 code necessary to keep the profiling sane. */
5095
5096 switch (DECL_FUNCTION_CODE (fn))
5097 {
5098 case BUILT_IN_FORK:
5099 id = get_identifier ("__gcov_fork");
5100 break;
5101
5102 case BUILT_IN_EXECL:
5103 id = get_identifier ("__gcov_execl");
5104 break;
5105
5106 case BUILT_IN_EXECV:
5107 id = get_identifier ("__gcov_execv");
5108 break;
5109
5110 case BUILT_IN_EXECLP:
5111 id = get_identifier ("__gcov_execlp");
5112 break;
5113
5114 case BUILT_IN_EXECLE:
5115 id = get_identifier ("__gcov_execle");
5116 break;
5117
5118 case BUILT_IN_EXECVP:
5119 id = get_identifier ("__gcov_execvp");
5120 break;
5121
5122 case BUILT_IN_EXECVE:
5123 id = get_identifier ("__gcov_execve");
5124 break;
5125
5126 default:
5127 gcc_unreachable ();
5128 }
5129
5130 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5131 FUNCTION_DECL, id, TREE_TYPE (fn));
5132 DECL_EXTERNAL (decl) = 1;
5133 TREE_PUBLIC (decl) = 1;
5134 DECL_ARTIFICIAL (decl) = 1;
5135 TREE_NOTHROW (decl) = 1;
5136 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5137 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5138 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5139 return expand_call (call, target, ignore);
5140 }
5141
5142
5143 \f
5144 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5145 the pointer in these functions is void*, the tree optimizers may remove
5146 casts. The mode computed in expand_builtin isn't reliable either, due
5147 to __sync_bool_compare_and_swap.
5148
5149 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5150 group of builtins. This gives us log2 of the mode size. */
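/* For instance (illustrative): for __sync_fetch_and_add_4, FCODE_DIFF is
   BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2, so
   the size is BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on typical
   targets.  */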
5151
5152 static inline machine_mode
5153 get_builtin_sync_mode (int fcode_diff)
5154 {
5155 /* The size is not negotiable, so ask not to get BLKmode in return
5156 if the target indicates that a smaller size would be better. */
5157 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5158 }
5159
5160 /* Expand the memory expression LOC and return the appropriate memory operand
5161 for the builtin_sync operations. */
5162
5163 static rtx
5164 get_builtin_sync_mem (tree loc, machine_mode mode)
5165 {
5166 rtx addr, mem;
5167
5168 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5169 addr = convert_memory_address (Pmode, addr);
5170
5171 /* Note that we explicitly do not want any alias information for this
5172 memory, so that we kill all other live memories. Otherwise we don't
5173 satisfy the full barrier semantics of the intrinsic. */
5174 mem = validize_mem (gen_rtx_MEM (mode, addr));
5175
5176 /* The alignment needs to be at least that of the mode. */
5177 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5178 get_pointer_alignment (loc)));
5179 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5180 MEM_VOLATILE_P (mem) = 1;
5181
5182 return mem;
5183 }
5184
5185 /* Make sure an argument is in the right mode.
5186 EXP is the tree argument.
5187 MODE is the mode it should be in. */
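/* For example (illustrative): the literal 1 in __sync_fetch_and_add (p, 1)
   expands to a CONST_INT, which carries VOIDmode; its intended mode is
   known only from the type of the call argument, hence the TYPE_MODE
   fallback below.  */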
5188
5189 static rtx
5190 expand_expr_force_mode (tree exp, machine_mode mode)
5191 {
5192 rtx val;
5193 machine_mode old_mode;
5194
5195 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5196 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5197 of CONST_INTs, where we know the old_mode only from the call argument. */
5198
5199 old_mode = GET_MODE (val);
5200 if (old_mode == VOIDmode)
5201 old_mode = TYPE_MODE (TREE_TYPE (exp));
5202 val = convert_modes (mode, old_mode, val, 1);
5203 return val;
5204 }
5205
5206
5207 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5208 EXP is the CALL_EXPR. CODE is the rtx code
5209 that corresponds to the arithmetic or logical operation from the name;
5210 an exception here is that NOT actually means NAND. TARGET is an optional
5211 place for us to store the results; AFTER is true if this is the
5212 xxx_and_fetch form, i.e. the value after the operation is returned. */
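/* For example (illustrative): __sync_fetch_and_add expands here with CODE
   PLUS and AFTER false (the value before the addition is returned), while
   __sync_add_and_fetch expands with CODE PLUS and AFTER true.  */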
5213
5214 static rtx
5215 expand_builtin_sync_operation (machine_mode mode, tree exp,
5216 enum rtx_code code, bool after,
5217 rtx target)
5218 {
5219 rtx val, mem;
5220 location_t loc = EXPR_LOCATION (exp);
5221
5222 if (code == NOT && warn_sync_nand)
5223 {
5224 tree fndecl = get_callee_fndecl (exp);
5225 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5226
5227 static bool warned_f_a_n, warned_n_a_f;
5228
5229 switch (fcode)
5230 {
5231 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5232 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5233 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5234 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5235 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5236 if (warned_f_a_n)
5237 break;
5238
5239 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5240 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5241 warned_f_a_n = true;
5242 break;
5243
5244 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5245 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5246 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5247 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5248 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5249 if (warned_n_a_f)
5250 break;
5251
5252 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5253 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5254 warned_n_a_f = true;
5255 break;
5256
5257 default:
5258 gcc_unreachable ();
5259 }
5260 }
5261
5262 /* Expand the operands. */
5263 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5264 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5265
5266 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5267 after);
5268 }
5269
5270 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5271 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5272 true if this is the boolean form. TARGET is a place for us to store the
5273 results; this is NOT optional if IS_BOOL is true. */
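/* For example (illustrative): __sync_bool_compare_and_swap (p, o, n) is
   expanded with IS_BOOL true, so only the success flag is stored in
   TARGET, while __sync_val_compare_and_swap (p, o, n) uses IS_BOOL false
   and stores the prior contents of *p instead.  */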
5274
5275 static rtx
5276 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5277 bool is_bool, rtx target)
5278 {
5279 rtx old_val, new_val, mem;
5280 rtx *pbool, *poval;
5281
5282 /* Expand the operands. */
5283 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5284 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5285 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5286
5287 pbool = poval = NULL;
5288 if (target != const0_rtx)
5289 {
5290 if (is_bool)
5291 pbool = &target;
5292 else
5293 poval = &target;
5294 }
5295 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5296 false, MEMMODEL_SEQ_CST,
5297 MEMMODEL_SEQ_CST))
5298 return NULL_RTX;
5299
5300 return target;
5301 }
5302
5303 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5304 general form is actually an atomic exchange, and some targets only
5305 support a reduced form with the second argument being a constant 1.
5306 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5307 the results. */
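/* Illustrative user-level usage (not part of the compiler): a minimal
   spin-lock acquisition,

     while (__sync_lock_test_and_set (&lock, 1))
       ;

   On targets that only implement the reduced form, the value written
   must be the constant 1, exactly as above.  */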
5308
5309 static rtx
5310 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5311 rtx target)
5312 {
5313 rtx val, mem;
5314
5315 /* Expand the operands. */
5316 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5317 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5318
5319 return expand_sync_lock_test_and_set (target, mem, val);
5320 }
5321
5322 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5323
5324 static void
5325 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5326 {
5327 rtx mem;
5328
5329 /* Expand the operands. */
5330 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5331
5332 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5333 }
5334
5335 /* Given an integer representing an ``enum memmodel'', verify its
5336 correctness and return the memory model enum. */
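/* For example (illustrative): __atomic_load_n (p, __ATOMIC_ACQUIRE)
   passes the constant 2 here, while a call whose memory-model argument
   is a run-time variable is conservatively treated as MEMMODEL_SEQ_CST
   below.  */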
5337
5338 static enum memmodel
5339 get_memmodel (tree exp)
5340 {
5341 rtx op;
5342 unsigned HOST_WIDE_INT val;
5343
5344 /* If the parameter is not a constant, it's a run time value so we'll just
5345 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5346 if (TREE_CODE (exp) != INTEGER_CST)
5347 return MEMMODEL_SEQ_CST;
5348
5349 op = expand_normal (exp);
5350
5351 val = INTVAL (op);
5352 if (targetm.memmodel_check)
5353 val = targetm.memmodel_check (val);
5354 else if (val & ~MEMMODEL_MASK)
5355 {
5356 warning (OPT_Winvalid_memory_model,
5357 "Unknown architecture specifier in memory model to builtin.");
5358 return MEMMODEL_SEQ_CST;
5359 }
5360
5361 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5362 {
5363 warning (OPT_Winvalid_memory_model,
5364 "invalid memory model argument to builtin");
5365 return MEMMODEL_SEQ_CST;
5366 }
5367
5368 return (enum memmodel) val;
5369 }
5370
5371 /* Expand the __atomic_exchange intrinsic:
5372 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5373 EXP is the CALL_EXPR.
5374 TARGET is an optional place for us to store the results. */
5375
5376 static rtx
5377 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5378 {
5379 rtx val, mem;
5380 enum memmodel model;
5381
5382 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5383 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5384 {
5385 error ("invalid memory model for %<__atomic_exchange%>");
5386 return NULL_RTX;
5387 }
5388
5389 if (!flag_inline_atomics)
5390 return NULL_RTX;
5391
5392 /* Expand the operands. */
5393 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5394 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5395
5396 return expand_atomic_exchange (target, mem, val, model);
5397 }
5398
5399 /* Expand the __atomic_compare_exchange intrinsic:
5400 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5401 TYPE desired, BOOL weak,
5402 enum memmodel success,
5403 enum memmodel failure)
5404 EXP is the CALL_EXPR.
5405 TARGET is an optional place for us to store the results. */
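/* For example (illustrative): a failure model that is stronger than the
   success model, as in

     __atomic_compare_exchange_n (p, &expected, desired, 0,
				  __ATOMIC_RELAXED, __ATOMIC_SEQ_CST);

   is rejected with an error below, as is a failure model of
   __ATOMIC_RELEASE or __ATOMIC_ACQ_REL.  */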
5406
5407 static rtx
5408 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5409 rtx target)
5410 {
5411 rtx expect, desired, mem, oldval;
5412 rtx_code_label *label;
5413 enum memmodel success, failure;
5414 tree weak;
5415 bool is_weak;
5416
5417 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5418 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5419
5420 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5421 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5422 {
5423 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5424 return NULL_RTX;
5425 }
5426
5427 if (failure > success)
5428 {
5429 error ("failure memory model cannot be stronger than success "
5430 "memory model for %<__atomic_compare_exchange%>");
5431 return NULL_RTX;
5432 }
5433
5434 if (!flag_inline_atomics)
5435 return NULL_RTX;
5436
5437 /* Expand the operands. */
5438 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5439
5440 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5441 expect = convert_memory_address (Pmode, expect);
5442 expect = gen_rtx_MEM (mode, expect);
5443 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5444
5445 weak = CALL_EXPR_ARG (exp, 3);
5446 is_weak = false;
5447 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5448 is_weak = true;
5449
5450 if (target == const0_rtx)
5451 target = NULL;
5452
5453 /* Lest the rtl backend create a race condition with an improper store
5454 to memory, always create a new pseudo for OLDVAL. */
5455 oldval = NULL;
5456
5457 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5458 is_weak, success, failure))
5459 return NULL_RTX;
5460
5461 /* Conditionally store back to EXPECT, lest we create a race condition
5462 with an improper store to memory. */
5463 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5464 the normal case where EXPECT is totally private, i.e. a register. At
5465 which point the store can be unconditional. */
5466 label = gen_label_rtx ();
5467 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5468 emit_move_insn (expect, oldval);
5469 emit_label (label);
5470
5471 return target;
5472 }
5473
5474 /* Expand the __atomic_load intrinsic:
5475 TYPE __atomic_load (TYPE *object, enum memmodel)
5476 EXP is the CALL_EXPR.
5477 TARGET is an optional place for us to store the results. */
5478
5479 static rtx
5480 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5481 {
5482 rtx mem;
5483 enum memmodel model;
5484
5485 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5486 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5487 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5488 {
5489 error ("invalid memory model for %<__atomic_load%>");
5490 return NULL_RTX;
5491 }
5492
5493 if (!flag_inline_atomics)
5494 return NULL_RTX;
5495
5496 /* Expand the operand. */
5497 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5498
5499 return expand_atomic_load (target, mem, model);
5500 }
5501
5502
5503 /* Expand the __atomic_store intrinsic:
5504 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5505 EXP is the CALL_EXPR.
5506 TARGET is an optional place for us to store the results. */
5507
5508 static rtx
5509 expand_builtin_atomic_store (machine_mode mode, tree exp)
5510 {
5511 rtx mem, val;
5512 enum memmodel model;
5513
5514 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5515 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5516 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5517 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5518 {
5519 error ("invalid memory model for %<__atomic_store%>");
5520 return NULL_RTX;
5521 }
5522
5523 if (!flag_inline_atomics)
5524 return NULL_RTX;
5525
5526 /* Expand the operands. */
5527 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5528 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5529
5530 return expand_atomic_store (mem, val, model, false);
5531 }
5532
5533 /* Expand the __atomic_fetch_XXX intrinsic:
5534 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5535 EXP is the CALL_EXPR.
5536 TARGET is an optional place for us to store the results.
5537 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR; NOT means NAND.
5538 FETCH_AFTER is true if returning the result of the operation.
5539 FETCH_AFTER is false if returning the value before the operation.
5540 IGNORE is true if the result is not used.
5541 EXT_CALL is the correct builtin for an external call if this cannot be
5542 resolved to an instruction sequence. */
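/* For example (illustrative): if __atomic_nand_fetch_4 cannot be expanded
   inline, it is redirected to a call to __atomic_fetch_nand_4, and the
   trailing correction below recomputes the post-operation value as
   ~(oldval & val) from the fetched result.  */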
5543
5544 static rtx
5545 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5546 enum rtx_code code, bool fetch_after,
5547 bool ignore, enum built_in_function ext_call)
5548 {
5549 rtx val, mem, ret;
5550 enum memmodel model;
5551 tree fndecl;
5552 tree addr;
5553
5554 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5555
5556 /* Expand the operands. */
5557 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5558 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5559
5560 /* Only try generating instructions if inlining is turned on. */
5561 if (flag_inline_atomics)
5562 {
5563 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5564 if (ret)
5565 return ret;
5566 }
5567
5568 /* If no different routine is needed for the library call, return
NULL_RTX so that a normal call is emitted. */
5569 if (ext_call == BUILT_IN_NONE)
5570 return NULL_RTX;
5571
5572 /* Change the call to the specified function. */
5573 fndecl = get_callee_fndecl (exp);
5574 addr = CALL_EXPR_FN (exp);
5575 STRIP_NOPS (addr);
5576
5577 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5578 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5579
5580 /* Expand the call here so we can emit trailing code. */
5581 ret = expand_call (exp, target, ignore);
5582
5583 /* Replace the original function just in case it matters. */
5584 TREE_OPERAND (addr, 0) = fndecl;
5585
5586 /* Then issue the arithmetic correction to return the right result. */
5587 if (!ignore)
5588 {
5589 if (code == NOT)
5590 {
5591 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5592 OPTAB_LIB_WIDEN);
5593 ret = expand_simple_unop (mode, NOT, ret, target, true);
5594 }
5595 else
5596 ret = expand_simple_binop (mode, code, ret, val, target, true,
5597 OPTAB_LIB_WIDEN);
5598 }
5599 return ret;
5600 }
5601
5602
5603 #ifndef HAVE_atomic_clear
5604 # define HAVE_atomic_clear 0
5605 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5606 #endif
5607
5608 /* Expand an atomic clear operation.
5609 void __atomic_clear (BOOL *obj, enum memmodel)
5610 EXP is the call expression. */
5611
5612 static rtx
5613 expand_builtin_atomic_clear (tree exp)
5614 {
5615 machine_mode mode;
5616 rtx mem, ret;
5617 enum memmodel model;
5618
5619 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5620 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5621 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5622
5623 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5624 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5625 {
5626 error ("invalid memory model for %<__atomic_store%>");
5627 return const0_rtx;
5628 }
5629
5630 if (HAVE_atomic_clear)
5631 {
5632 emit_insn (gen_atomic_clear (mem, model));
5633 return const0_rtx;
5634 }
5635
5636 /* Try an atomic_store pattern, with fallback to a __sync_lock_release
5637 pattern; failing both, expand_atomic_store issues a plain store with
5638 barriers. The only way it fails entirely is if the bool type is larger
5639 than a word size. Unlikely, but handled for completeness: assume a single
5640 threaded model, since there is no atomic support then and no barriers are required. */
5641 ret = expand_atomic_store (mem, const0_rtx, model, true);
5642 if (!ret)
5643 emit_move_insn (mem, const0_rtx);
5644 return const0_rtx;
5645 }
5646
5647 /* Expand an atomic test_and_set operation.
5648 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5649 EXP is the call expression. */
5650
5651 static rtx
5652 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5653 {
5654 rtx mem;
5655 enum memmodel model;
5656 machine_mode mode;
5657
5658 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5659 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5660 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5661
5662 return expand_atomic_test_and_set (target, mem, model);
5663 }
5664
5665
5666 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5667 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
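/* For example (illustrative): __atomic_always_lock_free (4, 0) folds to
   true on a target with a 32-bit compare-and-swap pattern, since the null
   pointer means the object is assumed to have the mode's natural
   alignment.  */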
5668
5669 static tree
5670 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5671 {
5672 int size;
5673 machine_mode mode;
5674 unsigned int mode_align, type_align;
5675
5676 if (TREE_CODE (arg0) != INTEGER_CST)
5677 return NULL_TREE;
5678
5679 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5680 mode = mode_for_size (size, MODE_INT, 0);
5681 mode_align = GET_MODE_ALIGNMENT (mode);
5682
5683 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5684 type_align = mode_align;
5685 else
5686 {
5687 tree ttype = TREE_TYPE (arg1);
5688
5689 /* This function is usually invoked and folded immediately by the front
5690 end before anything else has a chance to look at it. The pointer
5691 parameter at this point is usually cast to a void *, so check for that
5692 and look past the cast. */
5693 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5694 && VOID_TYPE_P (TREE_TYPE (ttype)))
5695 arg1 = TREE_OPERAND (arg1, 0);
5696
5697 ttype = TREE_TYPE (arg1);
5698 gcc_assert (POINTER_TYPE_P (ttype));
5699
5700 /* Get the underlying type of the object. */
5701 ttype = TREE_TYPE (ttype);
5702 type_align = TYPE_ALIGN (ttype);
5703 }
5704
5705 /* If the object has smaller alignment, the lock free routines cannot
5706 be used. */
5707 if (type_align < mode_align)
5708 return boolean_false_node;
5709
5710 /* Check if a compare_and_swap pattern exists for the mode which represents
5711 the required size. The pattern is not allowed to fail, so the existence
5712 of the pattern indicates support is present. */
5713 if (can_compare_and_swap_p (mode, true))
5714 return boolean_true_node;
5715 else
5716 return boolean_false_node;
5717 }
5718
5719 /* Return true if the parameters to call EXP represent an object which will
5720 always generate lock free instructions. The first argument represents the
5721 size of the object, and the second parameter is a pointer to the object
5722 itself. If NULL is passed for the object, then the result is based on
5723 typical alignment for an object of the specified size. Otherwise return
5724 false. */
5725
5726 static rtx
5727 expand_builtin_atomic_always_lock_free (tree exp)
5728 {
5729 tree size;
5730 tree arg0 = CALL_EXPR_ARG (exp, 0);
5731 tree arg1 = CALL_EXPR_ARG (exp, 1);
5732
5733 if (TREE_CODE (arg0) != INTEGER_CST)
5734 {
5735 error ("non-constant argument 1 to __atomic_always_lock_free");
5736 return const0_rtx;
5737 }
5738
5739 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5740 if (size == boolean_true_node)
5741 return const1_rtx;
5742 return const0_rtx;
5743 }
5744
5745 /* Return boolean_true_node if it can be determined that object ARG1 of
5746 size ARG0 is lock free on this architecture, or NULL_TREE if not. */
5747
5748 static tree
5749 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5750 {
5751 if (!flag_inline_atomics)
5752 return NULL_TREE;
5753
5754 /* If it isn't always lock free, don't generate a result. */
5755 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5756 return boolean_true_node;
5757
5758 return NULL_TREE;
5759 }
5760
5761 /* Return one if it can be determined at compile time that the object
5762 described by call EXP is lock free. The first argument is the
5763 size of the object, and the second parameter is a pointer to the object
5764 itself. If NULL is passed for the object, then the result is based on
5765 typical alignment for an object of the specified size. Otherwise return
5766 NULL_RTX, so that a run-time library call resolves the answer. */
5767
5768 static rtx
5769 expand_builtin_atomic_is_lock_free (tree exp)
5770 {
5771 tree size;
5772 tree arg0 = CALL_EXPR_ARG (exp, 0);
5773 tree arg1 = CALL_EXPR_ARG (exp, 1);
5774
5775 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5776 {
5777 error ("non-integer argument 1 to __atomic_is_lock_free");
5778 return NULL_RTX;
5779 }
5780
5781 if (!flag_inline_atomics)
5782 return NULL_RTX;
5783
5784 /* If the value is known at compile time, return the RTX for it. */
5785 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5786 if (size == boolean_true_node)
5787 return const1_rtx;
5788
5789 return NULL_RTX;
5790 }
5791
5792 /* Expand the __atomic_thread_fence intrinsic:
5793 void __atomic_thread_fence (enum memmodel)
5794 EXP is the CALL_EXPR. */
5795
5796 static void
5797 expand_builtin_atomic_thread_fence (tree exp)
5798 {
5799 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5800 expand_mem_thread_fence (model);
5801 }
5802
5803 /* Expand the __atomic_signal_fence intrinsic:
5804 void __atomic_signal_fence (enum memmodel)
5805 EXP is the CALL_EXPR. */
5806
5807 static void
5808 expand_builtin_atomic_signal_fence (tree exp)
5809 {
5810 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5811 expand_mem_signal_fence (model);
5812 }
5813
5814 /* Expand the __sync_synchronize intrinsic. */
5815
5816 static void
5817 expand_builtin_sync_synchronize (void)
5818 {
5819 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5820 }
5821
5822 static rtx
5823 expand_builtin_thread_pointer (tree exp, rtx target)
5824 {
5825 enum insn_code icode;
5826 if (!validate_arglist (exp, VOID_TYPE))
5827 return const0_rtx;
5828 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5829 if (icode != CODE_FOR_nothing)
5830 {
5831 struct expand_operand op;
5832 /* If the target is not suitable then create a new target. */
5833 if (target == NULL_RTX
5834 || !REG_P (target)
5835 || GET_MODE (target) != Pmode)
5836 target = gen_reg_rtx (Pmode);
5837 create_output_operand (&op, target, Pmode);
5838 expand_insn (icode, 1, &op);
5839 return target;
5840 }
5841 error ("__builtin_thread_pointer is not supported on this target");
5842 return const0_rtx;
5843 }
5844
5845 static void
5846 expand_builtin_set_thread_pointer (tree exp)
5847 {
5848 enum insn_code icode;
5849 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5850 return;
5851 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5852 if (icode != CODE_FOR_nothing)
5853 {
5854 struct expand_operand op;
5855 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5856 Pmode, EXPAND_NORMAL);
5857 create_input_operand (&op, val, Pmode);
5858 expand_insn (icode, 1, &op);
5859 return;
5860 }
5861 error ("__builtin_set_thread_pointer is not supported on this target");
5862 }
5863
5864 \f
5865 /* Emit code to restore the stack pointer from the value saved in VAR. */
5866
5867 static void
5868 expand_stack_restore (tree var)
5869 {
5870 rtx_insn *prev;
5871 rtx sa = expand_normal (var);
5872
5873 sa = convert_memory_address (Pmode, sa);
5874
5875 prev = get_last_insn ();
5876 emit_stack_restore (SAVE_BLOCK, sa);
5877 fixup_args_size_notes (prev, get_last_insn (), 0);
5878 }
5879
5880
5881 /* Emit code to save the current value of the stack pointer. */
5882
5883 static rtx
5884 expand_stack_save (void)
5885 {
5886 rtx ret = NULL_RTX;
5887
5888 do_pending_stack_adjust ();
5889 emit_stack_save (SAVE_BLOCK, &ret);
5890 return ret;
5891 }
5892
5893 /* Expand an expression EXP that calls a built-in function,
5894 with result going to TARGET if that's convenient
5895 (and in mode MODE if that's convenient).
5896 SUBTARGET may be used as the target for computing one of EXP's operands.
5897 IGNORE is nonzero if the value is to be ignored. */
5898
5899 rtx
5900 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5901 int ignore)
5902 {
5903 tree fndecl = get_callee_fndecl (exp);
5904 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5905 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5906 int flags;
5907
5908 /* When ASan is enabled, we don't want to expand some memory/string
5909 builtins and rely on libsanitizer's hooks. This allows us to avoid
5910 redundant checks and be sure that possible overflows will be detected
5911 by ASan. */
5912
5913 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5914 return expand_call (exp, target, ignore);
5915
5916 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5917 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5918
5919 /* When not optimizing, generate calls to library functions for a certain
5920 set of builtins. */
5921 if (!optimize
5922 && !called_as_built_in (fndecl)
5923 && fcode != BUILT_IN_FORK
5924 && fcode != BUILT_IN_EXECL
5925 && fcode != BUILT_IN_EXECV
5926 && fcode != BUILT_IN_EXECLP
5927 && fcode != BUILT_IN_EXECLE
5928 && fcode != BUILT_IN_EXECVP
5929 && fcode != BUILT_IN_EXECVE
5930 && fcode != BUILT_IN_ALLOCA
5931 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5932 && fcode != BUILT_IN_FREE
5933 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5934 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5935 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5936 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5937 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5938 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5939 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5940 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5941 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5942 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5943 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5944 && fcode != BUILT_IN_CHKP_BNDRET)
5945 return expand_call (exp, target, ignore);
5946
5947 /* The built-in function expanders test for target == const0_rtx
5948 to determine whether the function's result will be ignored. */
5949 if (ignore)
5950 target = const0_rtx;
5951
5952 /* If the result of a pure or const built-in function is ignored, and
5953 none of its arguments are volatile, we can avoid expanding the
5954 built-in call and just evaluate the arguments for side-effects. */
5955 if (target == const0_rtx
5956 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5957 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5958 {
5959 bool volatilep = false;
5960 tree arg;
5961 call_expr_arg_iterator iter;
5962
5963 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5964 if (TREE_THIS_VOLATILE (arg))
5965 {
5966 volatilep = true;
5967 break;
5968 }
5969
5970 if (! volatilep)
5971 {
5972 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5973 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5974 return const0_rtx;
5975 }
5976 }
5977
5978 /* expand_builtin_with_bounds is supposed to be used for
5979 instrumented builtin calls. */
5980 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5981
5982 switch (fcode)
5983 {
5984 CASE_FLT_FN (BUILT_IN_FABS):
5985 case BUILT_IN_FABSD32:
5986 case BUILT_IN_FABSD64:
5987 case BUILT_IN_FABSD128:
5988 target = expand_builtin_fabs (exp, target, subtarget);
5989 if (target)
5990 return target;
5991 break;
5992
5993 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5994 target = expand_builtin_copysign (exp, target, subtarget);
5995 if (target)
5996 return target;
5997 break;
5998
5999 /* Just do a normal library call if we were unable to fold
6000 the values. */
6001 CASE_FLT_FN (BUILT_IN_CABS):
6002 break;
6003
6004 CASE_FLT_FN (BUILT_IN_EXP):
6005 CASE_FLT_FN (BUILT_IN_EXP10):
6006 CASE_FLT_FN (BUILT_IN_POW10):
6007 CASE_FLT_FN (BUILT_IN_EXP2):
6008 CASE_FLT_FN (BUILT_IN_EXPM1):
6009 CASE_FLT_FN (BUILT_IN_LOGB):
6010 CASE_FLT_FN (BUILT_IN_LOG):
6011 CASE_FLT_FN (BUILT_IN_LOG10):
6012 CASE_FLT_FN (BUILT_IN_LOG2):
6013 CASE_FLT_FN (BUILT_IN_LOG1P):
6014 CASE_FLT_FN (BUILT_IN_TAN):
6015 CASE_FLT_FN (BUILT_IN_ASIN):
6016 CASE_FLT_FN (BUILT_IN_ACOS):
6017 CASE_FLT_FN (BUILT_IN_ATAN):
6018 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6019 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6020 because of possible accuracy problems. */
6021 if (! flag_unsafe_math_optimizations)
6022 break;
6023 CASE_FLT_FN (BUILT_IN_SQRT):
6024 CASE_FLT_FN (BUILT_IN_FLOOR):
6025 CASE_FLT_FN (BUILT_IN_CEIL):
6026 CASE_FLT_FN (BUILT_IN_TRUNC):
6027 CASE_FLT_FN (BUILT_IN_ROUND):
6028 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6029 CASE_FLT_FN (BUILT_IN_RINT):
6030 target = expand_builtin_mathfn (exp, target, subtarget);
6031 if (target)
6032 return target;
6033 break;
6034
6035 CASE_FLT_FN (BUILT_IN_FMA):
6036 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6037 if (target)
6038 return target;
6039 break;
6040
6041 CASE_FLT_FN (BUILT_IN_ILOGB):
6042 if (! flag_unsafe_math_optimizations)
6043 break;
6044 CASE_FLT_FN (BUILT_IN_ISINF):
6045 CASE_FLT_FN (BUILT_IN_FINITE):
6046 case BUILT_IN_ISFINITE:
6047 case BUILT_IN_ISNORMAL:
6048 target = expand_builtin_interclass_mathfn (exp, target);
6049 if (target)
6050 return target;
6051 break;
6052
6053 CASE_FLT_FN (BUILT_IN_ICEIL):
6054 CASE_FLT_FN (BUILT_IN_LCEIL):
6055 CASE_FLT_FN (BUILT_IN_LLCEIL):
6056 CASE_FLT_FN (BUILT_IN_LFLOOR):
6057 CASE_FLT_FN (BUILT_IN_IFLOOR):
6058 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6059 target = expand_builtin_int_roundingfn (exp, target);
6060 if (target)
6061 return target;
6062 break;
6063
6064 CASE_FLT_FN (BUILT_IN_IRINT):
6065 CASE_FLT_FN (BUILT_IN_LRINT):
6066 CASE_FLT_FN (BUILT_IN_LLRINT):
6067 CASE_FLT_FN (BUILT_IN_IROUND):
6068 CASE_FLT_FN (BUILT_IN_LROUND):
6069 CASE_FLT_FN (BUILT_IN_LLROUND):
6070 target = expand_builtin_int_roundingfn_2 (exp, target);
6071 if (target)
6072 return target;
6073 break;
6074
6075 CASE_FLT_FN (BUILT_IN_POWI):
6076 target = expand_builtin_powi (exp, target);
6077 if (target)
6078 return target;
6079 break;
6080
6081 CASE_FLT_FN (BUILT_IN_ATAN2):
6082 CASE_FLT_FN (BUILT_IN_LDEXP):
6083 CASE_FLT_FN (BUILT_IN_SCALB):
6084 CASE_FLT_FN (BUILT_IN_SCALBN):
6085 CASE_FLT_FN (BUILT_IN_SCALBLN):
6086 if (! flag_unsafe_math_optimizations)
6087 break;
6088
6089 CASE_FLT_FN (BUILT_IN_FMOD):
6090 CASE_FLT_FN (BUILT_IN_REMAINDER):
6091 CASE_FLT_FN (BUILT_IN_DREM):
6092 CASE_FLT_FN (BUILT_IN_POW):
6093 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6094 if (target)
6095 return target;
6096 break;
6097
6098 CASE_FLT_FN (BUILT_IN_CEXPI):
6099 target = expand_builtin_cexpi (exp, target);
6100 gcc_assert (target);
6101 return target;
6102
6103 CASE_FLT_FN (BUILT_IN_SIN):
6104 CASE_FLT_FN (BUILT_IN_COS):
6105 if (! flag_unsafe_math_optimizations)
6106 break;
6107 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6108 if (target)
6109 return target;
6110 break;
6111
6112 CASE_FLT_FN (BUILT_IN_SINCOS):
6113 if (! flag_unsafe_math_optimizations)
6114 break;
6115 target = expand_builtin_sincos (exp);
6116 if (target)
6117 return target;
6118 break;
6119
6120 case BUILT_IN_APPLY_ARGS:
6121 return expand_builtin_apply_args ();
6122
6123 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6124 FUNCTION with a copy of the parameters described by
6125 ARGUMENTS, and ARGSIZE. It returns a block of memory
6126 allocated on the stack into which is stored all the registers
6127 that might possibly be used for returning the result of a
6128 function. ARGUMENTS is the value returned by
6129 __builtin_apply_args. ARGSIZE is the number of bytes of
6130 arguments that must be copied. ??? How should this value be
6131 computed? We'll also need a safe worst case value for varargs
6132 functions. */
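/* Illustrative user-level usage of the apply machinery (not part of the
   compiler; the function pointer FN and the 64-byte bound are arbitrary):

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) fn, args, 64);
     __builtin_return (ret);  */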
6133 case BUILT_IN_APPLY:
6134 if (!validate_arglist (exp, POINTER_TYPE,
6135 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6136 && !validate_arglist (exp, REFERENCE_TYPE,
6137 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6138 return const0_rtx;
6139 else
6140 {
6141 rtx ops[3];
6142
6143 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6144 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6145 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6146
6147 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6148 }
6149
6150 /* __builtin_return (RESULT) causes the function to return the
6151 value described by RESULT. RESULT is address of the block of
6152 memory returned by __builtin_apply. */
6153 case BUILT_IN_RETURN:
6154 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6155 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6156 return const0_rtx;
6157
6158 case BUILT_IN_SAVEREGS:
6159 return expand_builtin_saveregs ();
6160
6161 case BUILT_IN_VA_ARG_PACK:
6162 /* All valid uses of __builtin_va_arg_pack () are removed during
6163 inlining. */
6164 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6165 return const0_rtx;
6166
6167 case BUILT_IN_VA_ARG_PACK_LEN:
6168 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6169 inlining. */
6170 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6171 return const0_rtx;
6172
6173 /* Return the address of the first anonymous stack arg. */
6174 case BUILT_IN_NEXT_ARG:
6175 if (fold_builtin_next_arg (exp, false))
6176 return const0_rtx;
6177 return expand_builtin_next_arg ();
6178
6179 case BUILT_IN_CLEAR_CACHE:
6180 target = expand_builtin___clear_cache (exp);
6181 if (target)
6182 return target;
6183 break;
6184
6185 case BUILT_IN_CLASSIFY_TYPE:
6186 return expand_builtin_classify_type (exp);
6187
6188 case BUILT_IN_CONSTANT_P:
6189 return const0_rtx;
6190
6191 case BUILT_IN_FRAME_ADDRESS:
6192 case BUILT_IN_RETURN_ADDRESS:
6193 return expand_builtin_frame_address (fndecl, exp);
6194
6195 /* Returns the address of the area where the structure is returned.
6196 0 otherwise. */
6197 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6198 if (call_expr_nargs (exp) != 0
6199 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6200 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6201 return const0_rtx;
6202 else
6203 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6204
6205 case BUILT_IN_ALLOCA:
6206 case BUILT_IN_ALLOCA_WITH_ALIGN:
6207 /* If the allocation stems from the declaration of a variable-sized
6208 object, it cannot accumulate. */
6209 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6210 if (target)
6211 return target;
6212 break;
6213
6214 case BUILT_IN_STACK_SAVE:
6215 return expand_stack_save ();
6216
6217 case BUILT_IN_STACK_RESTORE:
6218 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6219 return const0_rtx;
6220
6221 case BUILT_IN_BSWAP16:
6222 case BUILT_IN_BSWAP32:
6223 case BUILT_IN_BSWAP64:
6224 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6225 if (target)
6226 return target;
6227 break;
6228
6229 CASE_INT_FN (BUILT_IN_FFS):
6230 target = expand_builtin_unop (target_mode, exp, target,
6231 subtarget, ffs_optab);
6232 if (target)
6233 return target;
6234 break;
6235
6236 CASE_INT_FN (BUILT_IN_CLZ):
6237 target = expand_builtin_unop (target_mode, exp, target,
6238 subtarget, clz_optab);
6239 if (target)
6240 return target;
6241 break;
6242
6243 CASE_INT_FN (BUILT_IN_CTZ):
6244 target = expand_builtin_unop (target_mode, exp, target,
6245 subtarget, ctz_optab);
6246 if (target)
6247 return target;
6248 break;
6249
6250 CASE_INT_FN (BUILT_IN_CLRSB):
6251 target = expand_builtin_unop (target_mode, exp, target,
6252 subtarget, clrsb_optab);
6253 if (target)
6254 return target;
6255 break;
6256
6257 CASE_INT_FN (BUILT_IN_POPCOUNT):
6258 target = expand_builtin_unop (target_mode, exp, target,
6259 subtarget, popcount_optab);
6260 if (target)
6261 return target;
6262 break;
6263
6264 CASE_INT_FN (BUILT_IN_PARITY):
6265 target = expand_builtin_unop (target_mode, exp, target,
6266 subtarget, parity_optab);
6267 if (target)
6268 return target;
6269 break;
6270
6271 case BUILT_IN_STRLEN:
6272 target = expand_builtin_strlen (exp, target, target_mode);
6273 if (target)
6274 return target;
6275 break;
6276
6277 case BUILT_IN_STRCPY:
6278 target = expand_builtin_strcpy (exp, target);
6279 if (target)
6280 return target;
6281 break;
6282
6283 case BUILT_IN_STRNCPY:
6284 target = expand_builtin_strncpy (exp, target);
6285 if (target)
6286 return target;
6287 break;
6288
6289 case BUILT_IN_STPCPY:
6290 target = expand_builtin_stpcpy (exp, target, mode);
6291 if (target)
6292 return target;
6293 break;
6294
6295 case BUILT_IN_MEMCPY:
6296 target = expand_builtin_memcpy (exp, target);
6297 if (target)
6298 return target;
6299 break;
6300
6301 case BUILT_IN_MEMPCPY:
6302 target = expand_builtin_mempcpy (exp, target, mode);
6303 if (target)
6304 return target;
6305 break;
6306
6307 case BUILT_IN_MEMSET:
6308 target = expand_builtin_memset (exp, target, mode);
6309 if (target)
6310 return target;
6311 break;
6312
6313 case BUILT_IN_BZERO:
6314 target = expand_builtin_bzero (exp);
6315 if (target)
6316 return target;
6317 break;
6318
6319 case BUILT_IN_STRCMP:
6320 target = expand_builtin_strcmp (exp, target);
6321 if (target)
6322 return target;
6323 break;
6324
6325 case BUILT_IN_STRNCMP:
6326 target = expand_builtin_strncmp (exp, target, mode);
6327 if (target)
6328 return target;
6329 break;
6330
6331 case BUILT_IN_BCMP:
6332 case BUILT_IN_MEMCMP:
6333 target = expand_builtin_memcmp (exp, target, mode);
6334 if (target)
6335 return target;
6336 break;
6337
6338 case BUILT_IN_SETJMP:
6339 /* This should have been lowered to the builtins below. */
6340 gcc_unreachable ();
6341
6342 case BUILT_IN_SETJMP_SETUP:
6343 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6344 and the receiver label. */
6345 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6346 {
6347 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6348 VOIDmode, EXPAND_NORMAL);
6349 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6350 rtx label_r = label_rtx (label);
6351
6352 /* This is copied from the handling of non-local gotos. */
6353 expand_builtin_setjmp_setup (buf_addr, label_r);
6354 nonlocal_goto_handler_labels
6355 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6356 nonlocal_goto_handler_labels);
6357 /* ??? Do not let expand_label treat us as such since we would
6358 not want to be both on the list of non-local labels and on
6359 the list of forced labels. */
6360 FORCED_LABEL (label) = 0;
6361 return const0_rtx;
6362 }
6363 break;
6364
6365 case BUILT_IN_SETJMP_RECEIVER:
6366 /* __builtin_setjmp_receiver is passed the receiver label. */
6367 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6368 {
6369 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6370 rtx label_r = label_rtx (label);
6371
6372 expand_builtin_setjmp_receiver (label_r);
6373 return const0_rtx;
6374 }
6375 break;
6376
6377 /* __builtin_longjmp is passed a pointer to an array of five words.
6378 It's similar to the C library longjmp function but works with
6379 __builtin_setjmp above. */
6380 case BUILT_IN_LONGJMP:
6381 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6382 {
6383 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6384 VOIDmode, EXPAND_NORMAL);
6385 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6386
6387 if (value != const1_rtx)
6388 {
6389 error ("%<__builtin_longjmp%> second argument must be 1");
6390 return const0_rtx;
6391 }
6392
6393 expand_builtin_longjmp (buf_addr, value);
6394 return const0_rtx;
6395 }
6396 break;
6397
6398 case BUILT_IN_NONLOCAL_GOTO:
6399 target = expand_builtin_nonlocal_goto (exp);
6400 if (target)
6401 return target;
6402 break;
6403
6404 /* This updates the setjmp buffer that is its argument with the value
6405 of the current stack pointer. */
6406 case BUILT_IN_UPDATE_SETJMP_BUF:
6407 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6408 {
6409 rtx buf_addr
6410 = expand_normal (CALL_EXPR_ARG (exp, 0));
6411
6412 expand_builtin_update_setjmp_buf (buf_addr);
6413 return const0_rtx;
6414 }
6415 break;
6416
6417 case BUILT_IN_TRAP:
6418 expand_builtin_trap ();
6419 return const0_rtx;
6420
6421 case BUILT_IN_UNREACHABLE:
6422 expand_builtin_unreachable ();
6423 return const0_rtx;
6424
6425 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6426 case BUILT_IN_SIGNBITD32:
6427 case BUILT_IN_SIGNBITD64:
6428 case BUILT_IN_SIGNBITD128:
6429 target = expand_builtin_signbit (exp, target);
6430 if (target)
6431 return target;
6432 break;
6433
6434 /* Various hooks for the DWARF 2 __throw routine. */
6435 case BUILT_IN_UNWIND_INIT:
6436 expand_builtin_unwind_init ();
6437 return const0_rtx;
6438 case BUILT_IN_DWARF_CFA:
6439 return virtual_cfa_rtx;
6440 #ifdef DWARF2_UNWIND_INFO
6441 case BUILT_IN_DWARF_SP_COLUMN:
6442 return expand_builtin_dwarf_sp_column ();
6443 case BUILT_IN_INIT_DWARF_REG_SIZES:
6444 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6445 return const0_rtx;
6446 #endif
6447 case BUILT_IN_FROB_RETURN_ADDR:
6448 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6449 case BUILT_IN_EXTRACT_RETURN_ADDR:
6450 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6451 case BUILT_IN_EH_RETURN:
6452 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6453 CALL_EXPR_ARG (exp, 1));
6454 return const0_rtx;
6455 #ifdef EH_RETURN_DATA_REGNO
6456 case BUILT_IN_EH_RETURN_DATA_REGNO:
6457 return expand_builtin_eh_return_data_regno (exp);
6458 #endif
6459 case BUILT_IN_EXTEND_POINTER:
6460 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6461 case BUILT_IN_EH_POINTER:
6462 return expand_builtin_eh_pointer (exp);
6463 case BUILT_IN_EH_FILTER:
6464 return expand_builtin_eh_filter (exp);
6465 case BUILT_IN_EH_COPY_VALUES:
6466 return expand_builtin_eh_copy_values (exp);
6467
6468 case BUILT_IN_VA_START:
6469 return expand_builtin_va_start (exp);
6470 case BUILT_IN_VA_END:
6471 return expand_builtin_va_end (exp);
6472 case BUILT_IN_VA_COPY:
6473 return expand_builtin_va_copy (exp);
6474 case BUILT_IN_EXPECT:
6475 return expand_builtin_expect (exp, target);
6476 case BUILT_IN_ASSUME_ALIGNED:
6477 return expand_builtin_assume_aligned (exp, target);
6478 case BUILT_IN_PREFETCH:
6479 expand_builtin_prefetch (exp);
6480 return const0_rtx;
6481
6482 case BUILT_IN_INIT_TRAMPOLINE:
6483 return expand_builtin_init_trampoline (exp, true);
6484 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6485 return expand_builtin_init_trampoline (exp, false);
6486 case BUILT_IN_ADJUST_TRAMPOLINE:
6487 return expand_builtin_adjust_trampoline (exp);
6488
6489 case BUILT_IN_FORK:
6490 case BUILT_IN_EXECL:
6491 case BUILT_IN_EXECV:
6492 case BUILT_IN_EXECLP:
6493 case BUILT_IN_EXECLE:
6494 case BUILT_IN_EXECVP:
6495 case BUILT_IN_EXECVE:
6496 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6497 if (target)
6498 return target;
6499 break;
6500
6501 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6502 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6503 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6504 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6505 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6506 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6507 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6508 if (target)
6509 return target;
6510 break;
6511
6512 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6513 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6514 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6515 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6516 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6517 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6518 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6519 if (target)
6520 return target;
6521 break;
6522
6523 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6524 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6525 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6526 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6527 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6528 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6529 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6530 if (target)
6531 return target;
6532 break;
6533
6534 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6535 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6536 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6537 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6538 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6539 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6540 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6541 if (target)
6542 return target;
6543 break;
6544
6545 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6546 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6547 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6548 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6549 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6550 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6551 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6552 if (target)
6553 return target;
6554 break;
6555
6556 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6557 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6558 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6559 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6560 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6561 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6562 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6563 if (target)
6564 return target;
6565 break;
6566
6567 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6568 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6569 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6570 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6571 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6572 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6573 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6574 if (target)
6575 return target;
6576 break;
6577
6578 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6579 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6580 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6581 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6582 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6583 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6584 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6585 if (target)
6586 return target;
6587 break;
6588
6589 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6590 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6591 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6592 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6593 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6594 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6595 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6596 if (target)
6597 return target;
6598 break;
6599
6600 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6601 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6602 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6603 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6604 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6605 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6606 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6607 if (target)
6608 return target;
6609 break;
6610
6611 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6612 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6613 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6614 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6615 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6616 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6617 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6618 if (target)
6619 return target;
6620 break;
6621
6622 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6623 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6624 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6625 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6626 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6627 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6628 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6629 if (target)
6630 return target;
6631 break;
6632
6633 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6634 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6635 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6636 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6637 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6638 if (mode == VOIDmode)
6639 mode = TYPE_MODE (boolean_type_node);
6640 if (!target || !register_operand (target, mode))
6641 target = gen_reg_rtx (mode);
6642
6643 mode = get_builtin_sync_mode
6644 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6645 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6646 if (target)
6647 return target;
6648 break;
6649
6650 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6651 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6652 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6653 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6654 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6655 mode = get_builtin_sync_mode
6656 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6657 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6658 if (target)
6659 return target;
6660 break;
6661
6662 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6663 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6664 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6665 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6666 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6667 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6668 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6669 if (target)
6670 return target;
6671 break;
6672
6673 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6674 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6675 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6676 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6677 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6678 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6679 expand_builtin_sync_lock_release (mode, exp);
6680 return const0_rtx;
6681
6682 case BUILT_IN_SYNC_SYNCHRONIZE:
6683 expand_builtin_sync_synchronize ();
6684 return const0_rtx;
6685
6686 case BUILT_IN_ATOMIC_EXCHANGE_1:
6687 case BUILT_IN_ATOMIC_EXCHANGE_2:
6688 case BUILT_IN_ATOMIC_EXCHANGE_4:
6689 case BUILT_IN_ATOMIC_EXCHANGE_8:
6690 case BUILT_IN_ATOMIC_EXCHANGE_16:
6691 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6692 target = expand_builtin_atomic_exchange (mode, exp, target);
6693 if (target)
6694 return target;
6695 break;
6696
6697 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6698 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6699 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6700 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6701 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6702 {
6703 unsigned int nargs, z;
6704 vec<tree, va_gc> *vec;
6705
6706 mode =
6707 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6708 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6709 if (target)
6710 return target;
6711
6712 /* If this is turned into an external library call, the weak parameter
6713 must be dropped to match the expected parameter list. */
6714 nargs = call_expr_nargs (exp);
6715 vec_alloc (vec, nargs - 1);
6716 for (z = 0; z < 3; z++)
6717 vec->quick_push (CALL_EXPR_ARG (exp, z));
6718 /* Skip the boolean weak parameter. */
6719 for (z = 4; z < 6; z++)
6720 vec->quick_push (CALL_EXPR_ARG (exp, z));
6721 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6722 break;
6723 }
6724
6725 case BUILT_IN_ATOMIC_LOAD_1:
6726 case BUILT_IN_ATOMIC_LOAD_2:
6727 case BUILT_IN_ATOMIC_LOAD_4:
6728 case BUILT_IN_ATOMIC_LOAD_8:
6729 case BUILT_IN_ATOMIC_LOAD_16:
6730 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6731 target = expand_builtin_atomic_load (mode, exp, target);
6732 if (target)
6733 return target;
6734 break;
6735
6736 case BUILT_IN_ATOMIC_STORE_1:
6737 case BUILT_IN_ATOMIC_STORE_2:
6738 case BUILT_IN_ATOMIC_STORE_4:
6739 case BUILT_IN_ATOMIC_STORE_8:
6740 case BUILT_IN_ATOMIC_STORE_16:
6741 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6742 target = expand_builtin_atomic_store (mode, exp);
6743 if (target)
6744 return const0_rtx;
6745 break;
6746
6747 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6748 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6749 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6750 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6751 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6752 {
6753 enum built_in_function lib;
6754 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6755 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6756 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6757 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6758 ignore, lib);
6759 if (target)
6760 return target;
6761 break;
6762 }
6763 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6764 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6765 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6766 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6767 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6768 {
6769 enum built_in_function lib;
6770 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6771 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6772 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6773 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6774 ignore, lib);
6775 if (target)
6776 return target;
6777 break;
6778 }
6779 case BUILT_IN_ATOMIC_AND_FETCH_1:
6780 case BUILT_IN_ATOMIC_AND_FETCH_2:
6781 case BUILT_IN_ATOMIC_AND_FETCH_4:
6782 case BUILT_IN_ATOMIC_AND_FETCH_8:
6783 case BUILT_IN_ATOMIC_AND_FETCH_16:
6784 {
6785 enum built_in_function lib;
6786 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6787 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6788 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6789 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6790 ignore, lib);
6791 if (target)
6792 return target;
6793 break;
6794 }
6795 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6796 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6797 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6798 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6799 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6800 {
6801 enum built_in_function lib;
6802 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6803 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6804 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6805 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6806 ignore, lib);
6807 if (target)
6808 return target;
6809 break;
6810 }
6811 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6812 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6813 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6814 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6815 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6816 {
6817 enum built_in_function lib;
6818 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6819 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6820 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6821 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6822 ignore, lib);
6823 if (target)
6824 return target;
6825 break;
6826 }
6827 case BUILT_IN_ATOMIC_OR_FETCH_1:
6828 case BUILT_IN_ATOMIC_OR_FETCH_2:
6829 case BUILT_IN_ATOMIC_OR_FETCH_4:
6830 case BUILT_IN_ATOMIC_OR_FETCH_8:
6831 case BUILT_IN_ATOMIC_OR_FETCH_16:
6832 {
6833 enum built_in_function lib;
6834 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6835 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6836 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6837 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6838 ignore, lib);
6839 if (target)
6840 return target;
6841 break;
6842 }
6843 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6844 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6845 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6846 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6847 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6848 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6849 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6850 ignore, BUILT_IN_NONE);
6851 if (target)
6852 return target;
6853 break;
6854
6855 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6856 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6857 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6858 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6859 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6860 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6861 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6862 ignore, BUILT_IN_NONE);
6863 if (target)
6864 return target;
6865 break;
6866
6867 case BUILT_IN_ATOMIC_FETCH_AND_1:
6868 case BUILT_IN_ATOMIC_FETCH_AND_2:
6869 case BUILT_IN_ATOMIC_FETCH_AND_4:
6870 case BUILT_IN_ATOMIC_FETCH_AND_8:
6871 case BUILT_IN_ATOMIC_FETCH_AND_16:
6872 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6873 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6874 ignore, BUILT_IN_NONE);
6875 if (target)
6876 return target;
6877 break;
6878
6879 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6880 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6881 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6882 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6883 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6884 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6885 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6886 ignore, BUILT_IN_NONE);
6887 if (target)
6888 return target;
6889 break;
6890
6891 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6892 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6893 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6894 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6895 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6896 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6897 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6898 ignore, BUILT_IN_NONE);
6899 if (target)
6900 return target;
6901 break;
6902
6903 case BUILT_IN_ATOMIC_FETCH_OR_1:
6904 case BUILT_IN_ATOMIC_FETCH_OR_2:
6905 case BUILT_IN_ATOMIC_FETCH_OR_4:
6906 case BUILT_IN_ATOMIC_FETCH_OR_8:
6907 case BUILT_IN_ATOMIC_FETCH_OR_16:
6908 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6909 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6910 ignore, BUILT_IN_NONE);
6911 if (target)
6912 return target;
6913 break;
6914
6915 case BUILT_IN_ATOMIC_TEST_AND_SET:
6916 return expand_builtin_atomic_test_and_set (exp, target);
6917
6918 case BUILT_IN_ATOMIC_CLEAR:
6919 return expand_builtin_atomic_clear (exp);
6920
6921 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6922 return expand_builtin_atomic_always_lock_free (exp);
6923
6924 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6925 target = expand_builtin_atomic_is_lock_free (exp);
6926 if (target)
6927 return target;
6928 break;
6929
6930 case BUILT_IN_ATOMIC_THREAD_FENCE:
6931 expand_builtin_atomic_thread_fence (exp);
6932 return const0_rtx;
6933
6934 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6935 expand_builtin_atomic_signal_fence (exp);
6936 return const0_rtx;
6937
6938 case BUILT_IN_OBJECT_SIZE:
6939 return expand_builtin_object_size (exp);
6940
6941 case BUILT_IN_MEMCPY_CHK:
6942 case BUILT_IN_MEMPCPY_CHK:
6943 case BUILT_IN_MEMMOVE_CHK:
6944 case BUILT_IN_MEMSET_CHK:
6945 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6946 if (target)
6947 return target;
6948 break;
6949
6950 case BUILT_IN_STRCPY_CHK:
6951 case BUILT_IN_STPCPY_CHK:
6952 case BUILT_IN_STRNCPY_CHK:
6953 case BUILT_IN_STPNCPY_CHK:
6954 case BUILT_IN_STRCAT_CHK:
6955 case BUILT_IN_STRNCAT_CHK:
6956 case BUILT_IN_SNPRINTF_CHK:
6957 case BUILT_IN_VSNPRINTF_CHK:
6958 maybe_emit_chk_warning (exp, fcode);
6959 break;
6960
6961 case BUILT_IN_SPRINTF_CHK:
6962 case BUILT_IN_VSPRINTF_CHK:
6963 maybe_emit_sprintf_chk_warning (exp, fcode);
6964 break;
6965
6966 case BUILT_IN_FREE:
6967 if (warn_free_nonheap_object)
6968 maybe_emit_free_warning (exp);
6969 break;
6970
6971 case BUILT_IN_THREAD_POINTER:
6972 return expand_builtin_thread_pointer (exp, target);
6973
6974 case BUILT_IN_SET_THREAD_POINTER:
6975 expand_builtin_set_thread_pointer (exp);
6976 return const0_rtx;
6977
6978 case BUILT_IN_CILK_DETACH:
6979 expand_builtin_cilk_detach (exp);
6980 return const0_rtx;
6981
6982 case BUILT_IN_CILK_POP_FRAME:
6983 expand_builtin_cilk_pop_frame (exp);
6984 return const0_rtx;
6985
6986 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6987 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6988 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6989 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6990 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6991 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6992 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6993 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6994 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6995 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6996 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6997 /* We allow calls to user CHKP builtins even when Pointer Bounds
6998 Checker is off, expanding them to trivial results here. */
6999 if (!chkp_function_instrumented_p (current_function_decl))
7000 {
7001 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7002 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7003 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7004 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7005 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7006 return expand_normal (CALL_EXPR_ARG (exp, 0));
7007 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7008 return expand_normal (size_zero_node);
7009 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7010 return expand_normal (size_int (-1));
7011 else
7012 return const0_rtx;
7013 }
7014 /* FALLTHROUGH */
7015
7016 case BUILT_IN_CHKP_BNDMK:
7017 case BUILT_IN_CHKP_BNDSTX:
7018 case BUILT_IN_CHKP_BNDCL:
7019 case BUILT_IN_CHKP_BNDCU:
7020 case BUILT_IN_CHKP_BNDLDX:
7021 case BUILT_IN_CHKP_BNDRET:
7022 case BUILT_IN_CHKP_INTERSECT:
7023 case BUILT_IN_CHKP_NARROW:
7024 case BUILT_IN_CHKP_EXTRACT_LOWER:
7025 case BUILT_IN_CHKP_EXTRACT_UPPER:
7026 /* A software implementation of Pointer Bounds Checker is not yet
7027 implemented; target support is required. */
7028 error ("Your target platform does not support -fcheck-pointer-bounds");
7029 break;
7030
7031 default: /* Just do a library call, if unknown builtin. */
7032 break;
7033 }
7034
7035 /* The switch statement above can drop through to cause the function
7036 to be called normally. */
7037 return expand_call (exp, target, ignore);
7038 }
7039
7040 /* Similar to expand_builtin, but used for instrumented calls. */
7041
7042 rtx
7043 expand_builtin_with_bounds (tree exp, rtx target,
7044 rtx subtarget ATTRIBUTE_UNUSED,
7045 machine_mode mode, int ignore)
7046 {
7047 tree fndecl = get_callee_fndecl (exp);
7048 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7049
7050 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7051
7052 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7053 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7054
7055 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7056 && fcode < END_CHKP_BUILTINS);
7057
7058 switch (fcode)
7059 {
7060 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7061 target = expand_builtin_memcpy_with_bounds (exp, target);
7062 if (target)
7063 return target;
7064 break;
7065
7066 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7067 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7068 if (target)
7069 return target;
7070 break;
7071
7072 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7073 target = expand_builtin_memset_with_bounds (exp, target, mode);
7074 if (target)
7075 return target;
7076 break;
7077
7078 default:
7079 break;
7080 }
7081
7082 /* The switch statement above can drop through to cause the function
7083 to be called normally. */
7084 return expand_call (exp, target, ignore);
7085 }
7086
7087 /* Determine whether a tree node represents a call to a built-in
7088 function. If the tree T is a call to a built-in function with
7089 the right number of arguments of the appropriate types, return
7090 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7091 Otherwise the return value is END_BUILTINS. */
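/* For instance, given a well-formed call tree for `sqrt (x)' with X of
   type double, this returns BUILT_IN_SQRT; a call whose argument types
   do not match the builtin's prototype (say, a pointer where a float
   is expected) yields END_BUILTINS instead. */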
7092
7093 enum built_in_function
7094 builtin_mathfn_code (const_tree t)
7095 {
7096 const_tree fndecl, arg, parmlist;
7097 const_tree argtype, parmtype;
7098 const_call_expr_arg_iterator iter;
7099
7100 if (TREE_CODE (t) != CALL_EXPR
7101 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7102 return END_BUILTINS;
7103
7104 fndecl = get_callee_fndecl (t);
7105 if (fndecl == NULL_TREE
7106 || TREE_CODE (fndecl) != FUNCTION_DECL
7107 || ! DECL_BUILT_IN (fndecl)
7108 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7109 return END_BUILTINS;
7110
7111 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7112 init_const_call_expr_arg_iterator (t, &iter);
7113 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7114 {
7115 /* If a function doesn't take a variable number of arguments,
7116 the last element in the list will have type `void'. */
7117 parmtype = TREE_VALUE (parmlist);
7118 if (VOID_TYPE_P (parmtype))
7119 {
7120 if (more_const_call_expr_args_p (&iter))
7121 return END_BUILTINS;
7122 return DECL_FUNCTION_CODE (fndecl);
7123 }
7124
7125 if (! more_const_call_expr_args_p (&iter))
7126 return END_BUILTINS;
7127
7128 arg = next_const_call_expr_arg (&iter);
7129 argtype = TREE_TYPE (arg);
7130
7131 if (SCALAR_FLOAT_TYPE_P (parmtype))
7132 {
7133 if (! SCALAR_FLOAT_TYPE_P (argtype))
7134 return END_BUILTINS;
7135 }
7136 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7137 {
7138 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7139 return END_BUILTINS;
7140 }
7141 else if (POINTER_TYPE_P (parmtype))
7142 {
7143 if (! POINTER_TYPE_P (argtype))
7144 return END_BUILTINS;
7145 }
7146 else if (INTEGRAL_TYPE_P (parmtype))
7147 {
7148 if (! INTEGRAL_TYPE_P (argtype))
7149 return END_BUILTINS;
7150 }
7151 else
7152 return END_BUILTINS;
7153 }
7154
7155 /* Variable-length argument list. */
7156 return DECL_FUNCTION_CODE (fndecl);
7157 }
7158
7159 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7160 evaluate to a constant. */
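/* For example, __builtin_constant_p (3) folds to 1 immediately, while
   __builtin_constant_p (x++) folds to 0 because of the side effect;
   for a plain integer variable the answer is deferred (NULL_TREE is
   returned) in the hope that later optimization proves it constant. */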
7161
7162 static tree
7163 fold_builtin_constant_p (tree arg)
7164 {
7165 /* We return 1 for a numeric type that's known to be a constant
7166 value at compile-time or for an aggregate type that's a
7167 literal constant. */
7168 STRIP_NOPS (arg);
7169
7170 /* If we know this is a constant, return the integer constant one. */
7171 if (CONSTANT_CLASS_P (arg)
7172 || (TREE_CODE (arg) == CONSTRUCTOR
7173 && TREE_CONSTANT (arg)))
7174 return integer_one_node;
7175 if (TREE_CODE (arg) == ADDR_EXPR)
7176 {
7177 tree op = TREE_OPERAND (arg, 0);
7178 if (TREE_CODE (op) == STRING_CST
7179 || (TREE_CODE (op) == ARRAY_REF
7180 && integer_zerop (TREE_OPERAND (op, 1))
7181 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7182 return integer_one_node;
7183 }
7184
7185 /* If this expression has side effects, show we don't know it to be a
7186 constant. Likewise if it's a pointer or aggregate type, since in
7187 those cases we only want literals, as those are only optimized
7188 when generating RTL, not later.
7189 And finally, if we are compiling an initializer, not code, we
7190 need to return a definite result now; there's not going to be any
7191 more optimization done. */
7192 if (TREE_SIDE_EFFECTS (arg)
7193 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7194 || POINTER_TYPE_P (TREE_TYPE (arg))
7195 || cfun == 0
7196 || folding_initializer
7197 || force_folding_builtin_constant_p)
7198 return integer_zero_node;
7199
7200 return NULL_TREE;
7201 }
7202
7203 /* Create a call to builtin_expect with PRED and EXPECTED (and, if
7204 non-null, PREDICTOR) as its arguments and return it as a truthvalue. */
7205
7206 static tree
7207 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7208 tree predictor)
7209 {
7210 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7211
7212 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7213 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7214 ret_type = TREE_TYPE (TREE_TYPE (fn));
7215 pred_type = TREE_VALUE (arg_types);
7216 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7217
7218 pred = fold_convert_loc (loc, pred_type, pred);
7219 expected = fold_convert_loc (loc, expected_type, expected);
7220 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7221 predictor);
7222
7223 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7224 build_int_cst (ret_type, 0));
7225 }
7226
7227 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
7228 Return NULL_TREE if no simplification is possible. */
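/* For instance, the distribution step below turns

     __builtin_expect (a && b, 1)

   into roughly

     __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so that each short-circuit operand carries the expectation. */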
7229
7230 tree
7231 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7232 {
7233 tree inner, fndecl, inner_arg0;
7234 enum tree_code code;
7235
7236 /* Distribute the expected value over short-circuiting operators.
7237 See through the cast from truthvalue_type_node to long. */
7238 inner_arg0 = arg0;
7239 while (CONVERT_EXPR_P (inner_arg0)
7240 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7241 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7242 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7243
7244 /* If this is a builtin_expect within a builtin_expect, keep the
7245 inner one. See through a comparison against a constant; it
7246 might have been added to create a truthvalue. */
7247 inner = inner_arg0;
7248
7249 if (COMPARISON_CLASS_P (inner)
7250 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7251 inner = TREE_OPERAND (inner, 0);
7252
7253 if (TREE_CODE (inner) == CALL_EXPR
7254 && (fndecl = get_callee_fndecl (inner))
7255 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7256 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7257 return arg0;
7258
7259 inner = inner_arg0;
7260 code = TREE_CODE (inner);
7261 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7262 {
7263 tree op0 = TREE_OPERAND (inner, 0);
7264 tree op1 = TREE_OPERAND (inner, 1);
7265
7266 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7267 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7268 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7269
7270 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7271 }
7272
7273 /* If the argument isn't invariant then there's nothing else we can do. */
7274 if (!TREE_CONSTANT (inner_arg0))
7275 return NULL_TREE;
7276
7277 /* If we expect that a comparison against the argument will fold to
7278 a constant, return the constant. In practice, this means a true
7279 constant or the address of a non-weak symbol. */
7280 inner = inner_arg0;
7281 STRIP_NOPS (inner);
7282 if (TREE_CODE (inner) == ADDR_EXPR)
7283 {
7284 do
7285 {
7286 inner = TREE_OPERAND (inner, 0);
7287 }
7288 while (TREE_CODE (inner) == COMPONENT_REF
7289 || TREE_CODE (inner) == ARRAY_REF);
7290 if ((TREE_CODE (inner) == VAR_DECL
7291 || TREE_CODE (inner) == FUNCTION_DECL)
7292 && DECL_WEAK (inner))
7293 return NULL_TREE;
7294 }
7295
7296 /* Otherwise, ARG0 already has the proper type for the return value. */
7297 return arg0;
7298 }
7299
7300 /* Fold a call to __builtin_classify_type with argument ARG. */
7301
7302 static tree
7303 fold_builtin_classify_type (tree arg)
7304 {
7305 if (arg == 0)
7306 return build_int_cst (integer_type_node, no_type_class);
7307
7308 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7309 }
7310
7311 /* Fold a call to __builtin_strlen with argument ARG. */
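/* For example, __builtin_strlen ("abc") folds to the constant 3 via
   c_strlen; for a non-literal argument NULL_TREE is returned and the
   call is left to later passes. */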
7312
7313 static tree
7314 fold_builtin_strlen (location_t loc, tree type, tree arg)
7315 {
7316 if (!validate_arg (arg, POINTER_TYPE))
7317 return NULL_TREE;
7318 else
7319 {
7320 tree len = c_strlen (arg, 0);
7321
7322 if (len)
7323 return fold_convert_loc (loc, type, len);
7324
7325 return NULL_TREE;
7326 }
7327 }
7328
7329 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7330
7331 static tree
7332 fold_builtin_inf (location_t loc, tree type, int warn)
7333 {
7334 REAL_VALUE_TYPE real;
7335
7336 /* __builtin_inff is intended to be usable to define INFINITY on all
7337 targets. If an infinity is not available, INFINITY expands "to a
7338 positive constant of type float that overflows at translation
7339 time", footnote "In this case, using INFINITY will violate the
7340 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7341 Thus we pedwarn to ensure this constraint violation is
7342 diagnosed. */
7343 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7344 pedwarn (loc, 0, "target format does not support infinity");
7345
7346 real_inf (&real);
7347 return build_real (type, real);
7348 }
7349
7350 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7351
7352 static tree
7353 fold_builtin_nan (tree arg, tree type, int quiet)
7354 {
7355 REAL_VALUE_TYPE real;
7356 const char *str;
7357
7358 if (!validate_arg (arg, POINTER_TYPE))
7359 return NULL_TREE;
7360 str = c_getstr (arg);
7361 if (!str)
7362 return NULL_TREE;
7363
7364 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7365 return NULL_TREE;
7366
7367 return build_real (type, real);
7368 }
7369
7370 /* Return true if the floating point expression T has an integer value.
7371 We also allow +Inf, -Inf and NaN to be considered integer values. */
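/* E.g. both `(double) i' for integral I and `floor (x) + 1.0' are
   recognized as integer valued, while a bare REAL_CST qualifies only
   if its value is already an integer. */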
7372
7373 static bool
7374 integer_valued_real_p (tree t)
7375 {
7376 switch (TREE_CODE (t))
7377 {
7378 case FLOAT_EXPR:
7379 return true;
7380
7381 case ABS_EXPR:
7382 case SAVE_EXPR:
7383 return integer_valued_real_p (TREE_OPERAND (t, 0));
7384
7385 case COMPOUND_EXPR:
7386 case MODIFY_EXPR:
7387 case BIND_EXPR:
7388 return integer_valued_real_p (TREE_OPERAND (t, 1));
7389
7390 case PLUS_EXPR:
7391 case MINUS_EXPR:
7392 case MULT_EXPR:
7393 case MIN_EXPR:
7394 case MAX_EXPR:
7395 return integer_valued_real_p (TREE_OPERAND (t, 0))
7396 && integer_valued_real_p (TREE_OPERAND (t, 1));
7397
7398 case COND_EXPR:
7399 return integer_valued_real_p (TREE_OPERAND (t, 1))
7400 && integer_valued_real_p (TREE_OPERAND (t, 2));
7401
7402 case REAL_CST:
7403 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7404
7405 CASE_CONVERT:
7406 {
7407 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7408 if (TREE_CODE (type) == INTEGER_TYPE)
7409 return true;
7410 if (TREE_CODE (type) == REAL_TYPE)
7411 return integer_valued_real_p (TREE_OPERAND (t, 0));
7412 break;
7413 }
7414
7415 case CALL_EXPR:
7416 switch (builtin_mathfn_code (t))
7417 {
7418 CASE_FLT_FN (BUILT_IN_CEIL):
7419 CASE_FLT_FN (BUILT_IN_FLOOR):
7420 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7421 CASE_FLT_FN (BUILT_IN_RINT):
7422 CASE_FLT_FN (BUILT_IN_ROUND):
7423 CASE_FLT_FN (BUILT_IN_TRUNC):
7424 return true;
7425
7426 CASE_FLT_FN (BUILT_IN_FMIN):
7427 CASE_FLT_FN (BUILT_IN_FMAX):
7428 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7429 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7430
7431 default:
7432 break;
7433 }
7434 break;
7435
7436 default:
7437 break;
7438 }
7439 return false;
7440 }
7441
7442 /* FNDECL is assumed to be a builtin where truncation can be propagated
7443 across (for instance floor((double)f) == (double)floorf (f)).
7444 Do the transformation for a call with argument ARG. */
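/* E.g. when optimizing, for float F the call

     trunc ((double) f)

   is rewritten below as roughly

     (double) truncf (f)

   since the narrower computation cannot change the value. */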
7445
7446 static tree
7447 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7448 {
7449 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7450
7451 if (!validate_arg (arg, REAL_TYPE))
7452 return NULL_TREE;
7453
7454 /* Integer rounding functions are idempotent. */
7455 if (fcode == builtin_mathfn_code (arg))
7456 return arg;
7457
7458 /* If argument is already integer valued, and we don't need to worry
7459 about setting errno, there's no need to perform rounding. */
7460 if (! flag_errno_math && integer_valued_real_p (arg))
7461 return arg;
7462
7463 if (optimize)
7464 {
7465 tree arg0 = strip_float_extensions (arg);
7466 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7467 tree newtype = TREE_TYPE (arg0);
7468 tree decl;
7469
7470 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7471 && (decl = mathfn_built_in (newtype, fcode)))
7472 return fold_convert_loc (loc, ftype,
7473 build_call_expr_loc (loc, decl, 1,
7474 fold_convert_loc (loc,
7475 newtype,
7476 arg0)));
7477 }
7478 return NULL_TREE;
7479 }
7480
7481 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7482 the argument, for instance lround((double)f) -> lroundf (f).
7483 Do the transformation for a call with argument ARG. */
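/* E.g. lround ((double) f) becomes lroundf (f) for float F; and, as
   sketched in the two blocks below, iround (x) is canonicalized to
   lround (x) when int and long have the same precision, and
   llround (x) to lround (x) when long long and long do. */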
7484
7485 static tree
7486 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7487 {
7488 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7489
7490 if (!validate_arg (arg, REAL_TYPE))
7491 return NULL_TREE;
7492
7493 /* If argument is already integer valued, and we don't need to worry
7494 about setting errno, there's no need to perform rounding. */
7495 if (! flag_errno_math && integer_valued_real_p (arg))
7496 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7497 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7498
7499 if (optimize)
7500 {
7501 tree ftype = TREE_TYPE (arg);
7502 tree arg0 = strip_float_extensions (arg);
7503 tree newtype = TREE_TYPE (arg0);
7504 tree decl;
7505
7506 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7507 && (decl = mathfn_built_in (newtype, fcode)))
7508 return build_call_expr_loc (loc, decl, 1,
7509 fold_convert_loc (loc, newtype, arg0));
7510 }
7511
7512 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7513 sizeof (int) == sizeof (long). */
7514 if (TYPE_PRECISION (integer_type_node)
7515 == TYPE_PRECISION (long_integer_type_node))
7516 {
7517 tree newfn = NULL_TREE;
7518 switch (fcode)
7519 {
7520 CASE_FLT_FN (BUILT_IN_ICEIL):
7521 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7522 break;
7523
7524 CASE_FLT_FN (BUILT_IN_IFLOOR):
7525 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7526 break;
7527
7528 CASE_FLT_FN (BUILT_IN_IROUND):
7529 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7530 break;
7531
7532 CASE_FLT_FN (BUILT_IN_IRINT):
7533 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7534 break;
7535
7536 default:
7537 break;
7538 }
7539
7540 if (newfn)
7541 {
7542 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7543 return fold_convert_loc (loc,
7544 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7545 }
7546 }
7547
7548 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7549 sizeof (long long) == sizeof (long). */
7550 if (TYPE_PRECISION (long_long_integer_type_node)
7551 == TYPE_PRECISION (long_integer_type_node))
7552 {
7553 tree newfn = NULL_TREE;
7554 switch (fcode)
7555 {
7556 CASE_FLT_FN (BUILT_IN_LLCEIL):
7557 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7558 break;
7559
7560 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7561 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7562 break;
7563
7564 CASE_FLT_FN (BUILT_IN_LLROUND):
7565 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7566 break;
7567
7568 CASE_FLT_FN (BUILT_IN_LLRINT):
7569 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7570 break;
7571
7572 default:
7573 break;
7574 }
7575
7576 if (newfn)
7577 {
7578 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7579 return fold_convert_loc (loc,
7580 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7581 }
7582 }
7583
7584 return NULL_TREE;
7585 }
7586
7587 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7588 return type. Return NULL_TREE if no simplification can be made. */
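/* For instance cabs (x + 0i) folds to fabs (x), and with
   -funsafe-math-optimizations (when optimizing for speed) cabs (z)
   is expanded inline as roughly

     sqrt (__real__ z * __real__ z + __imag__ z * __imag__ z).  */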
7589
7590 static tree
7591 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7592 {
7593 tree res;
7594
7595 if (!validate_arg (arg, COMPLEX_TYPE)
7596 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7597 return NULL_TREE;
7598
7599 /* Calculate the result when the argument is a constant. */
7600 if (TREE_CODE (arg) == COMPLEX_CST
7601 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7602 type, mpfr_hypot)))
7603 return res;
7604
7605 if (TREE_CODE (arg) == COMPLEX_EXPR)
7606 {
7607 tree real = TREE_OPERAND (arg, 0);
7608 tree imag = TREE_OPERAND (arg, 1);
7609
7610 /* If either part is zero, cabs is fabs of the other. */
7611 if (real_zerop (real))
7612 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7613 if (real_zerop (imag))
7614 return fold_build1_loc (loc, ABS_EXPR, type, real);
7615
7616 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7617 if (flag_unsafe_math_optimizations
7618 && operand_equal_p (real, imag, OEP_PURE_SAME))
7619 {
7620 const REAL_VALUE_TYPE sqrt2_trunc
7621 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7622 STRIP_NOPS (real);
7623 return fold_build2_loc (loc, MULT_EXPR, type,
7624 fold_build1_loc (loc, ABS_EXPR, type, real),
7625 build_real (type, sqrt2_trunc));
7626 }
7627 }
7628
7629 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7630 if (TREE_CODE (arg) == NEGATE_EXPR
7631 || TREE_CODE (arg) == CONJ_EXPR)
7632 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7633
7634 /* Don't do this when optimizing for size. */
7635 if (flag_unsafe_math_optimizations
7636 && optimize && optimize_function_for_speed_p (cfun))
7637 {
7638 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7639
7640 if (sqrtfn != NULL_TREE)
7641 {
7642 tree rpart, ipart, result;
7643
7644 arg = builtin_save_expr (arg);
7645
7646 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7647 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7648
7649 rpart = builtin_save_expr (rpart);
7650 ipart = builtin_save_expr (ipart);
7651
7652 result = fold_build2_loc (loc, PLUS_EXPR, type,
7653 fold_build2_loc (loc, MULT_EXPR, type,
7654 rpart, rpart),
7655 fold_build2_loc (loc, MULT_EXPR, type,
7656 ipart, ipart));
7657
7658 return build_call_expr_loc (loc, sqrtfn, 1, result);
7659 }
7660 }
7661
7662 return NULL_TREE;
7663 }
7664
7665 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7666 complex tree type of the result. If NEG is true, the imaginary
7667 zero is negative. */
7668
7669 static tree
7670 build_complex_cproj (tree type, bool neg)
7671 {
7672 REAL_VALUE_TYPE rinf, rzero = dconst0;
7673
7674 real_inf (&rinf);
7675 rzero.sign = neg;
7676 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7677 build_real (TREE_TYPE (type), rzero));
7678 }
7679
7680 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7681 return type. Return NULL_TREE if no simplification can be made. */
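/* For example, for finite Z, cproj (z) is just Z; if either part is
   an infinity the result folds to (inf + 0i) with the sign of the
   zero taken from the imaginary part, per C99 Annex G. */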
7682
7683 static tree
7684 fold_builtin_cproj (location_t loc, tree arg, tree type)
7685 {
7686 if (!validate_arg (arg, COMPLEX_TYPE)
7687 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7688 return NULL_TREE;
7689
7690 /* If there are no infinities, return arg. */
7691 if (! HONOR_INFINITIES (type))
7692 return non_lvalue_loc (loc, arg);
7693
7694 /* Calculate the result when the argument is a constant. */
7695 if (TREE_CODE (arg) == COMPLEX_CST)
7696 {
7697 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7698 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7699
7700 if (real_isinf (real) || real_isinf (imag))
7701 return build_complex_cproj (type, imag->sign);
7702 else
7703 return arg;
7704 }
7705 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7706 {
7707 tree real = TREE_OPERAND (arg, 0);
7708 tree imag = TREE_OPERAND (arg, 1);
7709
7710 STRIP_NOPS (real);
7711 STRIP_NOPS (imag);
7712
7713 /* If the real part is inf and the imag part is known to be
7714 nonnegative, return (inf + 0i). Remember side-effects are
7715 possible in the imag part. */
7716 if (TREE_CODE (real) == REAL_CST
7717 && real_isinf (TREE_REAL_CST_PTR (real))
7718 && tree_expr_nonnegative_p (imag))
7719 return omit_one_operand_loc (loc, type,
7720 build_complex_cproj (type, false),
7721 arg);
7722
7723 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7724 Remember side-effects are possible in the real part. */
7725 if (TREE_CODE (imag) == REAL_CST
7726 && real_isinf (TREE_REAL_CST_PTR (imag)))
7727 return
7728 omit_one_operand_loc (loc, type,
7729 build_complex_cproj (type, TREE_REAL_CST_PTR
7730 (imag)->sign), arg);
7731 }
7732
7733 return NULL_TREE;
7734 }
7735
7736 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7737 Return NULL_TREE if no simplification can be made. */
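/* E.g. with -funsafe-math-optimizations, roughly:

     sqrt (exp (x))    ->  exp (x * 0.5)
     sqrt (cbrt (x))   ->  pow (x, 1.0/6.0)
     sqrt (pow (x, y)) ->  pow (fabs (x), y * 0.5)

   as implemented below. */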
7738
7739 static tree
7740 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7741 {
7743 enum built_in_function fcode;
7744 tree res;
7745
7746 if (!validate_arg (arg, REAL_TYPE))
7747 return NULL_TREE;
7748
7749 /* Calculate the result when the argument is a constant. */
7750 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7751 return res;
7752
7753 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7754 fcode = builtin_mathfn_code (arg);
7755 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7756 {
7757 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7758 arg = fold_build2_loc (loc, MULT_EXPR, type,
7759 CALL_EXPR_ARG (arg, 0),
7760 build_real (type, dconsthalf));
7761 return build_call_expr_loc (loc, expfn, 1, arg);
7762 }
7763
7764 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7765 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7766 {
7767 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7768
7769 if (powfn)
7770 {
7771 tree arg0 = CALL_EXPR_ARG (arg, 0);
7772 tree tree_root;
7773 /* The inner root was either sqrt or cbrt. */
7774 /* This was a conditional expression but it triggered a bug
7775 in Sun C 5.5. */
7776 REAL_VALUE_TYPE dconstroot;
7777 if (BUILTIN_SQRT_P (fcode))
7778 dconstroot = dconsthalf;
7779 else
7780 dconstroot = dconst_third ();
7781
7782 /* Adjust for the outer root. */
7783 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7784 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7785 tree_root = build_real (type, dconstroot);
7786 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7787 }
7788 }
7789
7790 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7791 if (flag_unsafe_math_optimizations
7792 && (fcode == BUILT_IN_POW
7793 || fcode == BUILT_IN_POWF
7794 || fcode == BUILT_IN_POWL))
7795 {
7796 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7797 tree arg0 = CALL_EXPR_ARG (arg, 0);
7798 tree arg1 = CALL_EXPR_ARG (arg, 1);
7799 tree narg1;
7800 if (!tree_expr_nonnegative_p (arg0))
7801 arg0 = build1 (ABS_EXPR, type, arg0);
7802 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7803 build_real (type, dconsthalf));
7804 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7805 }
7806
7807 return NULL_TREE;
7808 }
7809
7810 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7811 Return NULL_TREE if no simplification can be made. */
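/* E.g. with -funsafe-math-optimizations, roughly:

     cbrt (exp (x))  ->  exp (x / 3)
     cbrt (sqrt (x)) ->  pow (x, 1.0/6.0)
     cbrt (cbrt (x)) ->  pow (x, 1.0/9.0)   (if X is nonnegative)

   as implemented below. */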
7812
7813 static tree
7814 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7815 {
7816 const enum built_in_function fcode = builtin_mathfn_code (arg);
7817 tree res;
7818
7819 if (!validate_arg (arg, REAL_TYPE))
7820 return NULL_TREE;
7821
7822 /* Calculate the result when the argument is a constant. */
7823 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7824 return res;
7825
7826 if (flag_unsafe_math_optimizations)
7827 {
7828 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7829 if (BUILTIN_EXPONENT_P (fcode))
7830 {
7831 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7832 const REAL_VALUE_TYPE third_trunc =
7833 real_value_truncate (TYPE_MODE (type), dconst_third ());
7834 arg = fold_build2_loc (loc, MULT_EXPR, type,
7835 CALL_EXPR_ARG (arg, 0),
7836 build_real (type, third_trunc));
7837 return build_call_expr_loc (loc, expfn, 1, arg);
7838 }
7839
7840 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7841 if (BUILTIN_SQRT_P (fcode))
7842 {
7843 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7844
7845 if (powfn)
7846 {
7847 tree arg0 = CALL_EXPR_ARG (arg, 0);
7848 tree tree_root;
7849 REAL_VALUE_TYPE dconstroot = dconst_third ();
7850
7851 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7852 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7853 tree_root = build_real (type, dconstroot);
7854 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7855 }
7856 }
7857
7858 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7859 if (BUILTIN_CBRT_P (fcode))
7860 {
7861 tree arg0 = CALL_EXPR_ARG (arg, 0);
7862 if (tree_expr_nonnegative_p (arg0))
7863 {
7864 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7865
7866 if (powfn)
7867 {
7868 tree tree_root;
7869 REAL_VALUE_TYPE dconstroot;
7870
7871 real_arithmetic (&dconstroot, MULT_EXPR,
7872 dconst_third_ptr (), dconst_third_ptr ());
7873 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7874 tree_root = build_real (type, dconstroot);
7875 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7876 }
7877 }
7878 }
7879
7880 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7881 if (fcode == BUILT_IN_POW
7882 || fcode == BUILT_IN_POWF
7883 || fcode == BUILT_IN_POWL)
7884 {
7885 tree arg00 = CALL_EXPR_ARG (arg, 0);
7886 tree arg01 = CALL_EXPR_ARG (arg, 1);
7887 if (tree_expr_nonnegative_p (arg00))
7888 {
7889 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7890 const REAL_VALUE_TYPE dconstroot
7891 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7892 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7893 build_real (type, dconstroot));
7894 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7895 }
7896 }
7897 }
7898 return NULL_TREE;
7899 }
7900
7901 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7902 TYPE is the type of the return value. Return NULL_TREE if no
7903 simplification can be made. */
7904
7905 static tree
7906 fold_builtin_cos (location_t loc,
7907 tree arg, tree type, tree fndecl)
7908 {
7909 tree res, narg;
7910
7911 if (!validate_arg (arg, REAL_TYPE))
7912 return NULL_TREE;
7913
7914 /* Calculate the result when the argument is a constant. */
7915 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7916 return res;
7917
7918 /* Optimize cos(-x) into cos (x). */
7919 if ((narg = fold_strip_sign_ops (arg)))
7920 return build_call_expr_loc (loc, fndecl, 1, narg);
7921
7922 return NULL_TREE;
7923 }
7924
7925 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7926 Return NULL_TREE if no simplification can be made. */
7927
7928 static tree
7929 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7930 {
7931 if (validate_arg (arg, REAL_TYPE))
7932 {
7933 tree res, narg;
7934
7935 /* Calculate the result when the argument is a constant. */
7936 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7937 return res;
7938
7939 /* Optimize cosh(-x) into cosh (x). */
7940 if ((narg = fold_strip_sign_ops (arg)))
7941 return build_call_expr_loc (loc, fndecl, 1, narg);
7942 }
7943
7944 return NULL_TREE;
7945 }
7946
7947 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7948 argument ARG. TYPE is the type of the return value. Return
7949 NULL_TREE if no simplification can be made. */
7950
7951 static tree
7952 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7953 bool hyper)
7954 {
7955 if (validate_arg (arg, COMPLEX_TYPE)
7956 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7957 {
7958 tree tmp;
7959
7960 /* Calculate the result when the argument is a constant. */
7961 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7962 return tmp;
7963
7964 /* Optimize fn(-x) into fn(x). */
7965 if ((tmp = fold_strip_sign_ops (arg)))
7966 return build_call_expr_loc (loc, fndecl, 1, tmp);
7967 }
7968
7969 return NULL_TREE;
7970 }
7971
7972 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7973 Return NULL_TREE if no simplification can be made. */
7974
7975 static tree
7976 fold_builtin_tan (tree arg, tree type)
7977 {
7978 enum built_in_function fcode;
7979 tree res;
7980
7981 if (!validate_arg (arg, REAL_TYPE))
7982 return NULL_TREE;
7983
7984 /* Calculate the result when the argument is a constant. */
7985 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7986 return res;
7987
7988 /* Optimize tan(atan(x)) = x. */
7989 fcode = builtin_mathfn_code (arg);
7990 if (flag_unsafe_math_optimizations
7991 && (fcode == BUILT_IN_ATAN
7992 || fcode == BUILT_IN_ATANF
7993 || fcode == BUILT_IN_ATANL))
7994 return CALL_EXPR_ARG (arg, 0);
7995
7996 return NULL_TREE;
7997 }
7998
7999 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8000 NULL_TREE if no simplification can be made. */
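/* When the C99 complex math functions are available, the
   canonicalization below rewrites, in effect,

     sincos (x, &s, &c);

   into

     t = cexpi (x); s = __imag__ t; c = __real__ t;

   (a sketch of the generated trees, not literal source). */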
8001
8002 static tree
8003 fold_builtin_sincos (location_t loc,
8004 tree arg0, tree arg1, tree arg2)
8005 {
8006 tree type;
8007 tree res, fn, call;
8008
8009 if (!validate_arg (arg0, REAL_TYPE)
8010 || !validate_arg (arg1, POINTER_TYPE)
8011 || !validate_arg (arg2, POINTER_TYPE))
8012 return NULL_TREE;
8013
8014 type = TREE_TYPE (arg0);
8015
8016 /* Calculate the result when the argument is a constant. */
8017 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8018 return res;
8019
8020 /* Canonicalize sincos to cexpi. */
8021 if (!targetm.libc_has_function (function_c99_math_complex))
8022 return NULL_TREE;
8023 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8024 if (!fn)
8025 return NULL_TREE;
8026
8027 call = build_call_expr_loc (loc, fn, 1, arg0);
8028 call = builtin_save_expr (call);
8029
8030 return build2 (COMPOUND_EXPR, void_type_node,
8031 build2 (MODIFY_EXPR, void_type_node,
8032 build_fold_indirect_ref_loc (loc, arg1),
8033 build1 (IMAGPART_EXPR, type, call)),
8034 build2 (MODIFY_EXPR, void_type_node,
8035 build_fold_indirect_ref_loc (loc, arg2),
8036 build1 (REALPART_EXPR, type, call)));
8037 }
8038
8039 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8040 NULL_TREE if no simplification can be made. */
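/* E.g. if the real part of ARG0 is known to be zero, cexp (0 + yi)
   folds to cexpi (y); and with -funsafe-math-optimizations
   cexp (x + yi) is split into exp (x) * cexpi (y), in effect
   computing (exp (x) * cos (y), exp (x) * sin (y)). */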
8041
8042 static tree
8043 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8044 {
8045 tree rtype;
8046 tree realp, imagp, ifn;
8047 tree res;
8048
8049 if (!validate_arg (arg0, COMPLEX_TYPE)
8050 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8051 return NULL_TREE;
8052
8053 /* Calculate the result when the argument is a constant. */
8054 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8055 return res;
8056
8057 rtype = TREE_TYPE (TREE_TYPE (arg0));
8058
8059 /* If we can figure out the real part of arg0 and it is constant zero,
8060 fold to cexpi. */
8061 if (!targetm.libc_has_function (function_c99_math_complex))
8062 return NULL_TREE;
8063 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8064 if (!ifn)
8065 return NULL_TREE;
8066
8067 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8068 && real_zerop (realp))
8069 {
8070 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8071 return build_call_expr_loc (loc, ifn, 1, narg);
8072 }
8073
8074 /* If we can easily decompose the real and imaginary parts, split cexp
8075 into exp (r) * cexpi (i). */
8076 if (flag_unsafe_math_optimizations
8077 && realp)
8078 {
8079 tree rfn, rcall, icall;
8080
8081 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8082 if (!rfn)
8083 return NULL_TREE;
8084
8085 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8086 if (!imagp)
8087 return NULL_TREE;
8088
8089 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8090 icall = builtin_save_expr (icall);
8091 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8092 rcall = builtin_save_expr (rcall);
8093 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8094 fold_build2_loc (loc, MULT_EXPR, rtype,
8095 rcall,
8096 fold_build1_loc (loc, REALPART_EXPR,
8097 rtype, icall)),
8098 fold_build2_loc (loc, MULT_EXPR, rtype,
8099 rcall,
8100 fold_build1_loc (loc, IMAGPART_EXPR,
8101 rtype, icall)));
8102 }
8103
8104 return NULL_TREE;
8105 }
8106
8107 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8108 Return NULL_TREE if no simplification can be made. */
8109
8110 static tree
8111 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8112 {
8113 if (!validate_arg (arg, REAL_TYPE))
8114 return NULL_TREE;
8115
8116 /* Optimize trunc of constant value. */
8117 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8118 {
8119 REAL_VALUE_TYPE r, x;
8120 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8121
8122 x = TREE_REAL_CST (arg);
8123 real_trunc (&r, TYPE_MODE (type), &x);
8124 return build_real (type, r);
8125 }
8126
8127 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8128 }
8129
8130 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8131 Return NULL_TREE if no simplification can be made. */
8132
8133 static tree
8134 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8135 {
8136 if (!validate_arg (arg, REAL_TYPE))
8137 return NULL_TREE;
8138
8139 /* Optimize floor of constant value. */
8140 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8141 {
8142 REAL_VALUE_TYPE x;
8143
8144 x = TREE_REAL_CST (arg);
8145 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8146 {
8147 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8148 REAL_VALUE_TYPE r;
8149
8150 real_floor (&r, TYPE_MODE (type), &x);
8151 return build_real (type, r);
8152 }
8153 }
8154
8155 /* Fold floor (x) where x is nonnegative to trunc (x). */
8156 if (tree_expr_nonnegative_p (arg))
8157 {
8158 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8159 if (truncfn)
8160 return build_call_expr_loc (loc, truncfn, 1, arg);
8161 }
8162
8163 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8164 }
8165
8166 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8167 Return NULL_TREE if no simplification can be made. */
8168
8169 static tree
8170 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8171 {
8172 if (!validate_arg (arg, REAL_TYPE))
8173 return NULL_TREE;
8174
8175 /* Optimize ceil of constant value. */
8176 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8177 {
8178 REAL_VALUE_TYPE x;
8179
8180 x = TREE_REAL_CST (arg);
8181 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8182 {
8183 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8184 REAL_VALUE_TYPE r;
8185
8186 real_ceil (&r, TYPE_MODE (type), &x);
8187 return build_real (type, r);
8188 }
8189 }
8190
8191 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8192 }
8193
8194 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8195 Return NULL_TREE if no simplification can be made. */
8196
8197 static tree
8198 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8199 {
8200 if (!validate_arg (arg, REAL_TYPE))
8201 return NULL_TREE;
8202
8203 /* Optimize round of constant value. */
8204 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8205 {
8206 REAL_VALUE_TYPE x;
8207
8208 x = TREE_REAL_CST (arg);
8209 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8210 {
8211 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8212 REAL_VALUE_TYPE r;
8213
8214 real_round (&r, TYPE_MODE (type), &x);
8215 return build_real (type, r);
8216 }
8217 }
8218
8219 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8220 }
8221
8222 /* Fold function call to builtin lround, lroundf or lroundl (or the
8223 corresponding long long versions) and other rounding functions. ARG
8224 is the argument to the call. Return NULL_TREE if no simplification
8225 can be made. */
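/* For example, lround (2.5) folds to the integer constant 3 at
   compile time (real_round rounds halfway cases away from zero);
   if the rounded value does not fit the integer return type, no
   constant is substituted. */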
8226
8227 static tree
8228 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8229 {
8230 if (!validate_arg (arg, REAL_TYPE))
8231 return NULL_TREE;
8232
8233 /* Optimize lround of constant value. */
8234 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8235 {
8236 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8237
8238 if (real_isfinite (&x))
8239 {
8240 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8241 tree ftype = TREE_TYPE (arg);
8242 REAL_VALUE_TYPE r;
8243 bool fail = false;
8244
8245 switch (DECL_FUNCTION_CODE (fndecl))
8246 {
8247 CASE_FLT_FN (BUILT_IN_IFLOOR):
8248 CASE_FLT_FN (BUILT_IN_LFLOOR):
8249 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8250 real_floor (&r, TYPE_MODE (ftype), &x);
8251 break;
8252
8253 CASE_FLT_FN (BUILT_IN_ICEIL):
8254 CASE_FLT_FN (BUILT_IN_LCEIL):
8255 CASE_FLT_FN (BUILT_IN_LLCEIL):
8256 real_ceil (&r, TYPE_MODE (ftype), &x);
8257 break;
8258
8259 CASE_FLT_FN (BUILT_IN_IROUND):
8260 CASE_FLT_FN (BUILT_IN_LROUND):
8261 CASE_FLT_FN (BUILT_IN_LLROUND):
8262 real_round (&r, TYPE_MODE (ftype), &x);
8263 break;
8264
8265 default:
8266 gcc_unreachable ();
8267 }
8268
8269 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8270 if (!fail)
8271 return wide_int_to_tree (itype, val);
8272 }
8273 }
8274
8275 switch (DECL_FUNCTION_CODE (fndecl))
8276 {
8277 CASE_FLT_FN (BUILT_IN_LFLOOR):
8278 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8279 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8280 if (tree_expr_nonnegative_p (arg))
8281 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8282 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8283 break;
8284 default:;
8285 }
8286
8287 return fold_fixed_mathfn (loc, fndecl, arg);
8288 }
8289
8290 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8291 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8292 the argument to the call. Return NULL_TREE if no simplification can
8293 be made. */
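/* E.g. __builtin_popcount (0xF0) folds to 4 and __builtin_ffs (8)
   folds to 4 (the 1-based index of the least significant set bit);
   for clz/ctz of zero the target-defined value is used when
   C[LT]Z_DEFINED_VALUE_AT_ZERO provides one, else the type's
   precision. */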
8294
8295 static tree
8296 fold_builtin_bitop (tree fndecl, tree arg)
8297 {
8298 if (!validate_arg (arg, INTEGER_TYPE))
8299 return NULL_TREE;
8300
8301 /* Optimize for constant argument. */
8302 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8303 {
8304 tree type = TREE_TYPE (arg);
8305 int result;
8306
8307 switch (DECL_FUNCTION_CODE (fndecl))
8308 {
8309 CASE_INT_FN (BUILT_IN_FFS):
8310 result = wi::ffs (arg);
8311 break;
8312
8313 CASE_INT_FN (BUILT_IN_CLZ):
8314 if (wi::ne_p (arg, 0))
8315 result = wi::clz (arg);
8316 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8317 result = TYPE_PRECISION (type);
8318 break;
8319
8320 CASE_INT_FN (BUILT_IN_CTZ):
8321 if (wi::ne_p (arg, 0))
8322 result = wi::ctz (arg);
8323 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8324 result = TYPE_PRECISION (type);
8325 break;
8326
8327 CASE_INT_FN (BUILT_IN_CLRSB):
8328 result = wi::clrsb (arg);
8329 break;
8330
8331 CASE_INT_FN (BUILT_IN_POPCOUNT):
8332 result = wi::popcount (arg);
8333 break;
8334
8335 CASE_INT_FN (BUILT_IN_PARITY):
8336 result = wi::parity (arg);
8337 break;
8338
8339 default:
8340 gcc_unreachable ();
8341 }
8342
8343 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8344 }
8345
8346 return NULL_TREE;
8347 }
8348
8349 /* Fold a function call to builtin bswap16, bswap32 or bswap64 with
8350 argument ARG. Return NULL_TREE if no simplification can be made. */
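/* E.g. __builtin_bswap32 (0x12345678) folds to the constant
   0x78563412 at compile time via wide_int::bswap. */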
8351 static tree
8352 fold_builtin_bswap (tree fndecl, tree arg)
8353 {
8354 if (! validate_arg (arg, INTEGER_TYPE))
8355 return NULL_TREE;
8356
8357 /* Optimize constant value. */
8358 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8359 {
8360 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8361
8362 switch (DECL_FUNCTION_CODE (fndecl))
8363 {
8364 case BUILT_IN_BSWAP16:
8365 case BUILT_IN_BSWAP32:
8366 case BUILT_IN_BSWAP64:
8367 {
8368 signop sgn = TYPE_SIGN (type);
8369 tree result =
8370 wide_int_to_tree (type,
8371 wide_int::from (arg, TYPE_PRECISION (type),
8372 sgn).bswap ());
8373 return result;
8374 }
8375 default:
8376 gcc_unreachable ();
8377 }
8378 }
8379
8380 return NULL_TREE;
8381 }
8382
8383 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8384 NULL_TREE if no simplification can be made. */
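/* E.g. hypot (3.0, 4.0) can be folded to 5.0 via MPFR, and sign
   operations are stripped: hypot (-x, fabs (y)) becomes
   hypot (x, y). */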
8385
8386 static tree
8387 fold_builtin_hypot (location_t loc, tree fndecl,
8388 tree arg0, tree arg1, tree type)
8389 {
8390 tree res, narg0, narg1;
8391
8392 if (!validate_arg (arg0, REAL_TYPE)
8393 || !validate_arg (arg1, REAL_TYPE))
8394 return NULL_TREE;
8395
8396 /* Calculate the result when the argument is a constant. */
8397 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8398 return res;
8399
8400 /* If either argument to hypot has a negate or abs, strip that off.
8401 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8402 narg0 = fold_strip_sign_ops (arg0);
8403 narg1 = fold_strip_sign_ops (arg1);
8404 if (narg0 || narg1)
8405 {
8406 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8407 narg1 ? narg1 : arg1);
8408 }
8409
8410 /* If either argument is zero, hypot is fabs of the other. */
8411 if (real_zerop (arg0))
8412 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8413 else if (real_zerop (arg1))
8414 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8415
8416 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8417 if (flag_unsafe_math_optimizations
8418 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8419 {
8420 const REAL_VALUE_TYPE sqrt2_trunc
8421 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8422 return fold_build2_loc (loc, MULT_EXPR, type,
8423 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8424 build_real (type, sqrt2_trunc));
8425 }
8426
8427 return NULL_TREE;
8428 }
8429
8430
8431 /* Fold a builtin function call to pow, powf, or powl. Return
8432 NULL_TREE if no simplification can be made. */
8433 static tree
8434 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8435 {
8436 tree res;
8437
8438 if (!validate_arg (arg0, REAL_TYPE)
8439 || !validate_arg (arg1, REAL_TYPE))
8440 return NULL_TREE;
8441
8442 /* Calculate the result when the argument is a constant. */
8443 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8444 return res;
8445
8446 /* Optimize pow(1.0,y) = 1.0. */
8447 if (real_onep (arg0))
8448 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8449
8450 if (TREE_CODE (arg1) == REAL_CST
8451 && !TREE_OVERFLOW (arg1))
8452 {
8453 REAL_VALUE_TYPE cint;
8454 REAL_VALUE_TYPE c;
8455 HOST_WIDE_INT n;
8456
8457 c = TREE_REAL_CST (arg1);
8458
8459 /* Optimize pow(x,0.0) = 1.0. */
8460 if (REAL_VALUES_EQUAL (c, dconst0))
8461 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8462 arg0);
8463
8464 /* Optimize pow(x,1.0) = x. */
8465 if (REAL_VALUES_EQUAL (c, dconst1))
8466 return arg0;
8467
8468 /* Optimize pow(x,-1.0) = 1.0/x. */
8469 if (REAL_VALUES_EQUAL (c, dconstm1))
8470 return fold_build2_loc (loc, RDIV_EXPR, type,
8471 build_real (type, dconst1), arg0);
8472
8473 /* Optimize pow(x,0.5) = sqrt(x). */
8474 if (flag_unsafe_math_optimizations
8475 && REAL_VALUES_EQUAL (c, dconsthalf))
8476 {
8477 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8478
8479 if (sqrtfn != NULL_TREE)
8480 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8481 }
8482
8483 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8484 if (flag_unsafe_math_optimizations)
8485 {
8486 const REAL_VALUE_TYPE dconstroot
8487 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8488
8489 if (REAL_VALUES_EQUAL (c, dconstroot))
8490 {
8491 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8492 if (cbrtfn != NULL_TREE)
8493 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8494 }
8495 }
8496
8497 /* Check for an integer exponent. */
8498 n = real_to_integer (&c);
8499 real_from_integer (&cint, VOIDmode, n, SIGNED);
8500 if (real_identical (&c, &cint))
8501 {
8502 /* Attempt to evaluate pow at compile-time, unless this should
8503 raise an exception. */
8504 if (TREE_CODE (arg0) == REAL_CST
8505 && !TREE_OVERFLOW (arg0)
8506 && (n > 0
8507 || (!flag_trapping_math && !flag_errno_math)
8508 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8509 {
8510 REAL_VALUE_TYPE x;
8511 bool inexact;
8512
8513 x = TREE_REAL_CST (arg0);
8514 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8515 if (flag_unsafe_math_optimizations || !inexact)
8516 return build_real (type, x);
8517 }
8518
8519 /* Strip sign ops from even integer powers. */
8520 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8521 {
8522 tree narg0 = fold_strip_sign_ops (arg0);
8523 if (narg0)
8524 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8525 }
8526 }
8527 }
8528
8529 if (flag_unsafe_math_optimizations)
8530 {
8531 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8532
8533 /* Optimize pow(expN(x),y) = expN(x*y). */
8534 if (BUILTIN_EXPONENT_P (fcode))
8535 {
8536 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8537 tree arg = CALL_EXPR_ARG (arg0, 0);
8538 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8539 return build_call_expr_loc (loc, expfn, 1, arg);
8540 }
8541
8542 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8543 if (BUILTIN_SQRT_P (fcode))
8544 {
8545 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8546 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8547 build_real (type, dconsthalf));
8548 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8549 }
8550
8551 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8552 if (BUILTIN_CBRT_P (fcode))
8553 {
8554 tree arg = CALL_EXPR_ARG (arg0, 0);
8555 if (tree_expr_nonnegative_p (arg))
8556 {
8557 const REAL_VALUE_TYPE dconstroot
8558 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8559 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8560 build_real (type, dconstroot));
8561 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8562 }
8563 }
8564
8565 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8566 if (fcode == BUILT_IN_POW
8567 || fcode == BUILT_IN_POWF
8568 || fcode == BUILT_IN_POWL)
8569 {
8570 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8571 if (tree_expr_nonnegative_p (arg00))
8572 {
8573 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8574 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8575 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8576 }
8577 }
8578 }
8579
8580 return NULL_TREE;
8581 }
8582
8583 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8584 Return NULL_TREE if no simplification can be made. */
8585 static tree
8586 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8587 tree arg0, tree arg1, tree type)
8588 {
8589 if (!validate_arg (arg0, REAL_TYPE)
8590 || !validate_arg (arg1, INTEGER_TYPE))
8591 return NULL_TREE;
8592
8593 /* Optimize powi(1.0,y) = 1.0. */
8594 if (real_onep (arg0))
8595 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8596
8597 if (tree_fits_shwi_p (arg1))
8598 {
8599 HOST_WIDE_INT c = tree_to_shwi (arg1);
8600
8601 /* Evaluate powi at compile-time. */
8602 if (TREE_CODE (arg0) == REAL_CST
8603 && !TREE_OVERFLOW (arg0))
8604 {
8605 REAL_VALUE_TYPE x;
8606 x = TREE_REAL_CST (arg0);
8607 real_powi (&x, TYPE_MODE (type), &x, c);
8608 return build_real (type, x);
8609 }
8610
8611 /* Optimize powi(x,0) = 1.0. */
8612 if (c == 0)
8613 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8614 arg0);
8615
8616 /* Optimize powi(x,1) = x. */
8617 if (c == 1)
8618 return arg0;
8619
8620 /* Optimize powi(x,-1) = 1.0/x. */
8621 if (c == -1)
8622 return fold_build2_loc (loc, RDIV_EXPR, type,
8623 build_real (type, dconst1), arg0);
8624 }
8625
8626 return NULL_TREE;
8627 }
8628
8629 /* A subroutine of fold_builtin to fold the various exponent
8630 functions. Return NULL_TREE if no simplification can be made.
8631 FUNC is the corresponding MPFR exponent function. */
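/* E.g. with -funsafe-math-optimizations, exp (log (x)) folds to x,
   exp2 (log2 (x)) to x, and exp10 (log10 (x)) to x; constant
   arguments are evaluated through the given MPFR FUNC. */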
8632
8633 static tree
8634 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8635 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8636 {
8637 if (validate_arg (arg, REAL_TYPE))
8638 {
8639 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8640 tree res;
8641
8642 /* Calculate the result when the argument is a constant. */
8643 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8644 return res;
8645
8646 /* Optimize expN(logN(x)) = x. */
8647 if (flag_unsafe_math_optimizations)
8648 {
8649 const enum built_in_function fcode = builtin_mathfn_code (arg);
8650
8651 if ((func == mpfr_exp
8652 && (fcode == BUILT_IN_LOG
8653 || fcode == BUILT_IN_LOGF
8654 || fcode == BUILT_IN_LOGL))
8655 || (func == mpfr_exp2
8656 && (fcode == BUILT_IN_LOG2
8657 || fcode == BUILT_IN_LOG2F
8658 || fcode == BUILT_IN_LOG2L))
8659 || (func == mpfr_exp10
8660 && (fcode == BUILT_IN_LOG10
8661 || fcode == BUILT_IN_LOG10F
8662 || fcode == BUILT_IN_LOG10L)))
8663 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8664 }
8665 }
8666
8667 return NULL_TREE;
8668 }
8669
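/* For example, with -funsafe-math-optimizations the expN(logN(x))
   folds above give (a sketch; FUNC is paired with its log family
   exactly as coded):

     exp (log (x))      ->  x
     exp2 (log2 (x))    ->  x
     exp10 (log10 (x))  ->  x

   and a constant argument such as exp (1.0) is evaluated via MPFR.  */
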
8670 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8671 arguments to the call, and TYPE is its return type.
8672 Return NULL_TREE if no simplification can be made. */
8673
8674 static tree
8675 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8676 {
8677 if (!validate_arg (arg1, POINTER_TYPE)
8678 || !validate_arg (arg2, INTEGER_TYPE)
8679 || !validate_arg (len, INTEGER_TYPE))
8680 return NULL_TREE;
8681 else
8682 {
8683 const char *p1;
8684
8685 if (TREE_CODE (arg2) != INTEGER_CST
8686 || !tree_fits_uhwi_p (len))
8687 return NULL_TREE;
8688
8689 p1 = c_getstr (arg1);
8690 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8691 {
8692 char c;
8693 const char *r;
8694 tree tem;
8695
8696 if (target_char_cast (arg2, &c))
8697 return NULL_TREE;
8698
8699 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8700
8701 if (r == NULL)
8702 return build_int_cst (TREE_TYPE (arg1), 0);
8703
8704 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8705 return fold_convert_loc (loc, type, tem);
8706 }
8707 return NULL_TREE;
8708 }
8709 }
8710
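/* For example, the memchr fold above gives (a sketch; LEN must be a
   constant no larger than the string length plus one):

     memchr ("hello", 'l', 5)  ->  "hello" + 2   (converted to TYPE)
     memchr ("hello", 'z', 5)  ->  a null pointer of ARG1's type  */
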
8711 /* Fold function call to builtin memcmp with arguments ARG1, ARG2, and LEN.
8712 Return NULL_TREE if no simplification can be made. */
8713
8714 static tree
8715 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8716 {
8717 const char *p1, *p2;
8718
8719 if (!validate_arg (arg1, POINTER_TYPE)
8720 || !validate_arg (arg2, POINTER_TYPE)
8721 || !validate_arg (len, INTEGER_TYPE))
8722 return NULL_TREE;
8723
8724 /* If the LEN parameter is zero, return zero. */
8725 if (integer_zerop (len))
8726 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8727 arg1, arg2);
8728
8729 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8730 if (operand_equal_p (arg1, arg2, 0))
8731 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8732
8733 p1 = c_getstr (arg1);
8734 p2 = c_getstr (arg2);
8735
8736 /* If all arguments are constant, and the value of len is not greater
8737 than the lengths of arg1 and arg2, evaluate at compile-time. */
8738 if (tree_fits_uhwi_p (len) && p1 && p2
8739 && compare_tree_int (len, strlen (p1) + 1) <= 0
8740 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8741 {
8742 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8743
8744 if (r > 0)
8745 return integer_one_node;
8746 else if (r < 0)
8747 return integer_minus_one_node;
8748 else
8749 return integer_zero_node;
8750 }
8751
8752 /* If the LEN parameter is one, return an expression corresponding to
8753 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8754 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8755 {
8756 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8757 tree cst_uchar_ptr_node
8758 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8759
8760 tree ind1
8761 = fold_convert_loc (loc, integer_type_node,
8762 build1 (INDIRECT_REF, cst_uchar_node,
8763 fold_convert_loc (loc,
8764 cst_uchar_ptr_node,
8765 arg1)));
8766 tree ind2
8767 = fold_convert_loc (loc, integer_type_node,
8768 build1 (INDIRECT_REF, cst_uchar_node,
8769 fold_convert_loc (loc,
8770 cst_uchar_ptr_node,
8771 arg2)));
8772 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8773 }
8774
8775 return NULL_TREE;
8776 }
8777
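/* For example, the memcmp folds above give (a sketch; constant
   results are clamped to -1/0/1 as coded):

     memcmp (p, p, n)        ->  0    (N still evaluated for side effects)
     memcmp ("ab", "ac", 2)  ->  -1   (evaluated at compile time)
     memcmp (p, q, 1)        ->  *(const unsigned char *) p
                                 - *(const unsigned char *) q  */
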
8778 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8779 Return NULL_TREE if no simplification can be made. */
8780
8781 static tree
8782 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8783 {
8784 const char *p1, *p2;
8785
8786 if (!validate_arg (arg1, POINTER_TYPE)
8787 || !validate_arg (arg2, POINTER_TYPE))
8788 return NULL_TREE;
8789
8790 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8791 if (operand_equal_p (arg1, arg2, 0))
8792 return integer_zero_node;
8793
8794 p1 = c_getstr (arg1);
8795 p2 = c_getstr (arg2);
8796
8797 if (p1 && p2)
8798 {
8799 const int i = strcmp (p1, p2);
8800 if (i < 0)
8801 return integer_minus_one_node;
8802 else if (i > 0)
8803 return integer_one_node;
8804 else
8805 return integer_zero_node;
8806 }
8807
8808 /* If the second arg is "", return *(const unsigned char*)arg1. */
8809 if (p2 && *p2 == '\0')
8810 {
8811 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8812 tree cst_uchar_ptr_node
8813 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8814
8815 return fold_convert_loc (loc, integer_type_node,
8816 build1 (INDIRECT_REF, cst_uchar_node,
8817 fold_convert_loc (loc,
8818 cst_uchar_ptr_node,
8819 arg1)));
8820 }
8821
8822 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8823 if (p1 && *p1 == '\0')
8824 {
8825 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8826 tree cst_uchar_ptr_node
8827 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8828
8829 tree temp
8830 = fold_convert_loc (loc, integer_type_node,
8831 build1 (INDIRECT_REF, cst_uchar_node,
8832 fold_convert_loc (loc,
8833 cst_uchar_ptr_node,
8834 arg2)));
8835 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8836 }
8837
8838 return NULL_TREE;
8839 }
8840
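/* For example, the strcmp folds above give (a sketch):

     strcmp (s, s)       ->  0
     strcmp ("ab", "b")  ->  -1   (evaluated at compile time)
     strcmp (s, "")      ->  *(const unsigned char *) s
     strcmp ("", s)      ->  - *(const unsigned char *) s  */
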
8841 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8842 Return NULL_TREE if no simplification can be made. */
8843
8844 static tree
8845 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8846 {
8847 const char *p1, *p2;
8848
8849 if (!validate_arg (arg1, POINTER_TYPE)
8850 || !validate_arg (arg2, POINTER_TYPE)
8851 || !validate_arg (len, INTEGER_TYPE))
8852 return NULL_TREE;
8853
8854 /* If the LEN parameter is zero, return zero. */
8855 if (integer_zerop (len))
8856 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8857 arg1, arg2);
8858
8859 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8860 if (operand_equal_p (arg1, arg2, 0))
8861 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8862
8863 p1 = c_getstr (arg1);
8864 p2 = c_getstr (arg2);
8865
8866 if (tree_fits_uhwi_p (len) && p1 && p2)
8867 {
8868 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8869 if (i > 0)
8870 return integer_one_node;
8871 else if (i < 0)
8872 return integer_minus_one_node;
8873 else
8874 return integer_zero_node;
8875 }
8876
8877 /* If the second arg is "", and the length is greater than zero,
8878 return *(const unsigned char*)arg1. */
8879 if (p2 && *p2 == '\0'
8880 && TREE_CODE (len) == INTEGER_CST
8881 && tree_int_cst_sgn (len) == 1)
8882 {
8883 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8884 tree cst_uchar_ptr_node
8885 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8886
8887 return fold_convert_loc (loc, integer_type_node,
8888 build1 (INDIRECT_REF, cst_uchar_node,
8889 fold_convert_loc (loc,
8890 cst_uchar_ptr_node,
8891 arg1)));
8892 }
8893
8894 /* If the first arg is "", and the length is greater than zero,
8895 return -*(const unsigned char*)arg2. */
8896 if (p1 && *p1 == '\0'
8897 && TREE_CODE (len) == INTEGER_CST
8898 && tree_int_cst_sgn (len) == 1)
8899 {
8900 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8901 tree cst_uchar_ptr_node
8902 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8903
8904 tree temp = fold_convert_loc (loc, integer_type_node,
8905 build1 (INDIRECT_REF, cst_uchar_node,
8906 fold_convert_loc (loc,
8907 cst_uchar_ptr_node,
8908 arg2)));
8909 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8910 }
8911
8912 /* If the LEN parameter is one, return an expression corresponding to
8913 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8914 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8915 {
8916 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8917 tree cst_uchar_ptr_node
8918 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8919
8920 tree ind1 = fold_convert_loc (loc, integer_type_node,
8921 build1 (INDIRECT_REF, cst_uchar_node,
8922 fold_convert_loc (loc,
8923 cst_uchar_ptr_node,
8924 arg1)));
8925 tree ind2 = fold_convert_loc (loc, integer_type_node,
8926 build1 (INDIRECT_REF, cst_uchar_node,
8927 fold_convert_loc (loc,
8928 cst_uchar_ptr_node,
8929 arg2)));
8930 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8931 }
8932
8933 return NULL_TREE;
8934 }
8935
8936 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8937 ARG. Return NULL_TREE if no simplification can be made. */
8938
8939 static tree
8940 fold_builtin_signbit (location_t loc, tree arg, tree type)
8941 {
8942 if (!validate_arg (arg, REAL_TYPE))
8943 return NULL_TREE;
8944
8945 /* If ARG is a compile-time constant, determine the result. */
8946 if (TREE_CODE (arg) == REAL_CST
8947 && !TREE_OVERFLOW (arg))
8948 {
8949 REAL_VALUE_TYPE c;
8950
8951 c = TREE_REAL_CST (arg);
8952 return (REAL_VALUE_NEGATIVE (c)
8953 ? build_one_cst (type)
8954 : build_zero_cst (type));
8955 }
8956
8957 /* If ARG is non-negative, the result is always zero. */
8958 if (tree_expr_nonnegative_p (arg))
8959 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8960
8961 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8962 if (!HONOR_SIGNED_ZEROS (arg))
8963 return fold_convert (type,
8964 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8965 build_real (TREE_TYPE (arg), dconst0)));
8966
8967 return NULL_TREE;
8968 }
8969
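/* For example, the signbit folds above give (a sketch; the last form
   applies only when the format has no signed zeros):

     signbit (-3.0)      ->  1   (evaluated at compile time)
     signbit (fabs (x))  ->  0   (argument known non-negative)
     signbit (x)         ->  x < 0.0  */
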
8970 /* Fold function call to builtin copysign, copysignf or copysignl with
8971 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8972 be made. */
8973
8974 static tree
8975 fold_builtin_copysign (location_t loc, tree fndecl,
8976 tree arg1, tree arg2, tree type)
8977 {
8978 tree tem;
8979
8980 if (!validate_arg (arg1, REAL_TYPE)
8981 || !validate_arg (arg2, REAL_TYPE))
8982 return NULL_TREE;
8983
8984 /* copysign(X,X) is X. */
8985 if (operand_equal_p (arg1, arg2, 0))
8986 return fold_convert_loc (loc, type, arg1);
8987
8988 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8989 if (TREE_CODE (arg1) == REAL_CST
8990 && TREE_CODE (arg2) == REAL_CST
8991 && !TREE_OVERFLOW (arg1)
8992 && !TREE_OVERFLOW (arg2))
8993 {
8994 REAL_VALUE_TYPE c1, c2;
8995
8996 c1 = TREE_REAL_CST (arg1);
8997 c2 = TREE_REAL_CST (arg2);
8998 /* c1.sign := c2.sign. */
8999 real_copysign (&c1, &c2);
9000 return build_real (type, c1);
9001 }
9002
9003 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9004 Remember to evaluate Y for side-effects. */
9005 if (tree_expr_nonnegative_p (arg2))
9006 return omit_one_operand_loc (loc, type,
9007 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9008 arg2);
9009
9010 /* Strip sign changing operations for the first argument. */
9011 tem = fold_strip_sign_ops (arg1);
9012 if (tem)
9013 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9014
9015 return NULL_TREE;
9016 }
9017
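/* For example, the copysign folds above give (a sketch):

     copysign (x, x)       ->  x
     copysign (-3.0, 2.0)  ->  3.0       (evaluated at compile time)
     copysign (x, 2.0)     ->  fabs (x)  (second argument non-negative)
     copysign (-x, y)      ->  copysign (x, y)  (sign of ARG1 stripped)  */
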
9018 /* Fold a call to builtin isascii with argument ARG. */
9019
9020 static tree
9021 fold_builtin_isascii (location_t loc, tree arg)
9022 {
9023 if (!validate_arg (arg, INTEGER_TYPE))
9024 return NULL_TREE;
9025 else
9026 {
9027 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9028 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9029 build_int_cst (integer_type_node,
9030 ~ (unsigned HOST_WIDE_INT) 0x7f));
9031 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9032 arg, integer_zero_node);
9033 }
9034 }
9035
9036 /* Fold a call to builtin toascii with argument ARG. */
9037
9038 static tree
9039 fold_builtin_toascii (location_t loc, tree arg)
9040 {
9041 if (!validate_arg (arg, INTEGER_TYPE))
9042 return NULL_TREE;
9043
9044 /* Transform toascii(c) -> (c & 0x7f). */
9045 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9046 build_int_cst (integer_type_node, 0x7f));
9047 }
9048
9049 /* Fold a call to builtin isdigit with argument ARG. */
9050
9051 static tree
9052 fold_builtin_isdigit (location_t loc, tree arg)
9053 {
9054 if (!validate_arg (arg, INTEGER_TYPE))
9055 return NULL_TREE;
9056 else
9057 {
9058 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9059 /* According to the C standard, isdigit is unaffected by locale.
9060 However, it definitely is affected by the target character set. */
9061 unsigned HOST_WIDE_INT target_digit0
9062 = lang_hooks.to_target_charset ('0');
9063
9064 if (target_digit0 == 0)
9065 return NULL_TREE;
9066
9067 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9068 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9069 build_int_cst (unsigned_type_node, target_digit0));
9070 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9071 build_int_cst (unsigned_type_node, 9));
9072 }
9073 }
9074
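/* For example, the three ctype folds above expand at the source level
   to (a sketch; '0' is first mapped through the target character set):

     isascii (c)  ->  (c & ~0x7f) == 0
     toascii (c)  ->  c & 0x7f
     isdigit (c)  ->  (unsigned) c - '0' <= 9  */
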
9075 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9076
9077 static tree
9078 fold_builtin_fabs (location_t loc, tree arg, tree type)
9079 {
9080 if (!validate_arg (arg, REAL_TYPE))
9081 return NULL_TREE;
9082
9083 arg = fold_convert_loc (loc, type, arg);
9084 if (TREE_CODE (arg) == REAL_CST)
9085 return fold_abs_const (arg, type);
9086 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9087 }
9088
9089 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9090
9091 static tree
9092 fold_builtin_abs (location_t loc, tree arg, tree type)
9093 {
9094 if (!validate_arg (arg, INTEGER_TYPE))
9095 return NULL_TREE;
9096
9097 arg = fold_convert_loc (loc, type, arg);
9098 if (TREE_CODE (arg) == INTEGER_CST)
9099 return fold_abs_const (arg, type);
9100 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9101 }
9102
9103 /* Fold an fma operation with arguments ARG[012]. */
9104
9105 tree
9106 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9107 tree type, tree arg0, tree arg1, tree arg2)
9108 {
9109 if (TREE_CODE (arg0) == REAL_CST
9110 && TREE_CODE (arg1) == REAL_CST
9111 && TREE_CODE (arg2) == REAL_CST)
9112 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9113
9114 return NULL_TREE;
9115 }
9116
9117 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9118
9119 static tree
9120 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9121 {
9122 if (validate_arg (arg0, REAL_TYPE)
9123 && validate_arg (arg1, REAL_TYPE)
9124 && validate_arg (arg2, REAL_TYPE))
9125 {
9126 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9127 if (tem)
9128 return tem;
9129
9130 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9131 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9132 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9133 }
9134 return NULL_TREE;
9135 }
9136
9137 /* Fold a call to builtin fmin or fmax. */
9138
9139 static tree
9140 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9141 tree type, bool max)
9142 {
9143 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9144 {
9145 /* Calculate the result when the argument is a constant. */
9146 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9147
9148 if (res)
9149 return res;
9150
9151 /* If either argument is NaN, return the other one. Avoid the
9152 transformation if we get (and honor) a signalling NaN. Using
9153 omit_one_operand() ensures we create a non-lvalue. */
9154 if (TREE_CODE (arg0) == REAL_CST
9155 && real_isnan (&TREE_REAL_CST (arg0))
9156 && (! HONOR_SNANS (arg0)
9157 || ! TREE_REAL_CST (arg0).signalling))
9158 return omit_one_operand_loc (loc, type, arg1, arg0);
9159 if (TREE_CODE (arg1) == REAL_CST
9160 && real_isnan (&TREE_REAL_CST (arg1))
9161 && (! HONOR_SNANS (arg1)
9162 || ! TREE_REAL_CST (arg1).signalling))
9163 return omit_one_operand_loc (loc, type, arg0, arg1);
9164
9165 /* Transform fmin/fmax(x,x) -> x. */
9166 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9167 return omit_one_operand_loc (loc, type, arg0, arg1);
9168
9169 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9170 functions to return the numeric arg if the other one is NaN.
9171 These tree codes don't honor that, so only transform if
9172 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9173 handled, so we don't have to worry about it either. */
9174 if (flag_finite_math_only)
9175 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9176 fold_convert_loc (loc, type, arg0),
9177 fold_convert_loc (loc, type, arg1));
9178 }
9179 return NULL_TREE;
9180 }
9181
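/* For example, the fmin/fmax folds above give (a sketch; the MIN/MAX
   form requires -ffinite-math-only):

     fmin (x, x)                   ->  x
     fmax (x, __builtin_nan (""))  ->  x    (quiet NaN dropped)
     fmin (1.0, 2.0)               ->  1.0  (evaluated via MPFR)
     fmax (x, y)                   ->  MAX_EXPR <x, y>  */
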
9182 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9183
9184 static tree
9185 fold_builtin_carg (location_t loc, tree arg, tree type)
9186 {
9187 if (validate_arg (arg, COMPLEX_TYPE)
9188 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9189 {
9190 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9191
9192 if (atan2_fn)
9193 {
9194 tree new_arg = builtin_save_expr (arg);
9195 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9196 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9197 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9198 }
9199 }
9200
9201 return NULL_TREE;
9202 }
9203
9204 /* Fold a call to builtin logb/ilogb. */
9205
9206 static tree
9207 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9208 {
9209 if (! validate_arg (arg, REAL_TYPE))
9210 return NULL_TREE;
9211
9212 STRIP_NOPS (arg);
9213
9214 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9215 {
9216 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9217
9218 switch (value->cl)
9219 {
9220 case rvc_nan:
9221 case rvc_inf:
9222 /* If arg is Inf or NaN and we're logb, return it. */
9223 if (TREE_CODE (rettype) == REAL_TYPE)
9224 {
9225 /* For logb(-Inf) we have to return +Inf. */
9226 if (real_isinf (value) && real_isneg (value))
9227 {
9228 REAL_VALUE_TYPE tem;
9229 real_inf (&tem);
9230 return build_real (rettype, tem);
9231 }
9232 return fold_convert_loc (loc, rettype, arg);
9233 }
9234 /* Fall through... */
9235 case rvc_zero:
9236 /* Zero may set errno and/or raise an exception for logb; also,
9237 for ilogb we don't know FP_ILOGB0. */
9238 return NULL_TREE;
9239 case rvc_normal:
9240 /* For normal numbers, proceed iff radix == 2. In GCC,
9241 normalized significands are in the range [0.5, 1.0). We
9242 want the exponent as if they were [1.0, 2.0) so get the
9243 exponent and subtract 1. */
9244 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9245 return fold_convert_loc (loc, rettype,
9246 build_int_cst (integer_type_node,
9247 REAL_EXP (value)-1));
9248 break;
9249 }
9250 }
9251
9252 return NULL_TREE;
9253 }
9254
9255 /* Fold a call to builtin significand, if radix == 2. */
9256
9257 static tree
9258 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9259 {
9260 if (! validate_arg (arg, REAL_TYPE))
9261 return NULL_TREE;
9262
9263 STRIP_NOPS (arg);
9264
9265 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9266 {
9267 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9268
9269 switch (value->cl)
9270 {
9271 case rvc_zero:
9272 case rvc_nan:
9273 case rvc_inf:
9274 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9275 return fold_convert_loc (loc, rettype, arg);
9276 case rvc_normal:
9277 /* For normal numbers, proceed iff radix == 2. */
9278 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9279 {
9280 REAL_VALUE_TYPE result = *value;
9281 /* In GCC, normalized significands are in the range [0.5,
9282 1.0). We want them to be [1.0, 2.0) so set the
9283 exponent to 1. */
9284 SET_REAL_EXP (&result, 1);
9285 return build_real (rettype, result);
9286 }
9287 break;
9288 }
9289 }
9290
9291 return NULL_TREE;
9292 }
9293
9294 /* Fold a call to builtin frexp; we can assume the base is 2. */
9295
9296 static tree
9297 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9298 {
9299 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9300 return NULL_TREE;
9301
9302 STRIP_NOPS (arg0);
9303
9304 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9305 return NULL_TREE;
9306
9307 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9308
9309 /* Proceed if a valid pointer type was passed in. */
9310 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9311 {
9312 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9313 tree frac, exp;
9314
9315 switch (value->cl)
9316 {
9317 case rvc_zero:
9318 /* For +-0, return (*exp = 0, +-0). */
9319 exp = integer_zero_node;
9320 frac = arg0;
9321 break;
9322 case rvc_nan:
9323 case rvc_inf:
9324 /* For +-NaN or +-Inf, *exp is unspecified; return arg0. */
9325 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9326 case rvc_normal:
9327 {
9328 /* Since the frexp function always expects base 2, and in
9329 GCC normalized significands are already in the range
9330 [0.5, 1.0), we have exactly what frexp wants. */
9331 REAL_VALUE_TYPE frac_rvt = *value;
9332 SET_REAL_EXP (&frac_rvt, 0);
9333 frac = build_real (rettype, frac_rvt);
9334 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9335 }
9336 break;
9337 default:
9338 gcc_unreachable ();
9339 }
9340
9341 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9342 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9343 TREE_SIDE_EFFECTS (arg1) = 1;
9344 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9345 }
9346
9347 return NULL_TREE;
9348 }
9349
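/* For example, the frexp fold above gives (a sketch for a base-2
   mode, where 8.0 == 0.5 * 2**4):

     frexp (8.0, &e)  ->  (*e = 4, 0.5)
     frexp (0.0, &e)  ->  (*e = 0, 0.0)
     frexp (x, &e), x NaN or +-Inf  ->  x   (*e left unspecified)  */
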
9350 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9351 then we can assume the base is two. If it's false, then we have to
9352 check the mode of the TYPE parameter in certain cases. */
9353
9354 static tree
9355 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9356 tree type, bool ldexp)
9357 {
9358 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9359 {
9360 STRIP_NOPS (arg0);
9361 STRIP_NOPS (arg1);
9362
9363 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9364 if (real_zerop (arg0) || integer_zerop (arg1)
9365 || (TREE_CODE (arg0) == REAL_CST
9366 && !real_isfinite (&TREE_REAL_CST (arg0))))
9367 return omit_one_operand_loc (loc, type, arg0, arg1);
9368
9369 /* If both arguments are constant, then try to evaluate it. */
9370 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9371 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9372 && tree_fits_shwi_p (arg1))
9373 {
9374 /* Bound the maximum adjustment to twice the range of the
9375 mode's valid exponents. Use abs to ensure the range is
9376 positive as a sanity check. */
9377 const long max_exp_adj = 2 *
9378 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9379 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9380
9381 /* Get the user-requested adjustment. */
9382 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9383
9384 /* The requested adjustment must be inside this range. This
9385 is a preliminary cap to avoid things like overflow; we
9386 may still fail to compute the result for other reasons. */
9387 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9388 {
9389 REAL_VALUE_TYPE initial_result;
9390
9391 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9392
9393 /* Ensure we didn't overflow. */
9394 if (! real_isinf (&initial_result))
9395 {
9396 const REAL_VALUE_TYPE trunc_result
9397 = real_value_truncate (TYPE_MODE (type), initial_result);
9398
9399 /* Only proceed if the target mode can hold the
9400 resulting value. */
9401 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9402 return build_real (type, trunc_result);
9403 }
9404 }
9405 }
9406 }
9407
9408 return NULL_TREE;
9409 }
9410
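/* For example, the ldexp/scalbn folds above give (a sketch; constant
   folding requires the exponent adjustment to stay in range):

     ldexp (x, 0)     ->  x
     ldexp (0.0, n)   ->  0.0     (N still evaluated for side effects)
     ldexp (1.0, 10)  ->  1024.0  (evaluated at compile time)  */
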
9411 /* Fold a call to builtin modf. */
9412
9413 static tree
9414 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9415 {
9416 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9417 return NULL_TREE;
9418
9419 STRIP_NOPS (arg0);
9420
9421 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9422 return NULL_TREE;
9423
9424 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9425
9426 /* Proceed if a valid pointer type was passed in. */
9427 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9428 {
9429 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9430 REAL_VALUE_TYPE trunc, frac;
9431
9432 switch (value->cl)
9433 {
9434 case rvc_nan:
9435 case rvc_zero:
9436 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9437 trunc = frac = *value;
9438 break;
9439 case rvc_inf:
9440 /* For +-Inf, return (*arg1 = arg0, +-0). */
9441 frac = dconst0;
9442 frac.sign = value->sign;
9443 trunc = *value;
9444 break;
9445 case rvc_normal:
9446 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9447 real_trunc (&trunc, VOIDmode, value);
9448 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9449 /* If the original number was negative and already
9450 integral, then the fractional part is -0.0. */
9451 if (value->sign && frac.cl == rvc_zero)
9452 frac.sign = value->sign;
9453 break;
9454 }
9455
9456 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9457 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9458 build_real (rettype, trunc));
9459 TREE_SIDE_EFFECTS (arg1) = 1;
9460 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9461 build_real (rettype, frac));
9462 }
9463
9464 return NULL_TREE;
9465 }
9466
9467 /* Given a location LOC, an interclass builtin function decl FNDECL
9468 and its single argument ARG, return a folded expression computing
9469 the same, or NULL_TREE if we either couldn't or didn't want to fold
9470 (the latter happens if there's an RTL instruction available). */
9471
9472 static tree
9473 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9474 {
9475 machine_mode mode;
9476
9477 if (!validate_arg (arg, REAL_TYPE))
9478 return NULL_TREE;
9479
9480 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9481 return NULL_TREE;
9482
9483 mode = TYPE_MODE (TREE_TYPE (arg));
9484
9485 /* If there is no optab, try generic code. */
9486 switch (DECL_FUNCTION_CODE (fndecl))
9487 {
9488 tree result;
9489
9490 CASE_FLT_FN (BUILT_IN_ISINF):
9491 {
9492 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9493 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9494 tree const type = TREE_TYPE (arg);
9495 REAL_VALUE_TYPE r;
9496 char buf[128];
9497
9498 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9499 real_from_string (&r, buf);
9500 result = build_call_expr (isgr_fn, 2,
9501 fold_build1_loc (loc, ABS_EXPR, type, arg),
9502 build_real (type, r));
9503 return result;
9504 }
9505 CASE_FLT_FN (BUILT_IN_FINITE):
9506 case BUILT_IN_ISFINITE:
9507 {
9508 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9509 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9510 tree const type = TREE_TYPE (arg);
9511 REAL_VALUE_TYPE r;
9512 char buf[128];
9513
9514 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9515 real_from_string (&r, buf);
9516 result = build_call_expr (isle_fn, 2,
9517 fold_build1_loc (loc, ABS_EXPR, type, arg),
9518 build_real (type, r));
9526 return result;
9527 }
9528 case BUILT_IN_ISNORMAL:
9529 {
9530 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9531 islessequal(fabs(x),DBL_MAX). */
9532 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9533 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9534 tree const type = TREE_TYPE (arg);
9535 REAL_VALUE_TYPE rmax, rmin;
9536 char buf[128];
9537
9538 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9539 real_from_string (&rmax, buf);
9540 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9541 real_from_string (&rmin, buf);
9542 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9543 result = build_call_expr (isle_fn, 2, arg,
9544 build_real (type, rmax));
9545 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9546 build_call_expr (isge_fn, 2, arg,
9547 build_real (type, rmin)));
9548 return result;
9549 }
9550 default:
9551 break;
9552 }
9553
9554 return NULL_TREE;
9555 }
9556
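/* For example, when interclass_mathfn_icode reports no direct
   instruction, the generic expansions above are (a sketch; DBL_MAX
   and DBL_MIN stand for the extremes of the argument's mode):

     isinf (x)     ->  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  ->  islessequal (fabs (x), DBL_MAX)
                       & isgreaterequal (fabs (x), DBL_MIN)  */
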
9557 /* Fold a call to __builtin_isnan, __builtin_isinf, or __builtin_finite.
9558 ARG is the argument for the call; BUILTIN_INDEX selects the predicate. */
9559
9560 static tree
9561 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9562 {
9563 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9564 REAL_VALUE_TYPE r;
9565
9566 if (!validate_arg (arg, REAL_TYPE))
9567 return NULL_TREE;
9568
9569 switch (builtin_index)
9570 {
9571 case BUILT_IN_ISINF:
9572 if (!HONOR_INFINITIES (arg))
9573 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9574
9575 if (TREE_CODE (arg) == REAL_CST)
9576 {
9577 r = TREE_REAL_CST (arg);
9578 if (real_isinf (&r))
9579 return real_compare (GT_EXPR, &r, &dconst0)
9580 ? integer_one_node : integer_minus_one_node;
9581 else
9582 return integer_zero_node;
9583 }
9584
9585 return NULL_TREE;
9586
9587 case BUILT_IN_ISINF_SIGN:
9588 {
9589 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9590 /* In a boolean context, GCC will fold the inner COND_EXPR to
9591 1. So e.g. "if (isinf_sign(x))" would be folded to just
9592 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9593 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9594 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9595 tree tmp = NULL_TREE;
9596
9597 arg = builtin_save_expr (arg);
9598
9599 if (signbit_fn && isinf_fn)
9600 {
9601 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9602 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9603
9604 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9605 signbit_call, integer_zero_node);
9606 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9607 isinf_call, integer_zero_node);
9608
9609 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9610 integer_minus_one_node, integer_one_node);
9611 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9612 isinf_call, tmp,
9613 integer_zero_node);
9614 }
9615
9616 return tmp;
9617 }
9618
9619 case BUILT_IN_ISFINITE:
9620 if (!HONOR_NANS (arg)
9621 && !HONOR_INFINITIES (arg))
9622 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9623
9624 if (TREE_CODE (arg) == REAL_CST)
9625 {
9626 r = TREE_REAL_CST (arg);
9627 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9628 }
9629
9630 return NULL_TREE;
9631
9632 case BUILT_IN_ISNAN:
9633 if (!HONOR_NANS (arg))
9634 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9635
9636 if (TREE_CODE (arg) == REAL_CST)
9637 {
9638 r = TREE_REAL_CST (arg);
9639 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9640 }
9641
9642 arg = builtin_save_expr (arg);
9643 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9644
9645 default:
9646 gcc_unreachable ();
9647 }
9648 }
9649
9650 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9651 This builtin will generate code to return the appropriate floating
9652 point classification depending on the value of the floating point
9653 number passed in. The possible return values must be supplied as
9654 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9655 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9656 one floating point argument, which is "type generic". */
9657
9658 static tree
9659 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9660 {
9661 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9662 arg, type, res, tmp;
9663 machine_mode mode;
9664 REAL_VALUE_TYPE r;
9665 char buf[128];
9666
9667 /* Verify the required arguments in the original call. */
9668 if (nargs != 6
9669 || !validate_arg (args[0], INTEGER_TYPE)
9670 || !validate_arg (args[1], INTEGER_TYPE)
9671 || !validate_arg (args[2], INTEGER_TYPE)
9672 || !validate_arg (args[3], INTEGER_TYPE)
9673 || !validate_arg (args[4], INTEGER_TYPE)
9674 || !validate_arg (args[5], REAL_TYPE))
9675 return NULL_TREE;
9676
9677 fp_nan = args[0];
9678 fp_infinite = args[1];
9679 fp_normal = args[2];
9680 fp_subnormal = args[3];
9681 fp_zero = args[4];
9682 arg = args[5];
9683 type = TREE_TYPE (arg);
9684 mode = TYPE_MODE (type);
9685 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9686
9687 /* fpclassify(x) ->
9688 isnan(x) ? FP_NAN :
9689 (fabs(x) == Inf ? FP_INFINITE :
9690 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9691 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9692
9693 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9694 build_real (type, dconst0));
9695 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9696 tmp, fp_zero, fp_subnormal);
9697
9698 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9699 real_from_string (&r, buf);
9700 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9701 arg, build_real (type, r));
9702 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9703
9704 if (HONOR_INFINITIES (mode))
9705 {
9706 real_inf (&r);
9707 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9708 build_real (type, r));
9709 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9710 fp_infinite, res);
9711 }
9712
9713 if (HONOR_NANS (mode))
9714 {
9715 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9716 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9717 }
9718
9719 return res;
9720 }
9721
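/* For example, for a double argument the fold above materializes the
   nested conditional (a sketch; x stands for the saved fabs of the
   input, and the NaN/Inf arms appear only when the mode honors them):

     fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, FP_SUBNORMAL, FP_ZERO, d)
       ->  ordered (x, x) ? (x == Inf ? FP_INFINITE
                             : x >= DBL_MIN ? FP_NORMAL
                             : x == 0.0 ? FP_ZERO : FP_SUBNORMAL)
                          : FP_NAN  */
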
9722 /* Fold a call to an unordered comparison function such as
9723 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9724 being called and ARG0 and ARG1 are the arguments for the call.
9725 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9726 the opposite of the desired result. UNORDERED_CODE is used
9727 for modes that can hold NaNs and ORDERED_CODE is used for
9728 the rest. */
9729
9730 static tree
9731 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9732 enum tree_code unordered_code,
9733 enum tree_code ordered_code)
9734 {
9735 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9736 enum tree_code code;
9737 tree type0, type1;
9738 enum tree_code code0, code1;
9739 tree cmp_type = NULL_TREE;
9740
9741 type0 = TREE_TYPE (arg0);
9742 type1 = TREE_TYPE (arg1);
9743
9744 code0 = TREE_CODE (type0);
9745 code1 = TREE_CODE (type1);
9746
9747 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9748 /* Choose the wider of two real types. */
9749 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9750 ? type0 : type1;
9751 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9752 cmp_type = type0;
9753 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9754 cmp_type = type1;
9755
9756 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9757 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9758
9759 if (unordered_code == UNORDERED_EXPR)
9760 {
9761 if (!HONOR_NANS (arg0))
9762 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9763 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9764 }
9765
9766 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9767 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9768 fold_build2_loc (loc, code, type, arg0, arg1));
9769 }
9770
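/* For example, the unordered-comparison folds above build the negation
   of the opposite comparison code (a sketch in GENERIC notation):

     isgreater (x, y)    ->  ! UNLE_EXPR <x, y>    (NaNs honored)
     isgreater (x, y)    ->  ! LE_EXPR <x, y>      (no NaNs in mode)
     isunordered (x, y)  ->  UNORDERED_EXPR <x, y>, or 0 if the
                             mode cannot hold NaNs  */
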
9771 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9772 arithmetic if it can never overflow, or into internal functions that
9773 return both the result of the arithmetic and a boolean overflow flag in
9774 a complex integer result, or some other check for overflow. */
9775
9776 static tree
9777 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9778 tree arg0, tree arg1, tree arg2)
9779 {
9780 enum internal_fn ifn = IFN_LAST;
9781 tree type = TREE_TYPE (TREE_TYPE (arg2));
9782 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9783 switch (fcode)
9784 {
9785 case BUILT_IN_ADD_OVERFLOW:
9786 case BUILT_IN_SADD_OVERFLOW:
9787 case BUILT_IN_SADDL_OVERFLOW:
9788 case BUILT_IN_SADDLL_OVERFLOW:
9789 case BUILT_IN_UADD_OVERFLOW:
9790 case BUILT_IN_UADDL_OVERFLOW:
9791 case BUILT_IN_UADDLL_OVERFLOW:
9792 ifn = IFN_ADD_OVERFLOW;
9793 break;
9794 case BUILT_IN_SUB_OVERFLOW:
9795 case BUILT_IN_SSUB_OVERFLOW:
9796 case BUILT_IN_SSUBL_OVERFLOW:
9797 case BUILT_IN_SSUBLL_OVERFLOW:
9798 case BUILT_IN_USUB_OVERFLOW:
9799 case BUILT_IN_USUBL_OVERFLOW:
9800 case BUILT_IN_USUBLL_OVERFLOW:
9801 ifn = IFN_SUB_OVERFLOW;
9802 break;
9803 case BUILT_IN_MUL_OVERFLOW:
9804 case BUILT_IN_SMUL_OVERFLOW:
9805 case BUILT_IN_SMULL_OVERFLOW:
9806 case BUILT_IN_SMULLL_OVERFLOW:
9807 case BUILT_IN_UMUL_OVERFLOW:
9808 case BUILT_IN_UMULL_OVERFLOW:
9809 case BUILT_IN_UMULLL_OVERFLOW:
9810 ifn = IFN_MUL_OVERFLOW;
9811 break;
9812 default:
9813 gcc_unreachable ();
9814 }
9815 tree ctype = build_complex_type (type);
9816 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9817 2, arg0, arg1);
9818 tree tgt = save_expr (call);
9819 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9820 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9821 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9822 tree store
9823 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9824 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9825 }
9826
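/* For example, the fold above turns

     __builtin_add_overflow (a, b, &r)

   into (a sketch in GENERIC notation, T being the type of *r):

     t = ADD_OVERFLOW (a, b);        internal call, complex T result
     *r = REALPART_EXPR <t>,         store the arithmetic result
     (bool) IMAGPART_EXPR <t>        whole expression yields the flag  */
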
9827 /* Fold a call to built-in function FNDECL with 0 arguments.
9828 This function returns NULL_TREE if no simplification was possible. */
9829
9830 static tree
9831 fold_builtin_0 (location_t loc, tree fndecl)
9832 {
9833 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9834 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9835 switch (fcode)
9836 {
9837 CASE_FLT_FN (BUILT_IN_INF):
9838 case BUILT_IN_INFD32:
9839 case BUILT_IN_INFD64:
9840 case BUILT_IN_INFD128:
9841 return fold_builtin_inf (loc, type, true);
9842
9843 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9844 return fold_builtin_inf (loc, type, false);
9845
9846 case BUILT_IN_CLASSIFY_TYPE:
9847 return fold_builtin_classify_type (NULL_TREE);
9848
9849 default:
9850 break;
9851 }
9852 return NULL_TREE;
9853 }
9854
9855 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9856 This function returns NULL_TREE if no simplification was possible. */
9857
9858 static tree
9859 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9860 {
9861 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9862 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9863 switch (fcode)
9864 {
9865 case BUILT_IN_CONSTANT_P:
9866 {
9867 tree val = fold_builtin_constant_p (arg0);
9868
9869 /* Gimplification will pull the CALL_EXPR for the builtin out of
9870 an if condition. When not optimizing, we'll not CSE it back.
9871 To avoid regressions such as link errors, return false now.
9872 if (!val && !optimize)
9873 val = integer_zero_node;
9874
9875 return val;
9876 }
9877
9878 case BUILT_IN_CLASSIFY_TYPE:
9879 return fold_builtin_classify_type (arg0);
9880
9881 case BUILT_IN_STRLEN:
9882 return fold_builtin_strlen (loc, type, arg0);
9883
9884 CASE_FLT_FN (BUILT_IN_FABS):
9885 case BUILT_IN_FABSD32:
9886 case BUILT_IN_FABSD64:
9887 case BUILT_IN_FABSD128:
9888 return fold_builtin_fabs (loc, arg0, type);
9889
9890 case BUILT_IN_ABS:
9891 case BUILT_IN_LABS:
9892 case BUILT_IN_LLABS:
9893 case BUILT_IN_IMAXABS:
9894 return fold_builtin_abs (loc, arg0, type);
9895
9896 CASE_FLT_FN (BUILT_IN_CONJ):
9897 if (validate_arg (arg0, COMPLEX_TYPE)
9898 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9899 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9900 break;
9901
9902 CASE_FLT_FN (BUILT_IN_CREAL):
9903 if (validate_arg (arg0, COMPLEX_TYPE)
9904 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9905 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9906 break;
9907
9908 CASE_FLT_FN (BUILT_IN_CIMAG):
9909 if (validate_arg (arg0, COMPLEX_TYPE)
9910 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9911 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9912 break;
9913
9914 CASE_FLT_FN (BUILT_IN_CCOS):
9915 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9916
9917 CASE_FLT_FN (BUILT_IN_CCOSH):
9918 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9919
9920 CASE_FLT_FN (BUILT_IN_CPROJ):
9921 return fold_builtin_cproj (loc, arg0, type);
9922
9923 CASE_FLT_FN (BUILT_IN_CSIN):
9924 if (validate_arg (arg0, COMPLEX_TYPE)
9925 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9926 return do_mpc_arg1 (arg0, type, mpc_sin);
9927 break;
9928
9929 CASE_FLT_FN (BUILT_IN_CSINH):
9930 if (validate_arg (arg0, COMPLEX_TYPE)
9931 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9932 return do_mpc_arg1 (arg0, type, mpc_sinh);
9933 break;
9934
9935 CASE_FLT_FN (BUILT_IN_CTAN):
9936 if (validate_arg (arg0, COMPLEX_TYPE)
9937 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9938 return do_mpc_arg1 (arg0, type, mpc_tan);
9939 break;
9940
9941 CASE_FLT_FN (BUILT_IN_CTANH):
9942 if (validate_arg (arg0, COMPLEX_TYPE)
9943 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9944 return do_mpc_arg1 (arg0, type, mpc_tanh);
9945 break;
9946
9947 CASE_FLT_FN (BUILT_IN_CLOG):
9948 if (validate_arg (arg0, COMPLEX_TYPE)
9949 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9950 return do_mpc_arg1 (arg0, type, mpc_log);
9951 break;
9952
9953 CASE_FLT_FN (BUILT_IN_CSQRT):
9954 if (validate_arg (arg0, COMPLEX_TYPE)
9955 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9956 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9957 break;
9958
9959 CASE_FLT_FN (BUILT_IN_CASIN):
9960 if (validate_arg (arg0, COMPLEX_TYPE)
9961 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9962 return do_mpc_arg1 (arg0, type, mpc_asin);
9963 break;
9964
9965 CASE_FLT_FN (BUILT_IN_CACOS):
9966 if (validate_arg (arg0, COMPLEX_TYPE)
9967 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9968 return do_mpc_arg1 (arg0, type, mpc_acos);
9969 break;
9970
9971 CASE_FLT_FN (BUILT_IN_CATAN):
9972 if (validate_arg (arg0, COMPLEX_TYPE)
9973 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9974 return do_mpc_arg1 (arg0, type, mpc_atan);
9975 break;
9976
9977 CASE_FLT_FN (BUILT_IN_CASINH):
9978 if (validate_arg (arg0, COMPLEX_TYPE)
9979 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9980 return do_mpc_arg1 (arg0, type, mpc_asinh);
9981 break;
9982
9983 CASE_FLT_FN (BUILT_IN_CACOSH):
9984 if (validate_arg (arg0, COMPLEX_TYPE)
9985 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9986 return do_mpc_arg1 (arg0, type, mpc_acosh);
9987 break;
9988
9989 CASE_FLT_FN (BUILT_IN_CATANH):
9990 if (validate_arg (arg0, COMPLEX_TYPE)
9991 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9992 return do_mpc_arg1 (arg0, type, mpc_atanh);
9993 break;
9994
9995 CASE_FLT_FN (BUILT_IN_CABS):
9996 return fold_builtin_cabs (loc, arg0, type, fndecl);
9997
9998 CASE_FLT_FN (BUILT_IN_CARG):
9999 return fold_builtin_carg (loc, arg0, type);
10000
10001 CASE_FLT_FN (BUILT_IN_SQRT):
10002 return fold_builtin_sqrt (loc, arg0, type);
10003
10004 CASE_FLT_FN (BUILT_IN_CBRT):
10005 return fold_builtin_cbrt (loc, arg0, type);
10006
10007 CASE_FLT_FN (BUILT_IN_ASIN):
10008 if (validate_arg (arg0, REAL_TYPE))
10009 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10010 &dconstm1, &dconst1, true);
10011 break;
10012
10013 CASE_FLT_FN (BUILT_IN_ACOS):
10014 if (validate_arg (arg0, REAL_TYPE))
10015 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10016 &dconstm1, &dconst1, true);
10017 break;
10018
10019 CASE_FLT_FN (BUILT_IN_ATAN):
10020 if (validate_arg (arg0, REAL_TYPE))
10021 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10022 break;
10023
10024 CASE_FLT_FN (BUILT_IN_ASINH):
10025 if (validate_arg (arg0, REAL_TYPE))
10026 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10027 break;
10028
10029 CASE_FLT_FN (BUILT_IN_ACOSH):
10030 if (validate_arg (arg0, REAL_TYPE))
10031 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10032 &dconst1, NULL, true);
10033 break;
10034
10035 CASE_FLT_FN (BUILT_IN_ATANH):
10036 if (validate_arg (arg0, REAL_TYPE))
10037 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10038 &dconstm1, &dconst1, false);
10039 break;
10040
10041 CASE_FLT_FN (BUILT_IN_SIN):
10042 if (validate_arg (arg0, REAL_TYPE))
10043 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10044 break;
10045
10046 CASE_FLT_FN (BUILT_IN_COS):
10047 return fold_builtin_cos (loc, arg0, type, fndecl);
10048
10049 CASE_FLT_FN (BUILT_IN_TAN):
10050 return fold_builtin_tan (arg0, type);
10051
10052 CASE_FLT_FN (BUILT_IN_CEXP):
10053 return fold_builtin_cexp (loc, arg0, type);
10054
10055 CASE_FLT_FN (BUILT_IN_CEXPI):
10056 if (validate_arg (arg0, REAL_TYPE))
10057 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10058 break;
10059
10060 CASE_FLT_FN (BUILT_IN_SINH):
10061 if (validate_arg (arg0, REAL_TYPE))
10062 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10063 break;
10064
10065 CASE_FLT_FN (BUILT_IN_COSH):
10066 return fold_builtin_cosh (loc, arg0, type, fndecl);
10067
10068 CASE_FLT_FN (BUILT_IN_TANH):
10069 if (validate_arg (arg0, REAL_TYPE))
10070 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10071 break;
10072
10073 CASE_FLT_FN (BUILT_IN_ERF):
10074 if (validate_arg (arg0, REAL_TYPE))
10075 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10076 break;
10077
10078 CASE_FLT_FN (BUILT_IN_ERFC):
10079 if (validate_arg (arg0, REAL_TYPE))
10080 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10081 break;
10082
10083 CASE_FLT_FN (BUILT_IN_TGAMMA):
10084 if (validate_arg (arg0, REAL_TYPE))
10085 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10086 break;
10087
10088 CASE_FLT_FN (BUILT_IN_EXP):
10089 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10090
10091 CASE_FLT_FN (BUILT_IN_EXP2):
10092 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10093
10094 CASE_FLT_FN (BUILT_IN_EXP10):
10095 CASE_FLT_FN (BUILT_IN_POW10):
10096 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10097
10098 CASE_FLT_FN (BUILT_IN_EXPM1):
10099 if (validate_arg (arg0, REAL_TYPE))
10100 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10101 break;
10102
10103 CASE_FLT_FN (BUILT_IN_LOG):
10104 if (validate_arg (arg0, REAL_TYPE))
10105 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10106 break;
10107
10108 CASE_FLT_FN (BUILT_IN_LOG2):
10109 if (validate_arg (arg0, REAL_TYPE))
10110 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10111 break;
10112
10113 CASE_FLT_FN (BUILT_IN_LOG10):
10114 if (validate_arg (arg0, REAL_TYPE))
10115 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10116 break;
10117
10118 CASE_FLT_FN (BUILT_IN_LOG1P):
10119 if (validate_arg (arg0, REAL_TYPE))
10120 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10121 &dconstm1, NULL, false);
10122 break;
10123
10124 CASE_FLT_FN (BUILT_IN_J0):
10125 if (validate_arg (arg0, REAL_TYPE))
10126 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10127 NULL, NULL, 0);
10128 break;
10129
10130 CASE_FLT_FN (BUILT_IN_J1):
10131 if (validate_arg (arg0, REAL_TYPE))
10132 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10133 NULL, NULL, 0);
10134 break;
10135
10136 CASE_FLT_FN (BUILT_IN_Y0):
10137 if (validate_arg (arg0, REAL_TYPE))
10138 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10139 &dconst0, NULL, false);
10140 break;
10141
10142 CASE_FLT_FN (BUILT_IN_Y1):
10143 if (validate_arg (arg0, REAL_TYPE))
10144 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10145 &dconst0, NULL, false);
10146 break;
10147
10148 CASE_FLT_FN (BUILT_IN_NAN):
10149 case BUILT_IN_NAND32:
10150 case BUILT_IN_NAND64:
10151 case BUILT_IN_NAND128:
10152 return fold_builtin_nan (arg0, type, true);
10153
10154 CASE_FLT_FN (BUILT_IN_NANS):
10155 return fold_builtin_nan (arg0, type, false);
10156
10157 CASE_FLT_FN (BUILT_IN_FLOOR):
10158 return fold_builtin_floor (loc, fndecl, arg0);
10159
10160 CASE_FLT_FN (BUILT_IN_CEIL):
10161 return fold_builtin_ceil (loc, fndecl, arg0);
10162
10163 CASE_FLT_FN (BUILT_IN_TRUNC):
10164 return fold_builtin_trunc (loc, fndecl, arg0);
10165
10166 CASE_FLT_FN (BUILT_IN_ROUND):
10167 return fold_builtin_round (loc, fndecl, arg0);
10168
10169 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10170 CASE_FLT_FN (BUILT_IN_RINT):
10171 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10172
10173 CASE_FLT_FN (BUILT_IN_ICEIL):
10174 CASE_FLT_FN (BUILT_IN_LCEIL):
10175 CASE_FLT_FN (BUILT_IN_LLCEIL):
10176 CASE_FLT_FN (BUILT_IN_LFLOOR):
10177 CASE_FLT_FN (BUILT_IN_IFLOOR):
10178 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10179 CASE_FLT_FN (BUILT_IN_IROUND):
10180 CASE_FLT_FN (BUILT_IN_LROUND):
10181 CASE_FLT_FN (BUILT_IN_LLROUND):
10182 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10183
10184 CASE_FLT_FN (BUILT_IN_IRINT):
10185 CASE_FLT_FN (BUILT_IN_LRINT):
10186 CASE_FLT_FN (BUILT_IN_LLRINT):
10187 return fold_fixed_mathfn (loc, fndecl, arg0);
10188
10189 case BUILT_IN_BSWAP16:
10190 case BUILT_IN_BSWAP32:
10191 case BUILT_IN_BSWAP64:
10192 return fold_builtin_bswap (fndecl, arg0);
10193
10194 CASE_INT_FN (BUILT_IN_FFS):
10195 CASE_INT_FN (BUILT_IN_CLZ):
10196 CASE_INT_FN (BUILT_IN_CTZ):
10197 CASE_INT_FN (BUILT_IN_CLRSB):
10198 CASE_INT_FN (BUILT_IN_POPCOUNT):
10199 CASE_INT_FN (BUILT_IN_PARITY):
10200 return fold_builtin_bitop (fndecl, arg0);
10201
10202 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10203 return fold_builtin_signbit (loc, arg0, type);
10204
10205 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10206 return fold_builtin_significand (loc, arg0, type);
10207
10208 CASE_FLT_FN (BUILT_IN_ILOGB):
10209 CASE_FLT_FN (BUILT_IN_LOGB):
10210 return fold_builtin_logb (loc, arg0, type);
10211
10212 case BUILT_IN_ISASCII:
10213 return fold_builtin_isascii (loc, arg0);
10214
10215 case BUILT_IN_TOASCII:
10216 return fold_builtin_toascii (loc, arg0);
10217
10218 case BUILT_IN_ISDIGIT:
10219 return fold_builtin_isdigit (loc, arg0);
10220
10221 CASE_FLT_FN (BUILT_IN_FINITE):
10222 case BUILT_IN_FINITED32:
10223 case BUILT_IN_FINITED64:
10224 case BUILT_IN_FINITED128:
10225 case BUILT_IN_ISFINITE:
10226 {
10227 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10228 if (ret)
10229 return ret;
10230 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10231 }
10232
10233 CASE_FLT_FN (BUILT_IN_ISINF):
10234 case BUILT_IN_ISINFD32:
10235 case BUILT_IN_ISINFD64:
10236 case BUILT_IN_ISINFD128:
10237 {
10238 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10239 if (ret)
10240 return ret;
10241 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10242 }
10243
10244 case BUILT_IN_ISNORMAL:
10245 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10246
10247 case BUILT_IN_ISINF_SIGN:
10248 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10249
10250 CASE_FLT_FN (BUILT_IN_ISNAN):
10251 case BUILT_IN_ISNAND32:
10252 case BUILT_IN_ISNAND64:
10253 case BUILT_IN_ISNAND128:
10254 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10255
10256 case BUILT_IN_FREE:
10257 if (integer_zerop (arg0))
10258 return build_empty_stmt (loc);
10259 break;
10260
10261 default:
10262 break;
10263 }
10264
10265 return NULL_TREE;
10267 }
10268
10269 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10270 This function returns NULL_TREE if no simplification was possible. */
10271
10272 static tree
10273 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10274 {
10275 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10276 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10277
10278 switch (fcode)
10279 {
10280 CASE_FLT_FN (BUILT_IN_JN):
10281 if (validate_arg (arg0, INTEGER_TYPE)
10282 && validate_arg (arg1, REAL_TYPE))
10283 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10284 break;
10285
10286 CASE_FLT_FN (BUILT_IN_YN):
10287 if (validate_arg (arg0, INTEGER_TYPE)
10288 && validate_arg (arg1, REAL_TYPE))
10289 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10290 &dconst0, false);
10291 break;
10292
10293 CASE_FLT_FN (BUILT_IN_DREM):
10294 CASE_FLT_FN (BUILT_IN_REMAINDER):
10295 if (validate_arg (arg0, REAL_TYPE)
10296 && validate_arg (arg1, REAL_TYPE))
10297 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10298 break;
10299
10300 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10301 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10302 if (validate_arg (arg0, REAL_TYPE)
10303 && validate_arg (arg1, POINTER_TYPE))
10304 return do_mpfr_lgamma_r (arg0, arg1, type);
10305 break;
10306
10307 CASE_FLT_FN (BUILT_IN_ATAN2):
10308 if (validate_arg (arg0, REAL_TYPE)
10309 && validate_arg (arg1, REAL_TYPE))
10310 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10311 break;
10312
10313 CASE_FLT_FN (BUILT_IN_FDIM):
10314 if (validate_arg (arg0, REAL_TYPE)
10315 && validate_arg (arg1, REAL_TYPE))
10316 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10317 break;
10318
10319 CASE_FLT_FN (BUILT_IN_HYPOT):
10320 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10321
10322 CASE_FLT_FN (BUILT_IN_CPOW):
10323 if (validate_arg (arg0, COMPLEX_TYPE)
10324 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10325 && validate_arg (arg1, COMPLEX_TYPE)
10326 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10327 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10328 break;
10329
10330 CASE_FLT_FN (BUILT_IN_LDEXP):
10331 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10332 CASE_FLT_FN (BUILT_IN_SCALBN):
10333 CASE_FLT_FN (BUILT_IN_SCALBLN):
10334 return fold_builtin_load_exponent (loc, arg0, arg1,
10335 type, /*ldexp=*/false);
10336
10337 CASE_FLT_FN (BUILT_IN_FREXP):
10338 return fold_builtin_frexp (loc, arg0, arg1, type);
10339
10340 CASE_FLT_FN (BUILT_IN_MODF):
10341 return fold_builtin_modf (loc, arg0, arg1, type);
10342
10343 case BUILT_IN_STRSTR:
10344 return fold_builtin_strstr (loc, arg0, arg1, type);
10345
10346 case BUILT_IN_STRSPN:
10347 return fold_builtin_strspn (loc, arg0, arg1);
10348
10349 case BUILT_IN_STRCSPN:
10350 return fold_builtin_strcspn (loc, arg0, arg1);
10351
10352 case BUILT_IN_STRCHR:
10353 case BUILT_IN_INDEX:
10354 return fold_builtin_strchr (loc, arg0, arg1, type);
10355
10356 case BUILT_IN_STRRCHR:
10357 case BUILT_IN_RINDEX:
10358 return fold_builtin_strrchr (loc, arg0, arg1, type);
10359
10360 case BUILT_IN_STRCMP:
10361 return fold_builtin_strcmp (loc, arg0, arg1);
10362
10363 case BUILT_IN_STRPBRK:
10364 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10365
10366 case BUILT_IN_EXPECT:
10367 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10368
10369 CASE_FLT_FN (BUILT_IN_POW):
10370 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10371
10372 CASE_FLT_FN (BUILT_IN_POWI):
10373 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10374
10375 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10376 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10377
10378 CASE_FLT_FN (BUILT_IN_FMIN):
10379 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10380
10381 CASE_FLT_FN (BUILT_IN_FMAX):
10382 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10383
10384 case BUILT_IN_ISGREATER:
10385 return fold_builtin_unordered_cmp (loc, fndecl,
10386 arg0, arg1, UNLE_EXPR, LE_EXPR);
10387 case BUILT_IN_ISGREATEREQUAL:
10388 return fold_builtin_unordered_cmp (loc, fndecl,
10389 arg0, arg1, UNLT_EXPR, LT_EXPR);
10390 case BUILT_IN_ISLESS:
10391 return fold_builtin_unordered_cmp (loc, fndecl,
10392 arg0, arg1, UNGE_EXPR, GE_EXPR);
10393 case BUILT_IN_ISLESSEQUAL:
10394 return fold_builtin_unordered_cmp (loc, fndecl,
10395 arg0, arg1, UNGT_EXPR, GT_EXPR);
10396 case BUILT_IN_ISLESSGREATER:
10397 return fold_builtin_unordered_cmp (loc, fndecl,
10398 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10399 case BUILT_IN_ISUNORDERED:
10400 return fold_builtin_unordered_cmp (loc, fndecl,
10401 arg0, arg1, UNORDERED_EXPR,
10402 NOP_EXPR);
10403
10404 /* We do the folding for va_start in the expander. */
10405 case BUILT_IN_VA_START:
10406 break;
10407
10408 case BUILT_IN_OBJECT_SIZE:
10409 return fold_builtin_object_size (arg0, arg1);
10410
10411 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10412 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10413
10414 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10415 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10416
10417 default:
10418 break;
10419 }
10420 return NULL_TREE;
10421 }
10422
10423 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10424 and ARG2.
10425 This function returns NULL_TREE if no simplification was possible. */
10426
10427 static tree
10428 fold_builtin_3 (location_t loc, tree fndecl,
10429 tree arg0, tree arg1, tree arg2)
10430 {
10431 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10432 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10433 switch (fcode)
10434 {
10435
10436 CASE_FLT_FN (BUILT_IN_SINCOS):
10437 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10438
10439 CASE_FLT_FN (BUILT_IN_FMA):
10440 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10442
10443 CASE_FLT_FN (BUILT_IN_REMQUO):
10444 if (validate_arg (arg0, REAL_TYPE)
10445 && validate_arg (arg1, REAL_TYPE)
10446 && validate_arg (arg2, POINTER_TYPE))
10447 return do_mpfr_remquo (arg0, arg1, arg2);
10448 break;
10449
10450 case BUILT_IN_STRNCMP:
10451 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10452
10453 case BUILT_IN_MEMCHR:
10454 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10455
10456 case BUILT_IN_BCMP:
10457 case BUILT_IN_MEMCMP:
10458 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10459
10460 case BUILT_IN_EXPECT:
10461 return fold_builtin_expect (loc, arg0, arg1, arg2);
10462
10463 case BUILT_IN_ADD_OVERFLOW:
10464 case BUILT_IN_SUB_OVERFLOW:
10465 case BUILT_IN_MUL_OVERFLOW:
10466 case BUILT_IN_SADD_OVERFLOW:
10467 case BUILT_IN_SADDL_OVERFLOW:
10468 case BUILT_IN_SADDLL_OVERFLOW:
10469 case BUILT_IN_SSUB_OVERFLOW:
10470 case BUILT_IN_SSUBL_OVERFLOW:
10471 case BUILT_IN_SSUBLL_OVERFLOW:
10472 case BUILT_IN_SMUL_OVERFLOW:
10473 case BUILT_IN_SMULL_OVERFLOW:
10474 case BUILT_IN_SMULLL_OVERFLOW:
10475 case BUILT_IN_UADD_OVERFLOW:
10476 case BUILT_IN_UADDL_OVERFLOW:
10477 case BUILT_IN_UADDLL_OVERFLOW:
10478 case BUILT_IN_USUB_OVERFLOW:
10479 case BUILT_IN_USUBL_OVERFLOW:
10480 case BUILT_IN_USUBLL_OVERFLOW:
10481 case BUILT_IN_UMUL_OVERFLOW:
10482 case BUILT_IN_UMULL_OVERFLOW:
10483 case BUILT_IN_UMULLL_OVERFLOW:
10484 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10485
10486 default:
10487 break;
10488 }
10489 return NULL_TREE;
10490 }
10491
10492 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10493 arguments. The trailing bool argument (historically IGNORE, true if
10494 the result of the call is ignored) is currently unused. This function
10495 returns NULL_TREE if no simplification was possible. */
10496
10497 tree
10498 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10499 {
10500 tree ret = NULL_TREE;
10501
10502 switch (nargs)
10503 {
10504 case 0:
10505 ret = fold_builtin_0 (loc, fndecl);
10506 break;
10507 case 1:
10508 ret = fold_builtin_1 (loc, fndecl, args[0]);
10509 break;
10510 case 2:
10511 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10512 break;
10513 case 3:
10514 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10515 break;
10516 default:
10517 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10518 break;
10519 }
10520 if (ret)
10521 {
10522 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10523 SET_EXPR_LOCATION (ret, loc);
10524 TREE_NO_WARNING (ret) = 1;
10525 return ret;
10526 }
10527 return NULL_TREE;
10528 }
10529
10530 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10531 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10532 of arguments in ARGS to be omitted. OLDNARGS is the number of
10533 elements in ARGS. */
10534
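/* A small worked example (illustrative): with OLDNARGS == 4, SKIP == 2
   and N == 1, the rewritten call has 3 arguments laid out as

     buffer[0] = the single new argument from NEWARGS
     buffer[1] = args[2]
     buffer[2] = args[3]

   i.e. the N new arguments come first, followed by the old arguments
   past the skipped ones.  */
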
10535 static tree
10536 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10537 int skip, tree fndecl, int n, va_list newargs)
10538 {
10539 int nargs = oldnargs - skip + n;
10540 tree *buffer;
10541
10542 if (n > 0)
10543 {
10544 int i, j;
10545
10546 buffer = XALLOCAVEC (tree, nargs);
10547 for (i = 0; i < n; i++)
10548 buffer[i] = va_arg (newargs, tree);
10549 for (j = skip; j < oldnargs; j++, i++)
10550 buffer[i] = args[j];
10551 }
10552 else
10553 buffer = args + skip;
10554
10555 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10556 }
10557
10558 /* Return true if FNDECL shouldn't be folded right now.
10559 If a built-in function has an always_inline attribute wrapper,
10560 defer folding it until after always_inline functions have
10561 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10562 might not be performed. */
10563
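/* For illustration, a glibc-style _FORTIFY_SOURCE wrapper looks
   roughly like this (a sketch, not the exact library code):

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
                                      __builtin_object_size (__dest, 1));
     }

   Folding the builtin before such wrappers have been inlined would
   bypass the object-size check, hence the deferral here.  */
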
10564 bool
10565 avoid_folding_inline_builtin (tree fndecl)
10566 {
10567 return (DECL_DECLARED_INLINE_P (fndecl)
10568 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10569 && cfun
10570 && !cfun->always_inline_functions_inlined
10571 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10572 }
10573
10574 /* A wrapper function for builtin folding that prevents warnings for
10575 "statement without effect" and the like, caused by removing the
10576 call node before the warning is generated. */
10577
10578 tree
10579 fold_call_expr (location_t loc, tree exp, bool ignore)
10580 {
10581 tree ret = NULL_TREE;
10582 tree fndecl = get_callee_fndecl (exp);
10583 if (fndecl
10584 && TREE_CODE (fndecl) == FUNCTION_DECL
10585 && DECL_BUILT_IN (fndecl)
10586 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10587 yet. Defer folding until we see all the arguments
10588 (after inlining). */
10589 && !CALL_EXPR_VA_ARG_PACK (exp))
10590 {
10591 int nargs = call_expr_nargs (exp);
10592
10593 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10594 instead last argument is __builtin_va_arg_pack (). Defer folding
10595 even in that case, until arguments are finalized. */
10596 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10597 {
10598 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10599 if (fndecl2
10600 && TREE_CODE (fndecl2) == FUNCTION_DECL
10601 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10602 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10603 return NULL_TREE;
10604 }
10605
10606 if (avoid_folding_inline_builtin (fndecl))
10607 return NULL_TREE;
10608
10609 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10610 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10611 CALL_EXPR_ARGP (exp), ignore);
10612 else
10613 {
10614 tree *args = CALL_EXPR_ARGP (exp);
10615 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10616 if (ret)
10617 return ret;
10618 }
10619 }
10620 return NULL_TREE;
10621 }
10622
10623 /* Fold a CALL_EXPR whose function expression is FN. N arguments
10624 are passed in the array ARGARRAY. Return a folded
10625 expression or NULL_TREE if no simplification was possible. */
10626
10627 tree
10628 fold_builtin_call_array (location_t loc, tree,
10629 tree fn,
10630 int n,
10631 tree *argarray)
10632 {
10633 if (TREE_CODE (fn) != ADDR_EXPR)
10634 return NULL_TREE;
10635
10636 tree fndecl = TREE_OPERAND (fn, 0);
10637 if (TREE_CODE (fndecl) == FUNCTION_DECL
10638 && DECL_BUILT_IN (fndecl))
10639 {
10640 /* If last argument is __builtin_va_arg_pack (), arguments to this
10641 function are not finalized yet. Defer folding until they are. */
10642 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10643 {
10644 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10645 if (fndecl2
10646 && TREE_CODE (fndecl2) == FUNCTION_DECL
10647 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10648 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10649 return NULL_TREE;
10650 }
10651 if (avoid_folding_inline_builtin (fndecl))
10652 return NULL_TREE;
10653 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10654 return targetm.fold_builtin (fndecl, n, argarray, false);
10655 else
10656 return fold_builtin_n (loc, fndecl, argarray, n, false);
10657 }
10658
10659 return NULL_TREE;
10660 }
10661
10662 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10663 along with N new arguments specified as the "..." parameters. SKIP
10664 is the number of arguments in EXP to be omitted. This function is used
10665 to do varargs-to-varargs transformations. */
10666
10667 static tree
10668 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10669 {
10670 va_list ap;
10671 tree t;
10672
10673 va_start (ap, n);
10674 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10675 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10676 va_end (ap);
10677
10678 return t;
10679 }
10680
10681 /* Validate a single argument ARG against a tree code CODE representing
10682 a type. */
10683
10684 static bool
10685 validate_arg (const_tree arg, enum tree_code code)
10686 {
10687 if (!arg)
10688 return false;
10689 else if (code == POINTER_TYPE)
10690 return POINTER_TYPE_P (TREE_TYPE (arg));
10691 else if (code == INTEGER_TYPE)
10692 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10693 return code == TREE_CODE (TREE_TYPE (arg));
10694 }
10695
10696 /* This function validates the types of a function call argument list
10697 against a specified list of tree_codes. If the last specifier is a 0,
10698 that represents an ellipsis; otherwise the last specifier must be a
10699 VOID_TYPE.
10700
10701 This is the GIMPLE version of validate_arglist. Eventually we want to
10702 completely convert builtins.c to work from GIMPLEs and the tree based
10703 validate_arglist will then be removed. */
10704
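/* Example uses (illustrative):

     validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   accepts exactly one pointer followed by one integer, while

     validate_gimple_arglist (call, REAL_TYPE, 0)

   accepts one real argument followed by anything (the trailing 0
   being the ellipsis).  */
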
10705 bool
10706 validate_gimple_arglist (const gcall *call, ...)
10707 {
10708 enum tree_code code;
10709 bool res = false;
10710 va_list ap;
10711 const_tree arg;
10712 size_t i;
10713
10714 va_start (ap, call);
10715 i = 0;
10716
10717 do
10718 {
10719 code = (enum tree_code) va_arg (ap, int);
10720 switch (code)
10721 {
10722 case 0:
10723 /* This signifies an ellipsis; any further arguments are all ok. */
10724 res = true;
10725 goto end;
10726 case VOID_TYPE:
10727 /* This signifies an endlink, if no arguments remain, return
10728 true, otherwise return false. */
10729 res = (i == gimple_call_num_args (call));
10730 goto end;
10731 default:
10732 /* If no parameters remain or the parameter's code does not
10733 match the specified code, return false. Otherwise continue
10734 checking any remaining arguments. */
10735 arg = gimple_call_arg (call, i++);
10736 if (!validate_arg (arg, code))
10737 goto end;
10738 break;
10739 }
10740 }
10741 while (1);
10742
10743 /* We need gotos here since we can only have one va_end in a
10744 function. */
10745 end: ;
10746 va_end (ap);
10747
10748 return res;
10749 }
10750
10751 /* Default target-specific builtin expander that does nothing. */
10752
10753 rtx
10754 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10755 rtx target ATTRIBUTE_UNUSED,
10756 rtx subtarget ATTRIBUTE_UNUSED,
10757 machine_mode mode ATTRIBUTE_UNUSED,
10758 int ignore ATTRIBUTE_UNUSED)
10759 {
10760 return NULL_RTX;
10761 }
10762
10763 /* Returns true if EXP represents data that would potentially reside
10764 in a readonly section. */
10765
10766 bool
10767 readonly_data_expr (tree exp)
10768 {
10769 STRIP_NOPS (exp);
10770
10771 if (TREE_CODE (exp) != ADDR_EXPR)
10772 return false;
10773
10774 exp = get_base_address (TREE_OPERAND (exp, 0));
10775 if (!exp)
10776 return false;
10777
10778 /* Make sure we call decl_readonly_section only for trees it
10779 can handle (since it returns true for everything it doesn't
10780 understand). */
10781 if (TREE_CODE (exp) == STRING_CST
10782 || TREE_CODE (exp) == CONSTRUCTOR
10783 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10784 return decl_readonly_section (exp, 0);
10785 else
10786 return false;
10787 }
10788
10789 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10790 to the call, and TYPE is its return type.
10791
10792 Return NULL_TREE if no simplification was possible, otherwise return the
10793 simplified form of the call as a tree.
10794
10795 The simplified form may be a constant or other expression which
10796 computes the same value, but in a more efficient manner (including
10797 calls to other builtin functions).
10798
10799 The call may contain arguments which need to be evaluated, but
10800 which are not useful to determine the result of the call. In
10801 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10802 COMPOUND_EXPR will be an argument which must be evaluated.
10803 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10804 COMPOUND_EXPR in the chain will contain the tree for the simplified
10805 form of the builtin function call. */
10806
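/* Illustrative examples of the transformations below (sketches; the
   exact trees GCC builds may differ):

     strstr (s, "")       ->  (char *) s
     strstr (s, "c")      ->  strchr (s, 'c')
     strstr ("abc", "b")  ->  (char *) "abc" + 1
     strstr ("abc", "d")  ->  (char *) 0  */
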
10807 static tree
10808 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10809 {
10810 if (!validate_arg (s1, POINTER_TYPE)
10811 || !validate_arg (s2, POINTER_TYPE))
10812 return NULL_TREE;
10813 else
10814 {
10815 tree fn;
10816 const char *p1, *p2;
10817
10818 p2 = c_getstr (s2);
10819 if (p2 == NULL)
10820 return NULL_TREE;
10821
10822 p1 = c_getstr (s1);
10823 if (p1 != NULL)
10824 {
10825 const char *r = strstr (p1, p2);
10826 tree tem;
10827
10828 if (r == NULL)
10829 return build_int_cst (TREE_TYPE (s1), 0);
10830
10831 /* Return an offset into the constant string argument. */
10832 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10833 return fold_convert_loc (loc, type, tem);
10834 }
10835
10836 /* The argument is const char *, and the result is char *, so we need
10837 a type conversion here to avoid a warning. */
10838 if (p2[0] == '\0')
10839 return fold_convert_loc (loc, type, s1);
10840
10841 if (p2[1] != '\0')
10842 return NULL_TREE;
10843
10844 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10845 if (!fn)
10846 return NULL_TREE;
10847
10848 /* New argument list transforming strstr(s1, s2) to
10849 strchr(s1, s2[0]). */
10850 return build_call_expr_loc (loc, fn, 2, s1,
10851 build_int_cst (integer_type_node, p2[0]));
10852 }
10853 }
10854
10855 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10856 the call, and TYPE is its return type.
10857
10858 Return NULL_TREE if no simplification was possible, otherwise return the
10859 simplified form of the call as a tree.
10860
10861 The simplified form may be a constant or other expression which
10862 computes the same value, but in a more efficient manner (including
10863 calls to other builtin functions).
10864
10865 The call may contain arguments which need to be evaluated, but
10866 which are not useful to determine the result of the call. In
10867 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10868 COMPOUND_EXPR will be an argument which must be evaluated.
10869 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10870 COMPOUND_EXPR in the chain will contain the tree for the simplified
10871 form of the builtin function call. */
10872
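/* Illustrative examples (assuming the usual execution character set):

     strchr ("hello", 'l')  ->  (char *) "hello" + 2
     strchr ("hello", 'z')  ->  (char *) 0

   Nothing is folded unless S1 is a string constant and S2 a constant
   character.  */
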
10873 static tree
10874 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10875 {
10876 if (!validate_arg (s1, POINTER_TYPE)
10877 || !validate_arg (s2, INTEGER_TYPE))
10878 return NULL_TREE;
10879 else
10880 {
10881 const char *p1;
10882
10883 if (TREE_CODE (s2) != INTEGER_CST)
10884 return NULL_TREE;
10885
10886 p1 = c_getstr (s1);
10887 if (p1 != NULL)
10888 {
10889 char c;
10890 const char *r;
10891 tree tem;
10892
10893 if (target_char_cast (s2, &c))
10894 return NULL_TREE;
10895
10896 r = strchr (p1, c);
10897
10898 if (r == NULL)
10899 return build_int_cst (TREE_TYPE (s1), 0);
10900
10901 /* Return an offset into the constant string argument. */
10902 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10903 return fold_convert_loc (loc, type, tem);
10904 }
10905 return NULL_TREE;
10906 }
10907 }
10908
10909 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10910 the call, and TYPE is its return type.
10911
10912 Return NULL_TREE if no simplification was possible, otherwise return the
10913 simplified form of the call as a tree.
10914
10915 The simplified form may be a constant or other expression which
10916 computes the same value, but in a more efficient manner (including
10917 calls to other builtin functions).
10918
10919 The call may contain arguments which need to be evaluated, but
10920 which are not useful to determine the result of the call. In
10921 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10922 COMPOUND_EXPR will be an argument which must be evaluated.
10923 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10924 COMPOUND_EXPR in the chain will contain the tree for the simplified
10925 form of the builtin function call. */
10926
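/* Illustrative examples:

     strrchr ("hello", 'l')  ->  (char *) "hello" + 3
     strrchr (s, '\0')       ->  strchr (s, '\0')

   The latter holds because both functions return a pointer to the
   terminating NUL when searching for '\0'.  */
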
10927 static tree
10928 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10929 {
10930 if (!validate_arg (s1, POINTER_TYPE)
10931 || !validate_arg (s2, INTEGER_TYPE))
10932 return NULL_TREE;
10933 else
10934 {
10935 tree fn;
10936 const char *p1;
10937
10938 if (TREE_CODE (s2) != INTEGER_CST)
10939 return NULL_TREE;
10940
10941 p1 = c_getstr (s1);
10942 if (p1 != NULL)
10943 {
10944 char c;
10945 const char *r;
10946 tree tem;
10947
10948 if (target_char_cast (s2, &c))
10949 return NULL_TREE;
10950
10951 r = strrchr (p1, c);
10952
10953 if (r == NULL)
10954 return build_int_cst (TREE_TYPE (s1), 0);
10955
10956 /* Return an offset into the constant string argument. */
10957 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10958 return fold_convert_loc (loc, type, tem);
10959 }
10960
10961 if (! integer_zerop (s2))
10962 return NULL_TREE;
10963
10964 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10965 if (!fn)
10966 return NULL_TREE;
10967
10968 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10969 return build_call_expr_loc (loc, fn, 2, s1, s2);
10970 }
10971 }
10972
10973 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10974 to the call, and TYPE is its return type.
10975
10976 Return NULL_TREE if no simplification was possible, otherwise return the
10977 simplified form of the call as a tree.
10978
10979 The simplified form may be a constant or other expression which
10980 computes the same value, but in a more efficient manner (including
10981 calls to other builtin functions).
10982
10983 The call may contain arguments which need to be evaluated, but
10984 which are not useful to determine the result of the call. In
10985 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10986 COMPOUND_EXPR will be an argument which must be evaluated.
10987 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10988 COMPOUND_EXPR in the chain will contain the tree for the simplified
10989 form of the builtin function call. */
10990
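/* Illustrative examples:

     strpbrk ("abc", "xc")  ->  (char *) "abc" + 2
     strpbrk (s, "")        ->  (char *) 0   (S still evaluated)
     strpbrk (s, "c")       ->  strchr (s, 'c')  */
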
10991 static tree
10992 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10993 {
10994 if (!validate_arg (s1, POINTER_TYPE)
10995 || !validate_arg (s2, POINTER_TYPE))
10996 return NULL_TREE;
10997 else
10998 {
10999 tree fn;
11000 const char *p1, *p2;
11001
11002 p2 = c_getstr (s2);
11003 if (p2 == NULL)
11004 return NULL_TREE;
11005
11006 p1 = c_getstr (s1);
11007 if (p1 != NULL)
11008 {
11009 const char *r = strpbrk (p1, p2);
11010 tree tem;
11011
11012 if (r == NULL)
11013 return build_int_cst (TREE_TYPE (s1), 0);
11014
11015 /* Return an offset into the constant string argument. */
11016 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11017 return fold_convert_loc (loc, type, tem);
11018 }
11019
11020 if (p2[0] == '\0')
11021 /* strpbrk(x, "") == NULL.
11022 Evaluate and ignore s1 in case it had side-effects. */
11023 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11024
11025 if (p2[1] != '\0')
11026 return NULL_TREE; /* Really call strpbrk. */
11027
11028 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11029 if (!fn)
11030 return NULL_TREE;
11031
11032 /* New argument list transforming strpbrk(s1, s2) to
11033 strchr(s1, s2[0]). */
11034 return build_call_expr_loc (loc, fn, 2, s1,
11035 build_int_cst (integer_type_node, p2[0]));
11036 }
11037 }
11038
11039 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11040 to the call.
11041
11042 Return NULL_TREE if no simplification was possible, otherwise return the
11043 simplified form of the call as a tree.
11044
11045 The simplified form may be a constant or other expression which
11046 computes the same value, but in a more efficient manner (including
11047 calls to other builtin functions).
11048
11049 The call may contain arguments which need to be evaluated, but
11050 which are not useful to determine the result of the call. In
11051 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11052 COMPOUND_EXPR will be an argument which must be evaluated.
11053 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11054 COMPOUND_EXPR in the chain will contain the tree for the simplified
11055 form of the builtin function call. */
11056
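/* Illustrative examples:

     strspn ("aab", "ab")  ->  3   (computed at compile time)
     strspn (s, "")        ->  0   (both arguments still evaluated)
     strspn ("", s)        ->  0   (both arguments still evaluated)  */
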
11057 static tree
11058 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11059 {
11060 if (!validate_arg (s1, POINTER_TYPE)
11061 || !validate_arg (s2, POINTER_TYPE))
11062 return NULL_TREE;
11063 else
11064 {
11065 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11066
11067 /* If both arguments are constants, evaluate at compile-time. */
11068 if (p1 && p2)
11069 {
11070 const size_t r = strspn (p1, p2);
11071 return build_int_cst (size_type_node, r);
11072 }
11073
11074 /* If either argument is "", the result is 0. */
11075 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11076 /* Evaluate and ignore both arguments in case either one has
11077 side-effects. */
11078 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11079 s1, s2);
11080 return NULL_TREE;
11081 }
11082 }
11083
11084 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11085 to the call.
11086
11087 Return NULL_TREE if no simplification was possible, otherwise return the
11088 simplified form of the call as a tree.
11089
11090 The simplified form may be a constant or other expression which
11091 computes the same value, but in a more efficient manner (including
11092 calls to other builtin functions).
11093
11094 The call may contain arguments which need to be evaluated, but
11095 which are not useful to determine the result of the call. In
11096 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11097 COMPOUND_EXPR will be an argument which must be evaluated.
11098 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11099 COMPOUND_EXPR in the chain will contain the tree for the simplified
11100 form of the builtin function call. */
11101
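/* Illustrative examples:

     strcspn ("abc", "c")  ->  2   (computed at compile time)
     strcspn ("", s)       ->  0   (S still evaluated)
     strcspn (s, "")       ->  strlen (s)  */
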
11102 static tree
11103 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11104 {
11105 if (!validate_arg (s1, POINTER_TYPE)
11106 || !validate_arg (s2, POINTER_TYPE))
11107 return NULL_TREE;
11108 else
11109 {
11110 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11111
11112 /* If both arguments are constants, evaluate at compile-time. */
11113 if (p1 && p2)
11114 {
11115 const size_t r = strcspn (p1, p2);
11116 return build_int_cst (size_type_node, r);
11117 }
11118
11119 /* If the first argument is "", the result is 0. */
11120 if (p1 && *p1 == '\0')
11121 {
11122 /* Evaluate and ignore argument s2 in case it has
11123 side-effects. */
11124 return omit_one_operand_loc (loc, size_type_node,
11125 size_zero_node, s2);
11126 }
11127
11128 /* If the second argument is "", return __builtin_strlen(s1). */
11129 if (p2 && *p2 == '\0')
11130 {
11131 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11132
11133 /* If the replacement _DECL isn't initialized, don't do the
11134 transformation. */
11135 if (!fn)
11136 return NULL_TREE;
11137
11138 return build_call_expr_loc (loc, fn, 1, s1);
11139 }
11140 return NULL_TREE;
11141 }
11142 }
11143
11144 /* Fold the next_arg or va_start call EXP. Returns true if an error
11145 was produced, false otherwise. This is done so that we don't output
11146 the same error or warning more than once. */
11147
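/* For example (illustrative), in

     void foo (int i, int j, ...)
     { va_list ap; va_start (ap, i); va_end (ap); }

   the second argument of va_start is not the last named parameter
   (J is), so the -Wvarargs warning below is emitted.  */
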
11148 bool
11149 fold_builtin_next_arg (tree exp, bool va_start_p)
11150 {
11151 tree fntype = TREE_TYPE (current_function_decl);
11152 int nargs = call_expr_nargs (exp);
11153 tree arg;
11154 /* There is a good chance the current input_location points inside the
11155 definition of the va_start macro (perhaps on the token for
11156 builtin) in a system header, so warnings will not be emitted.
11157 Use the location in real source code. */
11158 source_location current_location =
11159 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11160 NULL);
11161
11162 if (!stdarg_p (fntype))
11163 {
11164 error ("%<va_start%> used in function with fixed args");
11165 return true;
11166 }
11167
11168 if (va_start_p)
11169 {
11170 if (nargs != 2)
11171 {
11172 error ("wrong number of arguments to function %<va_start%>");
11173 return true;
11174 }
11175 arg = CALL_EXPR_ARG (exp, 1);
11176 }
11177 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
11178 when we checked the arguments and if needed issued a warning. */
11179 else
11180 {
11181 if (nargs == 0)
11182 {
11183 /* Evidently an out of date version of <stdarg.h>; can't validate
11184 va_start's second argument, but can still work as intended. */
11185 warning_at (current_location,
11186 OPT_Wvarargs,
11187 "%<__builtin_next_arg%> called without an argument");
11188 return true;
11189 }
11190 else if (nargs > 1)
11191 {
11192 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11193 return true;
11194 }
11195 arg = CALL_EXPR_ARG (exp, 0);
11196 }
11197
11198 if (TREE_CODE (arg) == SSA_NAME)
11199 arg = SSA_NAME_VAR (arg);
11200
11201 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11202 or __builtin_next_arg (0) the first time we see it, after checking
11203 the arguments and if needed issuing a warning. */
11204 if (!integer_zerop (arg))
11205 {
11206 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11207
11208 /* Strip off all nops for the sake of the comparison. This
11209 is not quite the same as STRIP_NOPS. It does more.
11210 We must also strip off INDIRECT_EXPR for C++ reference
11211 parameters. */
11212 while (CONVERT_EXPR_P (arg)
11213 || TREE_CODE (arg) == INDIRECT_REF)
11214 arg = TREE_OPERAND (arg, 0);
11215 if (arg != last_parm)
11216 {
11217 /* FIXME: Sometimes the tree optimizers can give us something other
11218 than the last argument even though the user used the last
11219 argument. We just warn and set the arg to be the last
11220 argument, so wrong code may still be generated because of
11221 it. */
11222 warning_at (current_location,
11223 OPT_Wvarargs,
11224 "second parameter of %<va_start%> not last named argument");
11225 }
11226
11227 /* Undefined by C99 7.15.1.4p4 (va_start):
11228 "If the parameter parmN is declared with the register storage
11229 class, with a function or array type, or with a type that is
11230 not compatible with the type that results after application of
11231 the default argument promotions, the behavior is undefined."
11232 */
11233 else if (DECL_REGISTER (arg))
11234 {
11235 warning_at (current_location,
11236 OPT_Wvarargs,
11237 "undefined behaviour when second parameter of "
11238 "%<va_start%> is declared with %<register%> storage");
11239 }
11240
11241 /* We want to verify the second parameter just once before the tree
11242 optimizers are run and then avoid keeping it in the tree,
11243 as otherwise we could warn even for correct code like:
11244 void foo (int i, ...)
11245 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11246 if (va_start_p)
11247 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11248 else
11249 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11250 }
11251 return false;
11252 }
11253
11254
11255 /* Expand a call EXP to __builtin_object_size. */
11256
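/* This expander is only reached when the call was not folded earlier;
   it yields the documented "unknown" answers, e.g. (illustrative):

     __builtin_object_size (p, 0)  ->  (size_t) -1
     __builtin_object_size (p, 2)  ->  (size_t) 0  */
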
11257 static rtx
11258 expand_builtin_object_size (tree exp)
11259 {
11260 tree ost;
11261 int object_size_type;
11262 tree fndecl = get_callee_fndecl (exp);
11263
11264 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11265 {
11266 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11267 exp, fndecl);
11268 expand_builtin_trap ();
11269 return const0_rtx;
11270 }
11271
11272 ost = CALL_EXPR_ARG (exp, 1);
11273 STRIP_NOPS (ost);
11274
11275 if (TREE_CODE (ost) != INTEGER_CST
11276 || tree_int_cst_sgn (ost) < 0
11277 || compare_tree_int (ost, 3) > 0)
11278 {
11279 error ("%Klast argument of %D is not integer constant between 0 and 3",
11280 exp, fndecl);
11281 expand_builtin_trap ();
11282 return const0_rtx;
11283 }
11284
11285 object_size_type = tree_to_shwi (ost);
11286
11287 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11288 }
11289
11290 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11291 FCODE is the BUILT_IN_* to use.
11292 Return NULL_RTX if we failed; the caller should emit a normal call,
11293 otherwise try to get the result in TARGET, if convenient (and in
11294 mode MODE if that's convenient). */
11295
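/* Illustrative behaviour when the length is a compile-time constant
   (a sketch; D and S are arbitrary pointers):

     __builtin___memcpy_chk (d, s, 32, 64)   ->  memcpy (d, s, 32)
     __builtin___memcpy_chk (d, s, 128, 64)  ->  warning "will always
       overflow" and expansion as a normal library call  */
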
11296 static rtx
11297 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11298 enum built_in_function fcode)
11299 {
11300 tree dest, src, len, size;
11301
11302 if (!validate_arglist (exp,
11303 POINTER_TYPE,
11304 fcode == BUILT_IN_MEMSET_CHK
11305 ? INTEGER_TYPE : POINTER_TYPE,
11306 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11307 return NULL_RTX;
11308
11309 dest = CALL_EXPR_ARG (exp, 0);
11310 src = CALL_EXPR_ARG (exp, 1);
11311 len = CALL_EXPR_ARG (exp, 2);
11312 size = CALL_EXPR_ARG (exp, 3);
11313
11314 if (! tree_fits_uhwi_p (size))
11315 return NULL_RTX;
11316
11317 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11318 {
11319 tree fn;
11320
11321 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11322 {
11323 warning_at (tree_nonartificial_location (exp),
11324 0, "%Kcall to %D will always overflow destination buffer",
11325 exp, get_callee_fndecl (exp));
11326 return NULL_RTX;
11327 }
11328
11329 fn = NULL_TREE;
11330 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11331 mem{cpy,pcpy,move,set} is available. */
11332 switch (fcode)
11333 {
11334 case BUILT_IN_MEMCPY_CHK:
11335 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11336 break;
11337 case BUILT_IN_MEMPCPY_CHK:
11338 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11339 break;
11340 case BUILT_IN_MEMMOVE_CHK:
11341 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11342 break;
11343 case BUILT_IN_MEMSET_CHK:
11344 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11345 break;
11346 default:
11347 break;
11348 }
11349
11350 if (! fn)
11351 return NULL_RTX;
11352
11353 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11354 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11355 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11356 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11357 }
11358 else if (fcode == BUILT_IN_MEMSET_CHK)
11359 return NULL_RTX;
11360 else
11361 {
11362 unsigned int dest_align = get_pointer_alignment (dest);
11363
11364 /* If DEST is not a pointer type, call the normal function. */
11365 if (dest_align == 0)
11366 return NULL_RTX;
11367
11368 /* If SRC and DEST are the same (and not volatile), do nothing. */
11369 if (operand_equal_p (src, dest, 0))
11370 {
11371 tree expr;
11372
11373 if (fcode != BUILT_IN_MEMPCPY_CHK)
11374 {
11375 /* Evaluate and ignore LEN in case it has side-effects. */
11376 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11377 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11378 }
11379
11380 expr = fold_build_pointer_plus (dest, len);
11381 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11382 }
11383
11384 /* __memmove_chk special case. */
11385 if (fcode == BUILT_IN_MEMMOVE_CHK)
11386 {
11387 unsigned int src_align = get_pointer_alignment (src);
11388
11389 if (src_align == 0)
11390 return NULL_RTX;
11391
11392 /* If src is categorized for a readonly section we can use
11393 normal __memcpy_chk. */
11394 if (readonly_data_expr (src))
11395 {
11396 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11397 if (!fn)
11398 return NULL_RTX;
11399 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11400 dest, src, len, size);
11401 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11402 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11403 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11404 }
11405 }
11406 return NULL_RTX;
11407 }
11408 }
11409
11410 /* Emit warning if a buffer overflow is detected at compile time. */
11411
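/* For example (illustrative):

     char buf[4];
     __builtin___strcpy_chk (buf, "hello", __builtin_object_size (buf, 1));

   Here the object size folds to 4 while strlen ("hello") is 5, so the
   "will always overflow" warning below fires at compile time.  */
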
11412 static void
11413 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11414 {
11415 int is_strlen = 0;
11416 tree len, size;
11417 location_t loc = tree_nonartificial_location (exp);
11418
11419 switch (fcode)
11420 {
11421 case BUILT_IN_STRCPY_CHK:
11422 case BUILT_IN_STPCPY_CHK:
11423 /* For __strcat_chk the warning will be emitted only if overflowing
11424 by at least strlen (dest) + 1 bytes. */
11425 case BUILT_IN_STRCAT_CHK:
11426 len = CALL_EXPR_ARG (exp, 1);
11427 size = CALL_EXPR_ARG (exp, 2);
11428 is_strlen = 1;
11429 break;
11430 case BUILT_IN_STRNCAT_CHK:
11431 case BUILT_IN_STRNCPY_CHK:
11432 case BUILT_IN_STPNCPY_CHK:
11433 len = CALL_EXPR_ARG (exp, 2);
11434 size = CALL_EXPR_ARG (exp, 3);
11435 break;
11436 case BUILT_IN_SNPRINTF_CHK:
11437 case BUILT_IN_VSNPRINTF_CHK:
11438 len = CALL_EXPR_ARG (exp, 1);
11439 size = CALL_EXPR_ARG (exp, 3);
11440 break;
11441 default:
11442 gcc_unreachable ();
11443 }
11444
11445 if (!len || !size)
11446 return;
11447
11448 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11449 return;
11450
11451 if (is_strlen)
11452 {
11453 len = c_strlen (len, 1);
11454 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11455 return;
11456 }
11457 else if (fcode == BUILT_IN_STRNCAT_CHK)
11458 {
11459 tree src = CALL_EXPR_ARG (exp, 1);
11460 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11461 return;
11462 src = c_strlen (src, 1);
11463 if (! src || ! tree_fits_uhwi_p (src))
11464 {
11465 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11466 exp, get_callee_fndecl (exp));
11467 return;
11468 }
11469 else if (tree_int_cst_lt (src, size))
11470 return;
11471 }
11472 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11473 return;
11474
11475 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11476 exp, get_callee_fndecl (exp));
11477 }
11478
11479 /* Emit warning if a buffer overflow is detected at compile time
11480 in __sprintf_chk/__vsprintf_chk calls. */
11481
11482 static void
11483 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11484 {
11485 tree size, len, fmt;
11486 const char *fmt_str;
11487 int nargs = call_expr_nargs (exp);
11488
11489 /* Verify the required arguments in the original call. */
11490
11491 if (nargs < 4)
11492 return;
11493 size = CALL_EXPR_ARG (exp, 2);
11494 fmt = CALL_EXPR_ARG (exp, 3);
11495
11496 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11497 return;
11498
11499 /* Check whether the format is a literal string constant. */
11500 fmt_str = c_getstr (fmt);
11501 if (fmt_str == NULL)
11502 return;
11503
11504 if (!init_target_chars ())
11505 return;
11506
11507 /* If the format doesn't contain % args or %%, we know its size. */
11508 if (strchr (fmt_str, target_percent) == 0)
11509 len = build_int_cstu (size_type_node, strlen (fmt_str));
11510 /* If the format is "%s" and the first ... argument is a string literal,
11511 we know it too. */
11512 else if (fcode == BUILT_IN_SPRINTF_CHK
11513 && strcmp (fmt_str, target_percent_s) == 0)
11514 {
11515 tree arg;
11516
11517 if (nargs < 5)
11518 return;
11519 arg = CALL_EXPR_ARG (exp, 4);
11520 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11521 return;
11522
11523 len = c_strlen (arg, 1);
11524 if (!len || ! tree_fits_uhwi_p (len))
11525 return;
11526 }
11527 else
11528 return;
11529
11530 if (! tree_int_cst_lt (len, size))
11531 warning_at (tree_nonartificial_location (exp),
11532 0, "%Kcall to %D will always overflow destination buffer",
11533 exp, get_callee_fndecl (exp));
11534 }
11535
11536 /* Emit a warning if free is called with the address of a variable. */
11537
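/* For example (illustrative), with

     int i;
     free (&i);

   the -Wfree-nonheap-object warning below is emitted.  */
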
11538 static void
11539 maybe_emit_free_warning (tree exp)
11540 {
11541 tree arg = CALL_EXPR_ARG (exp, 0);
11542
11543 STRIP_NOPS (arg);
11544 if (TREE_CODE (arg) != ADDR_EXPR)
11545 return;
11546
11547 arg = get_base_address (TREE_OPERAND (arg, 0));
11548 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11549 return;
11550
11551 if (SSA_VAR_P (arg))
11552 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11553 "%Kattempt to free a non-heap object %qD", exp, arg);
11554 else
11555 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11556 "%Kattempt to free a non-heap object", exp);
11557 }
11558
11559 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11560 if possible. */
11561
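/* For example (illustrative):

     char buf[10];
     __builtin_object_size (&buf[2], 0)  folds to  8

   whereas for a pointer whose target is still unknown, NULL_TREE is
   returned so that later passes may determine the size.  */
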
11562 static tree
11563 fold_builtin_object_size (tree ptr, tree ost)
11564 {
11565 unsigned HOST_WIDE_INT bytes;
11566 int object_size_type;
11567
11568 if (!validate_arg (ptr, POINTER_TYPE)
11569 || !validate_arg (ost, INTEGER_TYPE))
11570 return NULL_TREE;
11571
11572 STRIP_NOPS (ost);
11573
11574 if (TREE_CODE (ost) != INTEGER_CST
11575 || tree_int_cst_sgn (ost) < 0
11576 || compare_tree_int (ost, 3) > 0)
11577 return NULL_TREE;
11578
11579 object_size_type = tree_to_shwi (ost);
11580
11581 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11582 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11583 and (size_t) 0 for types 2 and 3. */
11584 if (TREE_SIDE_EFFECTS (ptr))
11585 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11586
11587 if (TREE_CODE (ptr) == ADDR_EXPR)
11588 {
11589 bytes = compute_builtin_object_size (ptr, object_size_type);
11590 if (wi::fits_to_tree_p (bytes, size_type_node))
11591 return build_int_cstu (size_type_node, bytes);
11592 }
11593 else if (TREE_CODE (ptr) == SSA_NAME)
11594 {
11595 /* If object size is not known yet, delay folding until
11596 later. Maybe subsequent passes will help determining
11597 it. */
11598 bytes = compute_builtin_object_size (ptr, object_size_type);
11599 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11600 && wi::fits_to_tree_p (bytes, size_type_node))
11601 return build_int_cstu (size_type_node, bytes);
11602 }
11603
11604 return NULL_TREE;
11605 }
11606
11607 /* Builtins with folding operations that operate on "..." arguments
11608 need special handling; we need to store the arguments in a convenient
11609 data structure before attempting any folding. Fortunately there are
11610 only a few builtins that fall into this category. FNDECL is the
11611 function, EXP is the CALL_EXPR for the call. */
11612
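/* For example, a C library may implement the fpclassify macro roughly
   as (illustrative sketch, not the exact glibc definition):

     #define fpclassify(x) \
       __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, \
                             FP_SUBNORMAL, FP_ZERO, (x))

   and for a constant argument the fold below reduces the call to the
   matching FP_* value.  */
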
11613 static tree
11614 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11615 {
11616 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11617 tree ret = NULL_TREE;
11618
11619 switch (fcode)
11620 {
11621 case BUILT_IN_FPCLASSIFY:
11622 ret = fold_builtin_fpclassify (loc, args, nargs);
11623 break;
11624
11625 default:
11626 break;
11627 }
11628 if (ret)
11629 {
11630 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11631 SET_EXPR_LOCATION (ret, loc);
11632 TREE_NO_WARNING (ret) = 1;
11633 return ret;
11634 }
11635 return NULL_TREE;
11636 }
11637
11638 /* Initialize format string characters in the target charset. */
11639
11640 bool
11641 init_target_chars (void)
11642 {
11643 static bool init;
11644 if (!init)
11645 {
11646 target_newline = lang_hooks.to_target_charset ('\n');
11647 target_percent = lang_hooks.to_target_charset ('%');
11648 target_c = lang_hooks.to_target_charset ('c');
11649 target_s = lang_hooks.to_target_charset ('s');
11650 if (target_newline == 0 || target_percent == 0 || target_c == 0
11651 || target_s == 0)
11652 return false;
11653
11654 target_percent_c[0] = target_percent;
11655 target_percent_c[1] = target_c;
11656 target_percent_c[2] = '\0';
11657
11658 target_percent_s[0] = target_percent;
11659 target_percent_s[1] = target_s;
11660 target_percent_s[2] = '\0';
11661
11662 target_percent_s_newline[0] = target_percent;
11663 target_percent_s_newline[1] = target_s;
11664 target_percent_s_newline[2] = target_newline;
11665 target_percent_s_newline[3] = '\0';
11666
11667 init = true;
11668 }
11669 return true;
11670 }
11671
11672 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11673 and no overflow/underflow occurred. INEXACT is true if M was not
11674 exactly calculated. TYPE is the tree type for the result. This
11675 function assumes that you cleared the MPFR flags and then
11676 calculated M to see if anything subsequently set a flag prior to
11677 entering this function. Return NULL_TREE if any checks fail. */
11678
11679 static tree
11680 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11681 {
11682 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11683 overflow/underflow occurred. If -frounding-math, proceed iff the
11684 result of calling FUNC was exact. */
11685 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11686 && (!flag_rounding_math || !inexact))
11687 {
11688 REAL_VALUE_TYPE rr;
11689
11690 real_from_mpfr (&rr, m, type, GMP_RNDN);
11691 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11692 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11693 but the mpfr_t is not, then we underflowed in the
11694 conversion. */
11695 if (real_isfinite (&rr)
11696 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11697 {
11698 REAL_VALUE_TYPE rmode;
11699
11700 real_convert (&rmode, TYPE_MODE (type), &rr);
11701 /* Proceed iff the specified mode can hold the value. */
11702 if (real_identical (&rmode, &rr))
11703 return build_real (type, rmode);
11704 }
11705 }
11706 return NULL_TREE;
11707 }
11708
11709 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11710 number and no overflow/underflow occurred. INEXACT is true if M
11711 was not exactly calculated. TYPE is the tree type for the result.
11712 This function assumes that you cleared the MPFR flags and then
11713 calculated M to see if anything subsequently set a flag prior to
11714 entering this function. Return NULL_TREE if any checks fail;
11715 if FORCE_CONVERT is true, bypass the checks. */
11716
11717 static tree
11718 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11719 {
11720 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11721 overflow/underflow occurred. If -frounding-math, proceed iff the
11722 result of calling FUNC was exact. */
11723 if (force_convert
11724 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11725 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11726 && (!flag_rounding_math || !inexact)))
11727 {
11728 REAL_VALUE_TYPE re, im;
11729
11730 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11731 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11732 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11733 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11734 but the mpfr_t is not, then we underflowed in the
11735 conversion. */
11736 if (force_convert
11737 || (real_isfinite (&re) && real_isfinite (&im)
11738 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11739 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11740 {
11741 REAL_VALUE_TYPE re_mode, im_mode;
11742
11743 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11744 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11745 /* Proceed iff the specified mode can hold the value. */
11746 if (force_convert
11747 || (real_identical (&re_mode, &re)
11748 && real_identical (&im_mode, &im)))
11749 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11750 build_real (TREE_TYPE (type), im_mode));
11751 }
11752 }
11753 return NULL_TREE;
11754 }
11755
11756 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11757 FUNC on it and return the resulting value as a tree with type TYPE.
11758 If MIN and/or MAX are not NULL, then the supplied ARG must be
11759 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11760 acceptable values, otherwise they are not. The mpfr precision is
11761 set to the precision of TYPE. We assume that function FUNC returns
11762 zero if the result could be calculated exactly within the requested
11763 precision. */
11764
11765 static tree
11766 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11767 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11768 bool inclusive)
11769 {
11770 tree result = NULL_TREE;
11771
11772 STRIP_NOPS (arg);
11773
11774 /* To proceed, MPFR must exactly represent the target floating point
11775 format, which only happens when the target base equals two. */
11776 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11777 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11778 {
11779 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11780
11781 if (real_isfinite (ra)
11782 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11783 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11784 {
11785 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11786 const int prec = fmt->p;
11787 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11788 int inexact;
11789 mpfr_t m;
11790
11791 mpfr_init2 (m, prec);
11792 mpfr_from_real (m, ra, GMP_RNDN);
11793 mpfr_clear_flags ();
11794 inexact = func (m, m, rnd);
11795 result = do_mpfr_ckconv (m, type, inexact);
11796 mpfr_clear (m);
11797 }
11798 }
11799
11800 return result;
11801 }
11802
11803 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11804 FUNC on it and return the resulting value as a tree with type TYPE.
11805 The mpfr precision is set to the precision of TYPE. We assume that
11806 function FUNC returns zero if the result could be calculated
11807 exactly within the requested precision. */
11808
11809 static tree
11810 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11811 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11812 {
11813 tree result = NULL_TREE;
11814
11815 STRIP_NOPS (arg1);
11816 STRIP_NOPS (arg2);
11817
11818 /* To proceed, MPFR must exactly represent the target floating point
11819 format, which only happens when the target base equals two. */
11820 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11821 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11822 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11823 {
11824 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11825 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11826
11827 if (real_isfinite (ra1) && real_isfinite (ra2))
11828 {
11829 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11830 const int prec = fmt->p;
11831 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11832 int inexact;
11833 mpfr_t m1, m2;
11834
11835 mpfr_inits2 (prec, m1, m2, NULL);
11836 mpfr_from_real (m1, ra1, GMP_RNDN);
11837 mpfr_from_real (m2, ra2, GMP_RNDN);
11838 mpfr_clear_flags ();
11839 inexact = func (m1, m1, m2, rnd);
11840 result = do_mpfr_ckconv (m1, type, inexact);
11841 mpfr_clears (m1, m2, NULL);
11842 }
11843 }
11844
11845 return result;
11846 }
11847
11848 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11849 FUNC on it and return the resulting value as a tree with type TYPE.
11850 The mpfr precision is set to the precision of TYPE. We assume that
11851 function FUNC returns zero if the result could be calculated
11852 exactly within the requested precision. */
11853
11854 static tree
11855 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11856 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11857 {
11858 tree result = NULL_TREE;
11859
11860 STRIP_NOPS (arg1);
11861 STRIP_NOPS (arg2);
11862 STRIP_NOPS (arg3);
11863
11864 /* To proceed, MPFR must exactly represent the target floating point
11865 format, which only happens when the target base equals two. */
11866 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11867 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11868 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11869 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11870 {
11871 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11872 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11873 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11874
11875 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11876 {
11877 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11878 const int prec = fmt->p;
11879 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11880 int inexact;
11881 mpfr_t m1, m2, m3;
11882
11883 mpfr_inits2 (prec, m1, m2, m3, NULL);
11884 mpfr_from_real (m1, ra1, GMP_RNDN);
11885 mpfr_from_real (m2, ra2, GMP_RNDN);
11886 mpfr_from_real (m3, ra3, GMP_RNDN);
11887 mpfr_clear_flags ();
11888 inexact = func (m1, m1, m2, m3, rnd);
11889 result = do_mpfr_ckconv (m1, type, inexact);
11890 mpfr_clears (m1, m2, m3, NULL);
11891 }
11892 }
11893
11894 return result;
11895 }
11896
11897 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11898 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11899 If ARG_SINP and ARG_COSP are NULL then the result is returned
11900 as a complex value.
11901 The type is taken from the type of ARG and is used for setting the
11902 precision of the calculation and results. */
11903
11904 static tree
11905 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11906 {
11907 tree const type = TREE_TYPE (arg);
11908 tree result = NULL_TREE;
11909
11910 STRIP_NOPS (arg);
11911
11912 /* To proceed, MPFR must exactly represent the target floating point
11913 format, which only happens when the target base equals two. */
11914 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11915 && TREE_CODE (arg) == REAL_CST
11916 && !TREE_OVERFLOW (arg))
11917 {
11918 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11919
11920 if (real_isfinite (ra))
11921 {
11922 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11923 const int prec = fmt->p;
11924 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11925 tree result_s, result_c;
11926 int inexact;
11927 mpfr_t m, ms, mc;
11928
11929 mpfr_inits2 (prec, m, ms, mc, NULL);
11930 mpfr_from_real (m, ra, GMP_RNDN);
11931 mpfr_clear_flags ();
11932 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11933 result_s = do_mpfr_ckconv (ms, type, inexact);
11934 result_c = do_mpfr_ckconv (mc, type, inexact);
11935 mpfr_clears (m, ms, mc, NULL);
11936 if (result_s && result_c)
11937 {
11938 /* If we are to return in a complex value do so. */
11939 if (!arg_sinp && !arg_cosp)
11940 return build_complex (build_complex_type (type),
11941 result_c, result_s);
11942
11943 /* Dereference the sin/cos pointer arguments. */
11944 arg_sinp = build_fold_indirect_ref (arg_sinp);
11945 arg_cosp = build_fold_indirect_ref (arg_cosp);
11946 /* Proceed iff valid pointer types were passed in. */
11947 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11948 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11949 {
11950 /* Set the values. */
11951 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
11952 result_s);
11953 TREE_SIDE_EFFECTS (result_s) = 1;
11954 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
11955 result_c);
11956 TREE_SIDE_EFFECTS (result_c) = 1;
11957 /* Combine the assignments into a compound expr. */
11958 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11959 result_s, result_c));
11960 }
11961 }
11962 }
11963 }
11964 return result;
11965 }
11966
11967 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11968 two-argument mpfr order N Bessel function FUNC on them and return
11969 the resulting value as a tree with type TYPE. The mpfr precision
11970 is set to the precision of TYPE. We assume that function FUNC
11971 returns zero if the result could be calculated exactly within the
11972 requested precision. */
11973 static tree
11974 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
11975 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
11976 const REAL_VALUE_TYPE *min, bool inclusive)
11977 {
11978 tree result = NULL_TREE;
11979
11980 STRIP_NOPS (arg1);
11981 STRIP_NOPS (arg2);
11982
11983 /* To proceed, MPFR must exactly represent the target floating point
11984 format, which only happens when the target base equals two. */
11985 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11986 && tree_fits_shwi_p (arg1)
11987 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11988 {
11989 const HOST_WIDE_INT n = tree_to_shwi (arg1);
11990 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
11991
11992 if (n == (long)n
11993 && real_isfinite (ra)
11994 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
11995 {
11996 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11997 const int prec = fmt->p;
11998 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11999 int inexact;
12000 mpfr_t m;
12001
12002 mpfr_init2 (m, prec);
12003 mpfr_from_real (m, ra, GMP_RNDN);
12004 mpfr_clear_flags ();
12005 inexact = func (m, n, m, rnd);
12006 result = do_mpfr_ckconv (m, type, inexact);
12007 mpfr_clear (m);
12008 }
12009 }
12010
12011 return result;
12012 }
12013
12014 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12015 the pointer *(ARG_QUO) and return the result. The type is taken
12016 from the type of ARG0 and is used for setting the precision of the
12017 calculation and results. */
12018
12019 static tree
12020 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12021 {
12022 tree const type = TREE_TYPE (arg0);
12023 tree result = NULL_TREE;
12024
12025 STRIP_NOPS (arg0);
12026 STRIP_NOPS (arg1);
12027
12028 /* To proceed, MPFR must exactly represent the target floating point
12029 format, which only happens when the target base equals two. */
12030 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12031 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12032 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12033 {
12034 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12035 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12036
12037 if (real_isfinite (ra0) && real_isfinite (ra1))
12038 {
12039 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12040 const int prec = fmt->p;
12041 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12042 tree result_rem;
12043 long integer_quo;
12044 mpfr_t m0, m1;
12045
12046 mpfr_inits2 (prec, m0, m1, NULL);
12047 mpfr_from_real (m0, ra0, GMP_RNDN);
12048 mpfr_from_real (m1, ra1, GMP_RNDN);
12049 mpfr_clear_flags ();
12050 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12051 /* Remquo is independent of the rounding mode, so pass
12052 inexact=0 to do_mpfr_ckconv(). */
12053 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12054 mpfr_clears (m0, m1, NULL);
12055 if (result_rem)
12056 {
12057 /* MPFR calculates quo in the host's long so it may
12058 return more bits in quo than the target int can hold
12059 if sizeof(host long) > sizeof(target int). This can
12060 happen even for native compilers in LP64 mode. In
12061 these cases, reduce the quo value modulo the largest
12062 number that the target int can hold, leaving one
12063 bit for the sign. */
12064 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12065 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12066
12067 /* Dereference the quo pointer argument. */
12068 arg_quo = build_fold_indirect_ref (arg_quo);
12069 /* Proceed iff a valid pointer type was passed in. */
12070 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12071 {
12072 /* Set the value. */
12073 tree result_quo
12074 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12075 build_int_cst (TREE_TYPE (arg_quo),
12076 integer_quo));
12077 TREE_SIDE_EFFECTS (result_quo) = 1;
12078 /* Combine the quo assignment with the rem. */
12079 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12080 result_quo, result_rem));
12081 }
12082 }
12083 }
12084 }
12085 return result;
12086 }

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   value (-1 or 1).  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
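
/* An illustrative sketch, not part of the folder itself (the decimal
   value is rounded for exposition): for a constant argument the fold
   matches lgamma_r.  For example,

     int sg;
     double lg = lgamma_r (0.5, &sg);

   folds to lg == log (sqrt (pi)), approximately 0.5724, with sg set
   to 1 because gamma (0.5) is positive.  */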

/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}
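
/* Sketch of a typical caller, not quoted verbatim from this file:
   constant folding of cexp() can pass the matching mpc entry point
   directly, since mpc_exp has exactly the FUNC signature above:

     tem = do_mpc_arg1 (arg, type, mpc_exp);

   Any one-argument mpc function of type
   int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t) can be used the same way.  */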

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
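
/* Sketch of a caller, under the assumption that mpc_pow (whose
   signature matches FUNC above) is the entry point used when folding
   cpow():

     tem = do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);

   Passing a nonzero DO_NONFINITE instead allows folding to proceed
   even when an argument or the result is Inf or NaN.  */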

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning would be generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
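
/* Illustration of the effect: for a GIMPLE statement such as

     tmp = __builtin_ffs (16);

   fold_call_stmt returns the INTEGER_CST 5 (bit 5, counting from 1,
   is the least significant set bit of 16), and the caller can then
   replace the call with that constant.  */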

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}
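
/* Source-level trigger, shown for illustration only (the label
   "__my_memcpy" is made up): redirecting a builtin with an asm label,
   as in

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("__my_memcpy");

   leads the front end to call the function above, so that block moves
   and memcpy libcalls are emitted against "__my_memcpy".  */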

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
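
/* Informal reading of the switch above: __builtin_stack_save () counts
   as simple because it expands to a single register move, whereas
   __builtin_memcpy does not, since it may become a block move or a
   libcall of unbounded cost.  */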

/* Return true if DECL is a builtin that is not expensive, i.e., one that is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
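
/* Informal reading of the switch above: __builtin_popcount qualifies
   as inexpensive because it expands to a few instructions or a small
   libcall, while something like __builtin_printf falls through to
   is_simple_builtin and is rejected.  */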