1 /* Expand builtin functions.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "hash-set.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stringpool.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "varasm.h"
40 #include "tree-object-size.h"
41 #include "realmpfr.h"
42 #include "predict.h"
43 #include "hashtab.h"
44 #include "hard-reg-set.h"
45 #include "function.h"
46 #include "cfgrtl.h"
47 #include "basic-block.h"
48 #include "tree-ssa-alias.h"
49 #include "internal-fn.h"
50 #include "gimple-expr.h"
51 #include "is-a.h"
52 #include "gimple.h"
53 #include "flags.h"
54 #include "regs.h"
55 #include "except.h"
56 #include "insn-config.h"
57 #include "statistics.h"
58 #include "real.h"
59 #include "fixed-value.h"
60 #include "expmed.h"
61 #include "dojump.h"
62 #include "explow.h"
63 #include "emit-rtl.h"
64 #include "stmt.h"
65 #include "expr.h"
66 #include "insn-codes.h"
67 #include "optabs.h"
68 #include "libfuncs.h"
69 #include "recog.h"
70 #include "output.h"
71 #include "typeclass.h"
72 #include "tm_p.h"
73 #include "target.h"
74 #include "langhooks.h"
75 #include "tree-ssanames.h"
76 #include "tree-dfa.h"
77 #include "value-prof.h"
78 #include "diagnostic-core.h"
79 #include "builtins.h"
80 #include "asan.h"
81 #include "cilk.h"
82 #include "ipa-ref.h"
83 #include "lto-streamer.h"
84 #include "cgraph.h"
85 #include "tree-chkp.h"
86 #include "rtl-chkp.h"
87 #include "gomp-constants.h"
88
89
90 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
91
92 struct target_builtins default_target_builtins;
93 #if SWITCHABLE_TARGET
94 struct target_builtins *this_target_builtins = &default_target_builtins;
95 #endif
96
97 /* Define the names of the builtin function types and codes. */
98 const char *const built_in_class_names[BUILT_IN_LAST]
99 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
100
101 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
102 const char * built_in_names[(int) END_BUILTINS] =
103 {
104 #include "builtins.def"
105 };
106 #undef DEF_BUILTIN
107
108 /* Set up an array of builtin_info_type, making sure each element's decl is
109 initialized to NULL_TREE. */
110 builtin_info_type builtin_info[(int)END_BUILTINS];
111
112 /* True if __builtin_constant_p should be folded right away. */
113 bool force_folding_builtin_constant_p;
114
115 static rtx c_readstr (const char *, machine_mode);
116 static int target_char_cast (tree, char *);
117 static rtx get_memory_rtx (tree, tree);
118 static int apply_args_size (void);
119 static int apply_result_size (void);
120 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
121 static rtx result_vector (int, rtx);
122 #endif
123 static void expand_builtin_update_setjmp_buf (rtx);
124 static void expand_builtin_prefetch (tree);
125 static rtx expand_builtin_apply_args (void);
126 static rtx expand_builtin_apply_args_1 (void);
127 static rtx expand_builtin_apply (rtx, rtx, rtx);
128 static void expand_builtin_return (rtx);
129 static enum type_class type_to_class (tree);
130 static rtx expand_builtin_classify_type (tree);
131 static void expand_errno_check (tree, rtx);
132 static rtx expand_builtin_mathfn (tree, rtx, rtx);
133 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
134 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
135 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
136 static rtx expand_builtin_interclass_mathfn (tree, rtx);
137 static rtx expand_builtin_sincos (tree);
138 static rtx expand_builtin_cexpi (tree, rtx);
139 static rtx expand_builtin_int_roundingfn (tree, rtx);
140 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
141 static rtx expand_builtin_next_arg (void);
142 static rtx expand_builtin_va_start (tree);
143 static rtx expand_builtin_va_end (tree);
144 static rtx expand_builtin_va_copy (tree);
145 static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
146 static rtx expand_builtin_strcmp (tree, rtx);
147 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
148 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
149 static rtx expand_builtin_memcpy (tree, rtx);
150 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
151 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
152 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
153 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
154 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
155 machine_mode, int, tree);
156 static rtx expand_builtin_strcpy (tree, rtx);
157 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
158 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
159 static rtx expand_builtin_strncpy (tree, rtx);
160 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
161 static rtx expand_builtin_memset (tree, rtx, machine_mode);
162 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
163 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
164 static rtx expand_builtin_bzero (tree);
165 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
166 static rtx expand_builtin_alloca (tree, bool);
167 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
168 static rtx expand_builtin_frame_address (tree, tree);
169 static tree stabilize_va_list_loc (location_t, tree, int);
170 static rtx expand_builtin_expect (tree, rtx);
171 static tree fold_builtin_constant_p (tree);
172 static tree fold_builtin_classify_type (tree);
173 static tree fold_builtin_strlen (location_t, tree, tree);
174 static tree fold_builtin_inf (location_t, tree, int);
175 static tree fold_builtin_nan (tree, tree, int);
176 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
177 static bool validate_arg (const_tree, enum tree_code code);
178 static bool integer_valued_real_p (tree);
179 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
180 static rtx expand_builtin_fabs (tree, rtx, rtx);
181 static rtx expand_builtin_signbit (tree, rtx);
182 static tree fold_builtin_sqrt (location_t, tree, tree);
183 static tree fold_builtin_cbrt (location_t, tree, tree);
184 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
185 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
186 static tree fold_builtin_cos (location_t, tree, tree, tree);
187 static tree fold_builtin_cosh (location_t, tree, tree, tree);
188 static tree fold_builtin_tan (tree, tree);
189 static tree fold_builtin_trunc (location_t, tree, tree);
190 static tree fold_builtin_floor (location_t, tree, tree);
191 static tree fold_builtin_ceil (location_t, tree, tree);
192 static tree fold_builtin_round (location_t, tree, tree);
193 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
194 static tree fold_builtin_bitop (tree, tree);
195 static tree fold_builtin_strchr (location_t, tree, tree, tree);
196 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
197 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
198 static tree fold_builtin_strcmp (location_t, tree, tree);
199 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
200 static tree fold_builtin_signbit (location_t, tree, tree);
201 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
202 static tree fold_builtin_isascii (location_t, tree);
203 static tree fold_builtin_toascii (location_t, tree);
204 static tree fold_builtin_isdigit (location_t, tree);
205 static tree fold_builtin_fabs (location_t, tree, tree);
206 static tree fold_builtin_abs (location_t, tree, tree);
207 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
208 enum tree_code);
209 static tree fold_builtin_0 (location_t, tree);
210 static tree fold_builtin_1 (location_t, tree, tree);
211 static tree fold_builtin_2 (location_t, tree, tree, tree);
212 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
213 static tree fold_builtin_varargs (location_t, tree, tree*, int);
214
215 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
216 static tree fold_builtin_strstr (location_t, tree, tree, tree);
217 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
218 static tree fold_builtin_strspn (location_t, tree, tree);
219 static tree fold_builtin_strcspn (location_t, tree, tree);
220
221 static rtx expand_builtin_object_size (tree);
222 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
223 enum built_in_function);
224 static void maybe_emit_chk_warning (tree, enum built_in_function);
225 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
226 static void maybe_emit_free_warning (tree);
227 static tree fold_builtin_object_size (tree, tree);
228
229 unsigned HOST_WIDE_INT target_newline;
230 unsigned HOST_WIDE_INT target_percent;
231 static unsigned HOST_WIDE_INT target_c;
232 static unsigned HOST_WIDE_INT target_s;
233 char target_percent_c[3];
234 char target_percent_s[3];
235 char target_percent_s_newline[4];
236 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
237 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
238 static tree do_mpfr_arg2 (tree, tree, tree,
239 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
240 static tree do_mpfr_arg3 (tree, tree, tree, tree,
241 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
242 static tree do_mpfr_sincos (tree, tree, tree);
243 static tree do_mpfr_bessel_n (tree, tree, tree,
244 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
245 const REAL_VALUE_TYPE *, bool);
246 static tree do_mpfr_remquo (tree, tree, tree);
247 static tree do_mpfr_lgamma_r (tree, tree, tree);
248 static void expand_builtin_sync_synchronize (void);
249
250 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
251
252 static bool
253 is_builtin_name (const char *name)
254 {
255 if (strncmp (name, "__builtin_", 10) == 0)
256 return true;
257 if (strncmp (name, "__sync_", 7) == 0)
258 return true;
259 if (strncmp (name, "__atomic_", 9) == 0)
260 return true;
261 if (flag_cilkplus
262 && (!strcmp (name, "__cilkrts_detach")
263 || !strcmp (name, "__cilkrts_pop_frame")))
264 return true;
265 return false;
266 }
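
/* Illustrative examples (not part of the original source): with the
   prefix checks above, is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__sync_fetch_and_add") return true, while
   is_builtin_name ("memcpy") returns false. */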
267
268
269 /* Return true if DECL is a function symbol representing a built-in. */
270
271 bool
272 is_builtin_fn (tree decl)
273 {
274 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
275 }
276
277 /* Return true if NODE should be considered for inline expansion regardless
278 of the optimization level. This is the case whenever the function is invoked
279 under its "internal" name, which normally contains the prefix "__builtin". */
280
281 static bool
282 called_as_built_in (tree node)
283 {
284 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
285 we want the name used to call the function, not the name it
286 will have. */
287 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
288 return is_builtin_name (name);
289 }
290
291 /* Compute values M and N such that M divides (address of EXP - N) and such
292 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
293 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
294 *ALIGNP and any bit-offset to *BITPOSP.
295
296 Note that the address (and thus the alignment) computed here is based
297 on the address to which a symbol resolves, whereas DECL_ALIGN is based
298 on the address at which an object is actually located. These two
299 addresses are not always the same. For example, on ARM targets,
300 the address &foo of a Thumb function foo() has the lowest bit set,
301 whereas foo() itself starts on an even address.
302
303 If ADDR_P is true we are taking the address of the memory reference EXP
304 and thus cannot rely on the access taking place. */
305
306 static bool
307 get_object_alignment_2 (tree exp, unsigned int *alignp,
308 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
309 {
310 HOST_WIDE_INT bitsize, bitpos;
311 tree offset;
312 machine_mode mode;
313 int unsignedp, volatilep;
314 unsigned int align = BITS_PER_UNIT;
315 bool known_alignment = false;
316
317 /* Get the innermost object and the constant (bitpos) and possibly
318 variable (offset) offset of the access. */
319 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
320 &mode, &unsignedp, &volatilep, true);
321
322 /* Extract alignment information from the innermost object and
323 possibly adjust bitpos and offset. */
324 if (TREE_CODE (exp) == FUNCTION_DECL)
325 {
326 /* Function addresses can encode extra information besides their
327 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
328 allows the low bit to be used as a virtual bit, we know
329 that the address itself must be at least 2-byte aligned. */
330 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
331 align = 2 * BITS_PER_UNIT;
332 }
333 else if (TREE_CODE (exp) == LABEL_DECL)
334 ;
335 else if (TREE_CODE (exp) == CONST_DECL)
336 {
337 /* The alignment of a CONST_DECL is determined by its initializer. */
338 exp = DECL_INITIAL (exp);
339 align = TYPE_ALIGN (TREE_TYPE (exp));
340 #ifdef CONSTANT_ALIGNMENT
341 if (CONSTANT_CLASS_P (exp))
342 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
343 #endif
344 known_alignment = true;
345 }
346 else if (DECL_P (exp))
347 {
348 align = DECL_ALIGN (exp);
349 known_alignment = true;
350 }
351 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
352 {
353 align = TYPE_ALIGN (TREE_TYPE (exp));
354 }
355 else if (TREE_CODE (exp) == INDIRECT_REF
356 || TREE_CODE (exp) == MEM_REF
357 || TREE_CODE (exp) == TARGET_MEM_REF)
358 {
359 tree addr = TREE_OPERAND (exp, 0);
360 unsigned ptr_align;
361 unsigned HOST_WIDE_INT ptr_bitpos;
362 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
363
364 /* If the address is explicitly aligned, handle that. */
365 if (TREE_CODE (addr) == BIT_AND_EXPR
366 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
367 {
368 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
369 ptr_bitmask *= BITS_PER_UNIT;
370 align = ptr_bitmask & -ptr_bitmask;
371 addr = TREE_OPERAND (addr, 0);
372 }
373
374 known_alignment
375 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
376 align = MAX (ptr_align, align);
377
378 /* Re-apply explicit alignment to the bitpos. */
379 ptr_bitpos &= ptr_bitmask;
380
381 /* The alignment of the pointer operand in a TARGET_MEM_REF
382 has to take the variable offset parts into account. */
383 if (TREE_CODE (exp) == TARGET_MEM_REF)
384 {
385 if (TMR_INDEX (exp))
386 {
387 unsigned HOST_WIDE_INT step = 1;
388 if (TMR_STEP (exp))
389 step = TREE_INT_CST_LOW (TMR_STEP (exp));
390 align = MIN (align, (step & -step) * BITS_PER_UNIT);
391 }
392 if (TMR_INDEX2 (exp))
393 align = BITS_PER_UNIT;
394 known_alignment = false;
395 }
396
397 /* When EXP is an actual memory reference then we can use
398 TYPE_ALIGN of a pointer indirection to derive alignment.
399 Do so only if get_pointer_alignment_1 did not reveal absolute
400 alignment knowledge and if using that alignment would
401 improve the situation. */
402 if (!addr_p && !known_alignment
403 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
404 align = TYPE_ALIGN (TREE_TYPE (exp));
405 else
406 {
407 /* Else adjust bitpos accordingly. */
408 bitpos += ptr_bitpos;
409 if (TREE_CODE (exp) == MEM_REF
410 || TREE_CODE (exp) == TARGET_MEM_REF)
411 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
412 }
413 }
414 else if (TREE_CODE (exp) == STRING_CST)
415 {
416 /* STRING_CSTs are the only constant objects we allow not to be
417 wrapped inside a CONST_DECL. */
418 align = TYPE_ALIGN (TREE_TYPE (exp));
419 #ifdef CONSTANT_ALIGNMENT
420 if (CONSTANT_CLASS_P (exp))
421 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
422 #endif
423 known_alignment = true;
424 }
425
426 /* If there is a non-constant offset part extract the maximum
427 alignment that can prevail. */
428 if (offset)
429 {
430 unsigned int trailing_zeros = tree_ctz (offset);
431 if (trailing_zeros < HOST_BITS_PER_INT)
432 {
433 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
434 if (inner)
435 align = MIN (align, inner);
436 }
437 }
438
439 *alignp = align;
440 *bitposp = bitpos & (*alignp - 1);
441 return known_alignment;
442 }
443
444 /* For a memory reference expression EXP compute values M and N such that M
445 divides (&EXP - N) and such that N < M. If these numbers can be determined,
446 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
447 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
448
449 bool
450 get_object_alignment_1 (tree exp, unsigned int *alignp,
451 unsigned HOST_WIDE_INT *bitposp)
452 {
453 return get_object_alignment_2 (exp, alignp, bitposp, false);
454 }
455
456 /* Return the alignment in bits of EXP, an object. */
457
458 unsigned int
459 get_object_alignment (tree exp)
460 {
461 unsigned HOST_WIDE_INT bitpos = 0;
462 unsigned int align;
463
464 get_object_alignment_1 (exp, &align, &bitpos);
465
466 /* align and bitpos now specify known low bits of the pointer.
467 ptr & (align - 1) == bitpos. */
468
469 if (bitpos != 0)
470 align = (bitpos & -bitpos);
471 return align;
472 }
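
/* Worked example (illustrative): if get_object_alignment_1 reports
   align == 32 bits and bitpos == 8 bits, the object is known to sit one
   byte past a four-byte boundary, so only (bitpos & -bitpos) == 8 bits
   of alignment can be guaranteed, which is what is returned above. */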
473
474 /* For a pointer valued expression EXP compute values M and N such that M
475 divides (EXP - N) and such that N < M. If these numbers can be determined,
476 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
477 the results are just a conservative approximation.
478
479 If EXP is not a pointer, false is returned too. */
480
481 bool
482 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
483 unsigned HOST_WIDE_INT *bitposp)
484 {
485 STRIP_NOPS (exp);
486
487 if (TREE_CODE (exp) == ADDR_EXPR)
488 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
489 alignp, bitposp, true);
490 else if (TREE_CODE (exp) == SSA_NAME
491 && POINTER_TYPE_P (TREE_TYPE (exp)))
492 {
493 unsigned int ptr_align, ptr_misalign;
494 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
495
496 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
497 {
498 *bitposp = ptr_misalign * BITS_PER_UNIT;
499 *alignp = ptr_align * BITS_PER_UNIT;
500 /* We cannot really tell whether this result is an approximation. */
501 return true;
502 }
503 else
504 {
505 *bitposp = 0;
506 *alignp = BITS_PER_UNIT;
507 return false;
508 }
509 }
510 else if (TREE_CODE (exp) == INTEGER_CST)
511 {
512 *alignp = BIGGEST_ALIGNMENT;
513 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
514 & (BIGGEST_ALIGNMENT - 1));
515 return true;
516 }
517
518 *bitposp = 0;
519 *alignp = BITS_PER_UNIT;
520 return false;
521 }
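
/* Illustrative example: for an SSA pointer whose ptr_info records
   ptr_align == 16 and ptr_misalign == 4 (both in bytes), the code above
   reports *ALIGNP == 128 bits and *BITPOSP == 32 bits, i.e. the pointer
   is known to be 4 bytes past a 16-byte boundary. */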
522
523 /* Return the alignment in bits of EXP, a pointer valued expression.
524 The alignment returned is, by default, the alignment of the thing that
525 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
526
527 Otherwise, look at the expression to see if we can do better, i.e., if the
528 expression is actually pointing at an object whose alignment is tighter. */
529
530 unsigned int
531 get_pointer_alignment (tree exp)
532 {
533 unsigned HOST_WIDE_INT bitpos = 0;
534 unsigned int align;
535
536 get_pointer_alignment_1 (exp, &align, &bitpos);
537
538 /* align and bitpos now specify known low bits of the pointer.
539 ptr & (align - 1) == bitpos. */
540
541 if (bitpos != 0)
542 align = (bitpos & -bitpos);
543
544 return align;
545 }
546
547 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
548 way, because the character array could contain a zero byte in the middle.
549 TREE_STRING_LENGTH is the size of the character array, not the string.
550
551 ONLY_VALUE should be nonzero if the result is not going to be emitted
552 into the instruction stream and zero if it is going to be expanded.
553 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
554 is returned, otherwise NULL, since
555 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
556 evaluate the side-effects.
557
558 If ONLY_VALUE is two then we do not emit warnings about out-of-bounds
559 accesses. Note that this implies the result is not going to be emitted
560 into the instruction stream.
561
562 The value returned is of type `ssizetype'.
563
564 Unfortunately, string_constant can't access the values of const char
565 arrays with initializers, so neither can we do so here. */
566
567 tree
568 c_strlen (tree src, int only_value)
569 {
570 tree offset_node;
571 HOST_WIDE_INT offset;
572 int max;
573 const char *ptr;
574 location_t loc;
575
576 STRIP_NOPS (src);
577 if (TREE_CODE (src) == COND_EXPR
578 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
579 {
580 tree len1, len2;
581
582 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
583 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
584 if (tree_int_cst_equal (len1, len2))
585 return len1;
586 }
587
588 if (TREE_CODE (src) == COMPOUND_EXPR
589 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
590 return c_strlen (TREE_OPERAND (src, 1), only_value);
591
592 loc = EXPR_LOC_OR_LOC (src, input_location);
593
594 src = string_constant (src, &offset_node);
595 if (src == 0)
596 return NULL_TREE;
597
598 max = TREE_STRING_LENGTH (src) - 1;
599 ptr = TREE_STRING_POINTER (src);
600
601 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
602 {
603 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
604 compute the offset to the following null if we don't know where to
605 start searching for it. */
606 int i;
607
608 for (i = 0; i < max; i++)
609 if (ptr[i] == 0)
610 return NULL_TREE;
611
612 /* We don't know the starting offset, but we do know that the string
613 has no internal zero bytes. We can assume that the offset falls
614 within the bounds of the string; otherwise, the programmer deserves
615 what he gets. Subtract the offset from the length of the string,
616 and return that. This would perhaps not be valid if we were dealing
617 with named arrays in addition to literal string constants. */
618
619 return size_diffop_loc (loc, size_int (max), offset_node);
620 }
621
622 /* We have a known offset into the string. Start searching there for
623 a null character if we can represent it as a single HOST_WIDE_INT. */
624 if (offset_node == 0)
625 offset = 0;
626 else if (! tree_fits_shwi_p (offset_node))
627 offset = -1;
628 else
629 offset = tree_to_shwi (offset_node);
630
631 /* If the offset is known to be out of bounds, warn, and call strlen at
632 runtime. */
633 if (offset < 0 || offset > max)
634 {
635 /* Suppress multiple warnings for propagated constant strings. */
636 if (only_value != 2
637 && !TREE_NO_WARNING (src))
638 {
639 warning_at (loc, 0, "offset outside bounds of constant string");
640 TREE_NO_WARNING (src) = 1;
641 }
642 return NULL_TREE;
643 }
644
645 /* Use strlen to search for the first zero byte. Since any strings
646 constructed with build_string will have nulls appended, we win even
647 if we get handed something like (char[4])"abcd".
648
649 Since OFFSET is our starting index into the string, no further
650 calculation is needed. */
651 return ssize_int (strlen (ptr + offset));
652 }
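
/* Illustrative examples: if SRC is the constant string "hello",
   c_strlen (src, 1) yields ssize_int (5); for an unknown offset into a
   string with no interior zero byte, the size_diffop_loc path above
   yields "length - offset"; and if SRC is not a string constant at all,
   NULL_TREE is returned. */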
653
654 /* Return a char pointer for a C string if it is a string constant
655 or the sum of a string constant and an integer constant. */
656
657 const char *
658 c_getstr (tree src)
659 {
660 tree offset_node;
661
662 src = string_constant (src, &offset_node);
663 if (src == 0)
664 return 0;
665
666 if (offset_node == 0)
667 return TREE_STRING_POINTER (src);
668 else if (!tree_fits_uhwi_p (offset_node)
669 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
670 return 0;
671
672 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
673 }
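
/* Illustrative example: for a tree representing "foobar" + 3, c_getstr
   returns a host pointer to "bar"; if the offset is not a known constant
   or lies past the end of the string, zero is returned as above. */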
674
675 /* Return a constant integer corresponding to target reading
676 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
677
678 static rtx
679 c_readstr (const char *str, machine_mode mode)
680 {
681 HOST_WIDE_INT ch;
682 unsigned int i, j;
683 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
684
685 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
686 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
687 / HOST_BITS_PER_WIDE_INT;
688
689 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
690 for (i = 0; i < len; i++)
691 tmp[i] = 0;
692
693 ch = 1;
694 for (i = 0; i < GET_MODE_SIZE (mode); i++)
695 {
696 j = i;
697 if (WORDS_BIG_ENDIAN)
698 j = GET_MODE_SIZE (mode) - i - 1;
699 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
700 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
701 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
702 j *= BITS_PER_UNIT;
703
704 if (ch)
705 ch = (unsigned char) str[i];
706 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
707 }
708
709 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
710 return immed_wide_int_const (c, mode);
711 }
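
/* Illustrative example: on a little-endian target, c_readstr ("abcd",
   SImode) yields the constant 0x64636261, i.e. 'a' in the least
   significant byte. Once a terminating zero byte is seen, CH stays
   zero, so shorter strings are zero-padded to the full mode width. */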
712
713 /* Cast a target constant CST to target CHAR and, if that value fits into
714 the host char type, return zero and put that value into the variable
715 pointed to by P. Otherwise return one. */
716
717 static int
718 target_char_cast (tree cst, char *p)
719 {
720 unsigned HOST_WIDE_INT val, hostval;
721
722 if (TREE_CODE (cst) != INTEGER_CST
723 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
724 return 1;
725
726 /* Do not care if it fits or not right here. */
727 val = TREE_INT_CST_LOW (cst);
728
729 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
730 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
731
732 hostval = val;
733 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
734 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
735
736 if (val != hostval)
737 return 1;
738
739 *p = hostval;
740 return 0;
741 }
742
743 /* Similar to save_expr, but assumes that arbitrary code is not executed
744 in between the multiple evaluations. In particular, we assume that a
745 non-addressable local variable will not be modified. */
746
747 static tree
748 builtin_save_expr (tree exp)
749 {
750 if (TREE_CODE (exp) == SSA_NAME
751 || (TREE_ADDRESSABLE (exp) == 0
752 && (TREE_CODE (exp) == PARM_DECL
753 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
754 return exp;
755
756 return save_expr (exp);
757 }
758
759 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
760 times to get the address of either a higher stack frame, or a return
761 address located within it (depending on FNDECL_CODE). */
762
763 static rtx
764 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
765 {
766 int i;
767
768 #ifdef INITIAL_FRAME_ADDRESS_RTX
769 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
770 #else
771 rtx tem;
772
773 /* For a zero count with __builtin_return_address, we don't care what
774 frame address we return, because target-specific definitions will
775 override us. Therefore frame pointer elimination is OK, and using
776 the soft frame pointer is OK.
777
778 For a nonzero count, or a zero count with __builtin_frame_address,
779 we require a stable offset from the current frame pointer to the
780 previous one, so we must use the hard frame pointer, and
781 we must disable frame pointer elimination. */
782 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
783 tem = frame_pointer_rtx;
784 else
785 {
786 tem = hard_frame_pointer_rtx;
787
788 /* Tell reload not to eliminate the frame pointer. */
789 crtl->accesses_prior_frames = 1;
790 }
791 #endif
792
793 /* Some machines need special handling before we can access
794 arbitrary frames. For example, on the SPARC, we must first flush
795 all register windows to the stack. */
796 #ifdef SETUP_FRAME_ADDRESSES
797 if (count > 0)
798 SETUP_FRAME_ADDRESSES ();
799 #endif
800
801 /* On the SPARC, the return address is not in the frame, it is in a
802 register. There is no way to access it off of the current frame
803 pointer, but it can be accessed off the previous frame pointer by
804 reading the value from the register window save area. */
805 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
806 count--;
807
808 /* Scan back COUNT frames to the specified frame. */
809 for (i = 0; i < count; i++)
810 {
811 /* Assume the dynamic chain pointer is in the word that the
812 frame address points to, unless otherwise specified. */
813 #ifdef DYNAMIC_CHAIN_ADDRESS
814 tem = DYNAMIC_CHAIN_ADDRESS (tem);
815 #endif
816 tem = memory_address (Pmode, tem);
817 tem = gen_frame_mem (Pmode, tem);
818 tem = copy_to_reg (tem);
819 }
820
821 /* For __builtin_frame_address, return what we've got. But, on
822 the SPARC for example, we may have to add a bias. */
823 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
824 #ifdef FRAME_ADDR_RTX
825 return FRAME_ADDR_RTX (tem);
826 #else
827 return tem;
828 #endif
829
830 /* For __builtin_return_address, get the return address from that frame. */
831 #ifdef RETURN_ADDR_RTX
832 tem = RETURN_ADDR_RTX (count, tem);
833 #else
834 tem = memory_address (Pmode,
835 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
836 tem = gen_frame_mem (Pmode, tem);
837 #endif
838 return tem;
839 }
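
/* For reference, this routine implements the frame walk behind source
   level uses such as __builtin_return_address (1) and
   __builtin_frame_address (2); FNDECL_CODE selects which of the two
   builtins is being expanded. */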
840
841 /* Alias set used for setjmp buffer. */
842 static alias_set_type setjmp_alias_set = -1;
843
844 /* Construct the leading half of a __builtin_setjmp call. Control will
845 return to RECEIVER_LABEL. This is also called directly by the SJLJ
846 exception handling code. */
847
848 void
849 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
850 {
851 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
852 rtx stack_save;
853 rtx mem;
854
855 if (setjmp_alias_set == -1)
856 setjmp_alias_set = new_alias_set ();
857
858 buf_addr = convert_memory_address (Pmode, buf_addr);
859
860 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
861
862 /* We store the frame pointer and the address of receiver_label in
863 the buffer and use the rest of it for the stack save area, which
864 is machine-dependent. */
865
866 mem = gen_rtx_MEM (Pmode, buf_addr);
867 set_mem_alias_set (mem, setjmp_alias_set);
868 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
869
870 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
871 GET_MODE_SIZE (Pmode)));
872 set_mem_alias_set (mem, setjmp_alias_set);
873
874 emit_move_insn (validize_mem (mem),
875 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
876
877 stack_save = gen_rtx_MEM (sa_mode,
878 plus_constant (Pmode, buf_addr,
879 2 * GET_MODE_SIZE (Pmode)));
880 set_mem_alias_set (stack_save, setjmp_alias_set);
881 emit_stack_save (SAVE_NONLOCAL, &stack_save);
882
883 /* If there is further processing to do, do it. */
884 #ifdef HAVE_builtin_setjmp_setup
885 if (HAVE_builtin_setjmp_setup)
886 emit_insn (gen_builtin_setjmp_setup (buf_addr));
887 #endif
888
889 /* We have a nonlocal label. */
890 cfun->has_nonlocal_label = 1;
891 }
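
/* Illustrative layout of the buffer written above, in words of
   GET_MODE_SIZE (Pmode) bytes:
     word 0   frame value from targetm.builtin_setjmp_frame_value ()
     word 1   address of RECEIVER_LABEL
     word 2+  machine-dependent stack save area (sa_mode)
   expand_builtin_longjmp below reads the words back at the same
   offsets. */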
892
893 /* Construct the trailing part of a __builtin_setjmp call. This is
894 also called directly by the SJLJ exception handling code.
895 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
896
897 void
898 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
899 {
900 rtx chain;
901
902 /* Mark the FP as used when we get here, so we have to make sure it's
903 marked as used by this function. */
904 emit_use (hard_frame_pointer_rtx);
905
906 /* Mark the static chain as clobbered here so life information
907 doesn't get messed up for it. */
908 chain = targetm.calls.static_chain (current_function_decl, true);
909 if (chain && REG_P (chain))
910 emit_clobber (chain);
911
912 /* Now put in the code to restore the frame pointer, and argument
913 pointer, if needed. */
914 #ifdef HAVE_nonlocal_goto
915 if (! HAVE_nonlocal_goto)
916 #endif
917 {
918 /* First adjust our frame pointer to its actual value. It was
919 previously set to the start of the virtual area corresponding to
920 the stacked variables when we branched here and now needs to be
921 adjusted to the actual hardware fp value.
922
923 Assignments to virtual registers are converted by
924 instantiate_virtual_regs into the corresponding assignment
925 to the underlying register (fp in this case) that makes
926 the original assignment true.
927 So the following insn will actually be decrementing fp by
928 STARTING_FRAME_OFFSET. */
929 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
930
931 /* Restoring the frame pointer also modifies the hard frame pointer.
932 Mark it used (so that the previous assignment remains live once
933 the frame pointer is eliminated) and clobbered (to represent the
934 implicit update from the assignment). */
935 emit_use (hard_frame_pointer_rtx);
936 emit_clobber (hard_frame_pointer_rtx);
937 }
938
939 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
940 if (fixed_regs[ARG_POINTER_REGNUM])
941 {
942 #ifdef ELIMINABLE_REGS
943 /* If the argument pointer can be eliminated in favor of the
944 frame pointer, we don't need to restore it. We assume here
945 that if such an elimination is present, it can always be used.
946 This is the case on all known machines; if we don't make this
947 assumption, we do unnecessary saving on many machines. */
948 size_t i;
949 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
950
951 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
952 if (elim_regs[i].from == ARG_POINTER_REGNUM
953 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
954 break;
955
956 if (i == ARRAY_SIZE (elim_regs))
957 #endif
958 {
959 /* Now restore our arg pointer from the address at which it
960 was saved in our stack frame. */
961 emit_move_insn (crtl->args.internal_arg_pointer,
962 copy_to_reg (get_arg_pointer_save_area ()));
963 }
964 }
965 #endif
966
967 #ifdef HAVE_builtin_setjmp_receiver
968 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
969 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
970 else
971 #endif
972 #ifdef HAVE_nonlocal_goto_receiver
973 if (HAVE_nonlocal_goto_receiver)
974 emit_insn (gen_nonlocal_goto_receiver ());
975 else
976 #endif
977 { /* Nothing */ }
978
979 /* We must not allow the code we just generated to be reordered by
980 scheduling. Specifically, the update of the frame pointer must
981 happen immediately, not later. */
982 emit_insn (gen_blockage ());
983 }
984
985 /* __builtin_longjmp is passed a pointer to an array of five words (not
986 all will be used on all machines). It operates similarly to the C
987 library function of the same name, but is more efficient. Much of
988 the code below is copied from the handling of non-local gotos. */
989
990 static void
991 expand_builtin_longjmp (rtx buf_addr, rtx value)
992 {
993 rtx fp, lab, stack;
994 rtx_insn *insn, *last;
995 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
996
997 /* DRAP is needed for stack realign if longjmp is expanded to current
998 function */
999 if (SUPPORTS_STACK_ALIGNMENT)
1000 crtl->need_drap = true;
1001
1002 if (setjmp_alias_set == -1)
1003 setjmp_alias_set = new_alias_set ();
1004
1005 buf_addr = convert_memory_address (Pmode, buf_addr);
1006
1007 buf_addr = force_reg (Pmode, buf_addr);
1008
1009 /* We require the user to pass a second argument of 1, because
1010 that is what builtin_setjmp will return. */
1011 gcc_assert (value == const1_rtx);
1012
1013 last = get_last_insn ();
1014 #ifdef HAVE_builtin_longjmp
1015 if (HAVE_builtin_longjmp)
1016 emit_insn (gen_builtin_longjmp (buf_addr));
1017 else
1018 #endif
1019 {
1020 fp = gen_rtx_MEM (Pmode, buf_addr);
1021 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1022 GET_MODE_SIZE (Pmode)));
1023
1024 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1025 2 * GET_MODE_SIZE (Pmode)));
1026 set_mem_alias_set (fp, setjmp_alias_set);
1027 set_mem_alias_set (lab, setjmp_alias_set);
1028 set_mem_alias_set (stack, setjmp_alias_set);
1029
1030 /* Pick up FP, label, and SP from the block and jump. This code is
1031 from expand_goto in stmt.c; see there for detailed comments. */
1032 #ifdef HAVE_nonlocal_goto
1033 if (HAVE_nonlocal_goto)
1034 /* We have to pass a value to the nonlocal_goto pattern that will
1035 get copied into the static_chain pointer, but it does not matter
1036 what that value is, because builtin_setjmp does not use it. */
1037 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1038 else
1039 #endif
1040 {
1041 lab = copy_to_reg (lab);
1042
1043 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1044 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1045
1046 emit_move_insn (hard_frame_pointer_rtx, fp);
1047 emit_stack_restore (SAVE_NONLOCAL, stack);
1048
1049 emit_use (hard_frame_pointer_rtx);
1050 emit_use (stack_pointer_rtx);
1051 emit_indirect_jump (lab);
1052 }
1053 }
1054
1055 /* Search backwards and mark the jump insn as a non-local goto.
1056 Note that this precludes the use of __builtin_longjmp to a
1057 __builtin_setjmp target in the same function. However, we've
1058 already cautioned the user that these functions are for
1059 internal exception handling use only. */
1060 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1061 {
1062 gcc_assert (insn != last);
1063
1064 if (JUMP_P (insn))
1065 {
1066 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1067 break;
1068 }
1069 else if (CALL_P (insn))
1070 break;
1071 }
1072 }
1073
1074 static inline bool
1075 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1076 {
1077 return (iter->i < iter->n);
1078 }
1079
1080 /* This function validates the types of a function call argument list
1081 against a specified list of tree_codes. If the last specifier is a 0,
1082 that represents an ellipsis; otherwise the last specifier must be a
1083 VOID_TYPE. */
1084
1085 static bool
1086 validate_arglist (const_tree callexpr, ...)
1087 {
1088 enum tree_code code;
1089 bool res = false;
1090 va_list ap;
1091 const_call_expr_arg_iterator iter;
1092 const_tree arg;
1093
1094 va_start (ap, callexpr);
1095 init_const_call_expr_arg_iterator (callexpr, &iter);
1096
1097 do
1098 {
1099 code = (enum tree_code) va_arg (ap, int);
1100 switch (code)
1101 {
1102 case 0:
1103 /* This signifies an ellipsis; any further arguments are all ok. */
1104 res = true;
1105 goto end;
1106 case VOID_TYPE:
1107 /* This signifies an endlink; if no arguments remain, return
1108 true, otherwise return false. */
1109 res = !more_const_call_expr_args_p (&iter);
1110 goto end;
1111 default:
1112 /* If no parameters remain or the parameter's code does not
1113 match the specified code, return false. Otherwise continue
1114 checking any remaining arguments. */
1115 arg = next_const_call_expr_arg (&iter);
1116 if (!validate_arg (arg, code))
1117 goto end;
1118 break;
1119 }
1120 }
1121 while (1);
1122
1123 /* We need gotos here since we can only have one VA_CLOSE in a
1124 function. */
1125 end: ;
1126 va_end (ap);
1127
1128 return res;
1129 }
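
/* Usage examples taken from callers later in this file:
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   requires exactly two pointer arguments, while
     validate_arglist (exp, POINTER_TYPE, 0)
   requires a pointer first argument and accepts anything after it. */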
1130
1131 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1132 and the address of the save area. */
1133
1134 static rtx
1135 expand_builtin_nonlocal_goto (tree exp)
1136 {
1137 tree t_label, t_save_area;
1138 rtx r_label, r_save_area, r_fp, r_sp;
1139 rtx_insn *insn;
1140
1141 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1142 return NULL_RTX;
1143
1144 t_label = CALL_EXPR_ARG (exp, 0);
1145 t_save_area = CALL_EXPR_ARG (exp, 1);
1146
1147 r_label = expand_normal (t_label);
1148 r_label = convert_memory_address (Pmode, r_label);
1149 r_save_area = expand_normal (t_save_area);
1150 r_save_area = convert_memory_address (Pmode, r_save_area);
1151 /* Copy the address of the save location to a register just in case it was
1152 based on the frame pointer. */
1153 r_save_area = copy_to_reg (r_save_area);
1154 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1155 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1156 plus_constant (Pmode, r_save_area,
1157 GET_MODE_SIZE (Pmode)));
1158
1159 crtl->has_nonlocal_goto = 1;
1160
1161 #ifdef HAVE_nonlocal_goto
1162 /* ??? We no longer need to pass the static chain value, afaik. */
1163 if (HAVE_nonlocal_goto)
1164 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1165 else
1166 #endif
1167 {
1168 r_label = copy_to_reg (r_label);
1169
1170 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1171 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1172
1173 /* Restore frame pointer for containing function. */
1174 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1175 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1176
1177 /* USE of hard_frame_pointer_rtx added for consistency;
1178 not clear if really needed. */
1179 emit_use (hard_frame_pointer_rtx);
1180 emit_use (stack_pointer_rtx);
1181
1182 /* If the architecture is using a GP register, we must
1183 conservatively assume that the target function makes use of it.
1184 The prologue of functions with nonlocal gotos must therefore
1185 initialize the GP register to the appropriate value, and we
1186 must then make sure that this value is live at the point
1187 of the jump. (Note that this doesn't necessarily apply
1188 to targets with a nonlocal_goto pattern; they are free
1189 to implement it in their own way. Note also that this is
1190 a no-op if the GP register is a global invariant.) */
1191 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1192 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1193 emit_use (pic_offset_table_rtx);
1194
1195 emit_indirect_jump (r_label);
1196 }
1197
1198 /* Search backwards to the jump insn and mark it as a
1199 non-local goto. */
1200 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1201 {
1202 if (JUMP_P (insn))
1203 {
1204 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1205 break;
1206 }
1207 else if (CALL_P (insn))
1208 break;
1209 }
1210
1211 return const0_rtx;
1212 }
1213
1214 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1215 (not all will be used on all machines) that was passed to __builtin_setjmp.
1216 It updates the stack pointer in that block to correspond to the current
1217 stack pointer. */
1218
1219 static void
1220 expand_builtin_update_setjmp_buf (rtx buf_addr)
1221 {
1222 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1223 rtx stack_save
1224 = gen_rtx_MEM (sa_mode,
1225 memory_address
1226 (sa_mode,
1227 plus_constant (Pmode, buf_addr,
1228 2 * GET_MODE_SIZE (Pmode))));
1229
1230 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1231 }
1232
1233 /* Expand a call to __builtin_prefetch. For a target that does not support
1234 data prefetch, evaluate the memory address argument in case it has side
1235 effects. */
1236
1237 static void
1238 expand_builtin_prefetch (tree exp)
1239 {
1240 tree arg0, arg1, arg2;
1241 int nargs;
1242 rtx op0, op1, op2;
1243
1244 if (!validate_arglist (exp, POINTER_TYPE, 0))
1245 return;
1246
1247 arg0 = CALL_EXPR_ARG (exp, 0);
1248
1249 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1250 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1251 locality). */
1252 nargs = call_expr_nargs (exp);
1253 if (nargs > 1)
1254 arg1 = CALL_EXPR_ARG (exp, 1);
1255 else
1256 arg1 = integer_zero_node;
1257 if (nargs > 2)
1258 arg2 = CALL_EXPR_ARG (exp, 2);
1259 else
1260 arg2 = integer_three_node;
1261
1262 /* Argument 0 is an address. */
1263 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1264
1265 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1266 if (TREE_CODE (arg1) != INTEGER_CST)
1267 {
1268 error ("second argument to %<__builtin_prefetch%> must be a constant");
1269 arg1 = integer_zero_node;
1270 }
1271 op1 = expand_normal (arg1);
1272 /* Argument 1 must be either zero or one. */
1273 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1274 {
1275 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1276 " using zero");
1277 op1 = const0_rtx;
1278 }
1279
1280 /* Argument 2 (locality) must be a compile-time constant int. */
1281 if (TREE_CODE (arg2) != INTEGER_CST)
1282 {
1283 error ("third argument to %<__builtin_prefetch%> must be a constant");
1284 arg2 = integer_zero_node;
1285 }
1286 op2 = expand_normal (arg2);
1287 /* Argument 2 must be 0, 1, 2, or 3. */
1288 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1289 {
1290 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1291 op2 = const0_rtx;
1292 }
1293
1294 #ifdef HAVE_prefetch
1295 if (HAVE_prefetch)
1296 {
1297 struct expand_operand ops[3];
1298
1299 create_address_operand (&ops[0], op0);
1300 create_integer_operand (&ops[1], INTVAL (op1));
1301 create_integer_operand (&ops[2], INTVAL (op2));
1302 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1303 return;
1304 }
1305 #endif
1306
1307 /* Don't do anything with direct references to volatile memory, but
1308 generate code to handle other side effects. */
1309 if (!MEM_P (op0) && side_effects_p (op0))
1310 emit_insn (op0);
1311 }
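
/* Source-level illustration: __builtin_prefetch (p) is handled as
   __builtin_prefetch (p, 0, 3), i.e. a read prefetch with maximum
   locality, per the defaults supplied above for the optional
   arguments. */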
1312
1313 /* Get a MEM rtx for expression EXP which is the address of an operand
1314 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1315 the maximum length of the block of memory that might be accessed or
1316 NULL if unknown. */
1317
1318 static rtx
1319 get_memory_rtx (tree exp, tree len)
1320 {
1321 tree orig_exp = exp;
1322 rtx addr, mem;
1323
1324 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1325 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1326 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1327 exp = TREE_OPERAND (exp, 0);
1328
1329 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1330 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1331
1332 /* Get an expression we can use to find the attributes to assign to MEM.
1333 First remove any nops. */
1334 while (CONVERT_EXPR_P (exp)
1335 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1336 exp = TREE_OPERAND (exp, 0);
1337
1338 /* Build a MEM_REF representing the whole accessed area as a byte blob
1339 (as builtin stringops may alias with anything). */
1340 exp = fold_build2 (MEM_REF,
1341 build_array_type (char_type_node,
1342 build_range_type (sizetype,
1343 size_one_node, len)),
1344 exp, build_int_cst (ptr_type_node, 0));
1345
1346 /* If the MEM_REF has no acceptable address, try to get the base object
1347 from the original address we got, and build an all-aliasing
1348 unknown-sized access to that one. */
1349 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1350 set_mem_attributes (mem, exp, 0);
1351 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1352 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1353 0))))
1354 {
1355 exp = build_fold_addr_expr (exp);
1356 exp = fold_build2 (MEM_REF,
1357 build_array_type (char_type_node,
1358 build_range_type (sizetype,
1359 size_zero_node,
1360 NULL)),
1361 exp, build_int_cst (ptr_type_node, 0));
1362 set_mem_attributes (mem, exp, 0);
1363 }
1364 set_mem_alias_set (mem, 0);
1365 return mem;
1366 }
1367 \f
1368 /* Built-in functions to perform an untyped call and return. */
1369
1370 #define apply_args_mode \
1371 (this_target_builtins->x_apply_args_mode)
1372 #define apply_result_mode \
1373 (this_target_builtins->x_apply_result_mode)
1374
1375 /* Return the size required for the block returned by __builtin_apply_args,
1376 and initialize apply_args_mode. */
1377
1378 static int
1379 apply_args_size (void)
1380 {
1381 static int size = -1;
1382 int align;
1383 unsigned int regno;
1384 machine_mode mode;
1385
1386 /* The values computed by this function never change. */
1387 if (size < 0)
1388 {
1389 /* The first value is the incoming arg-pointer. */
1390 size = GET_MODE_SIZE (Pmode);
1391
1392 /* The second value is the structure value address unless this is
1393 passed as an "invisible" first argument. */
1394 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1395 size += GET_MODE_SIZE (Pmode);
1396
1397 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1398 if (FUNCTION_ARG_REGNO_P (regno))
1399 {
1400 mode = targetm.calls.get_raw_arg_mode (regno);
1401
1402 gcc_assert (mode != VOIDmode);
1403
1404 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1405 if (size % align != 0)
1406 size = CEIL (size, align) * align;
1407 size += GET_MODE_SIZE (mode);
1408 apply_args_mode[regno] = mode;
1409 }
1410 else
1411 {
1412 apply_args_mode[regno] = VOIDmode;
1413 }
1414 }
1415 return size;
1416 }
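
/* Illustrative layout of the __builtin_apply_args block whose size is
   computed above (all sizes are target-dependent):
     offset 0               the incoming arg pointer (Pmode)
     next Pmode-sized slot  the structure value address, unless it is
                            passed as an invisible first argument
     then, for each regno with FUNCTION_ARG_REGNO_P, that argument
     register, aligned to its mode. expand_builtin_apply_args_1 below
     stores into the block using the same layout. */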
1417
1418 /* Return the size required for the block returned by __builtin_apply,
1419 and initialize apply_result_mode. */
1420
1421 static int
1422 apply_result_size (void)
1423 {
1424 static int size = -1;
1425 int align, regno;
1426 machine_mode mode;
1427
1428 /* The values computed by this function never change. */
1429 if (size < 0)
1430 {
1431 size = 0;
1432
1433 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1434 if (targetm.calls.function_value_regno_p (regno))
1435 {
1436 mode = targetm.calls.get_raw_result_mode (regno);
1437
1438 gcc_assert (mode != VOIDmode);
1439
1440 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1441 if (size % align != 0)
1442 size = CEIL (size, align) * align;
1443 size += GET_MODE_SIZE (mode);
1444 apply_result_mode[regno] = mode;
1445 }
1446 else
1447 apply_result_mode[regno] = VOIDmode;
1448
1449 /* Allow targets that use untyped_call and untyped_return to override
1450 the size so that machine-specific information can be stored here. */
1451 #ifdef APPLY_RESULT_SIZE
1452 size = APPLY_RESULT_SIZE;
1453 #endif
1454 }
1455 return size;
1456 }
1457
1458 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1459 /* Create a vector describing the result block RESULT. If SAVEP is true,
1460 the result block is used to save the values; otherwise it is used to
1461 restore the values. */
1462
1463 static rtx
1464 result_vector (int savep, rtx result)
1465 {
1466 int regno, size, align, nelts;
1467 machine_mode mode;
1468 rtx reg, mem;
1469 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1470
1471 size = nelts = 0;
1472 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1473 if ((mode = apply_result_mode[regno]) != VOIDmode)
1474 {
1475 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1476 if (size % align != 0)
1477 size = CEIL (size, align) * align;
1478 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1479 mem = adjust_address (result, mode, size);
1480 savevec[nelts++] = (savep
1481 ? gen_rtx_SET (mem, reg)
1482 : gen_rtx_SET (reg, mem));
1483 size += GET_MODE_SIZE (mode);
1484 }
1485 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1486 }
1487 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1488
1489 /* Save the state required to perform an untyped call with the same
1490 arguments as were passed to the current function. */
1491
1492 static rtx
1493 expand_builtin_apply_args_1 (void)
1494 {
1495 rtx registers, tem;
1496 int size, align, regno;
1497 machine_mode mode;
1498 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1499
1500 /* Create a block where the arg-pointer, structure value address,
1501 and argument registers can be saved. */
1502 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1503
1504 /* Walk past the arg-pointer and structure value address. */
1505 size = GET_MODE_SIZE (Pmode);
1506 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1507 size += GET_MODE_SIZE (Pmode);
1508
1509 /* Save each register used in calling a function to the block. */
1510 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1511 if ((mode = apply_args_mode[regno]) != VOIDmode)
1512 {
1513 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1514 if (size % align != 0)
1515 size = CEIL (size, align) * align;
1516
1517 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1518
1519 emit_move_insn (adjust_address (registers, mode, size), tem);
1520 size += GET_MODE_SIZE (mode);
1521 }
1522
1523 /* Save the arg pointer to the block. */
1524 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1525 #ifdef STACK_GROWS_DOWNWARD
1526 /* We need the pointer as the caller actually passed it to us, not
1527 as we might have pretended it was passed. Make sure it's a valid
1528 operand, as emit_move_insn isn't expected to handle a PLUS. */
1529 tem
1530 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1531 NULL_RTX);
1532 #endif
1533 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1534
1535 size = GET_MODE_SIZE (Pmode);
1536
1537 /* Save the structure value address unless this is passed as an
1538 "invisible" first argument. */
1539 if (struct_incoming_value)
1540 {
1541 emit_move_insn (adjust_address (registers, Pmode, size),
1542 copy_to_reg (struct_incoming_value));
1543 size += GET_MODE_SIZE (Pmode);
1544 }
1545
1546 /* Return the address of the block. */
1547 return copy_addr_to_reg (XEXP (registers, 0));
1548 }
1549
1550 /* __builtin_apply_args returns a block of memory allocated on
1551 the stack into which are stored the arg pointer, structure
1552 value address, static chain, and all the registers that might
1553 possibly be used in performing a function call. The code is
1554 moved to the start of the function so the incoming values are
1555 saved. */
1556
1557 static rtx
1558 expand_builtin_apply_args (void)
1559 {
1560 /* Don't do __builtin_apply_args more than once in a function.
1561 Save the result of the first call and reuse it. */
1562 if (apply_args_value != 0)
1563 return apply_args_value;
1564 {
1565 /* When this function is called, it means that registers must be
1566 saved on entry to this function. So we migrate the
1567 call to the first insn of this function. */
1568 rtx temp;
1569 rtx seq;
1570
1571 start_sequence ();
1572 temp = expand_builtin_apply_args_1 ();
1573 seq = get_insns ();
1574 end_sequence ();
1575
1576 apply_args_value = temp;
1577
1578 /* Put the insns after the NOTE that starts the function.
1579 If this is inside a start_sequence, make the outer-level insn
1580 chain current, so the code is placed at the start of the
1581 function. If internal_arg_pointer is a non-virtual pseudo,
1582 it needs to be placed after the function that initializes
1583 that pseudo. */
1584 push_topmost_sequence ();
1585 if (REG_P (crtl->args.internal_arg_pointer)
1586 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1587 emit_insn_before (seq, parm_birth_insn);
1588 else
1589 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1590 pop_topmost_sequence ();
1591 return temp;
1592 }
1593 }
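
/* Source-level illustration of how the untyped call builtins fit
   together (ARGS, FN, SIZE and RESULT are hypothetical names):
     void *args = __builtin_apply_args ();
     void *result = __builtin_apply (fn, args, size);
     __builtin_return (result);
   These are expanded by this function, expand_builtin_apply and
   expand_builtin_return respectively. */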
1594
1595 /* Perform an untyped call and save the state required to perform an
1596 untyped return of whatever value was returned by the given function. */
1597
1598 static rtx
1599 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1600 {
1601 int size, align, regno;
1602 machine_mode mode;
1603 rtx incoming_args, result, reg, dest, src;
1604 rtx_call_insn *call_insn;
1605 rtx old_stack_level = 0;
1606 rtx call_fusage = 0;
1607 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1608
1609 arguments = convert_memory_address (Pmode, arguments);
1610
1611 /* Create a block where the return registers can be saved. */
1612 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1613
1614 /* Fetch the arg pointer from the ARGUMENTS block. */
1615 incoming_args = gen_reg_rtx (Pmode);
1616 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1617 #ifndef STACK_GROWS_DOWNWARD
1618 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1619 incoming_args, 0, OPTAB_LIB_WIDEN);
1620 #endif
1621
1622 /* Push a new argument block and copy the arguments. Do not allow
1623 the (potential) memcpy call below to interfere with our stack
1624 manipulations. */
1625 do_pending_stack_adjust ();
1626 NO_DEFER_POP;
1627
1628 /* Save the stack with nonlocal if available. */
1629 #ifdef HAVE_save_stack_nonlocal
1630 if (HAVE_save_stack_nonlocal)
1631 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1632 else
1633 #endif
1634 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1635
1636 /* Allocate a block of memory onto the stack and copy the memory
1637 arguments to the outgoing arguments address. We can pass TRUE
1638 as the 4th argument because we just saved the stack pointer
1639 and will restore it right after the call. */
1640 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1641
1642 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1643 may have already set current_function_calls_alloca to true.
1644 current_function_calls_alloca won't be set if argsize is zero,
1645 so we have to guarantee need_drap is true here. */
1646 if (SUPPORTS_STACK_ALIGNMENT)
1647 crtl->need_drap = true;
1648
1649 dest = virtual_outgoing_args_rtx;
1650 #ifndef STACK_GROWS_DOWNWARD
1651 if (CONST_INT_P (argsize))
1652 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1653 else
1654 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1655 #endif
1656 dest = gen_rtx_MEM (BLKmode, dest);
1657 set_mem_align (dest, PARM_BOUNDARY);
1658 src = gen_rtx_MEM (BLKmode, incoming_args);
1659 set_mem_align (src, PARM_BOUNDARY);
1660 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1661
1662 /* Refer to the argument block. */
1663 apply_args_size ();
1664 arguments = gen_rtx_MEM (BLKmode, arguments);
1665 set_mem_align (arguments, PARM_BOUNDARY);
1666
1667 /* Walk past the arg-pointer and structure value address. */
1668 size = GET_MODE_SIZE (Pmode);
1669 if (struct_value)
1670 size += GET_MODE_SIZE (Pmode);
1671
1672 /* Restore each of the registers previously saved. Make USE insns
1673 for each of these registers for use in making the call. */
1674 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1675 if ((mode = apply_args_mode[regno]) != VOIDmode)
1676 {
1677 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1678 if (size % align != 0)
1679 size = CEIL (size, align) * align;
1680 reg = gen_rtx_REG (mode, regno);
1681 emit_move_insn (reg, adjust_address (arguments, mode, size));
1682 use_reg (&call_fusage, reg);
1683 size += GET_MODE_SIZE (mode);
1684 }
1685
1686 /* Restore the structure value address unless this is passed as an
1687 "invisible" first argument. */
1688 size = GET_MODE_SIZE (Pmode);
1689 if (struct_value)
1690 {
1691 rtx value = gen_reg_rtx (Pmode);
1692 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1693 emit_move_insn (struct_value, value);
1694 if (REG_P (struct_value))
1695 use_reg (&call_fusage, struct_value);
1696 size += GET_MODE_SIZE (Pmode);
1697 }
1698
1699 /* All arguments and registers used for the call are set up by now! */
1700 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1701
1702 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1703 and we don't want to load it into a register as an optimization,
1704 because prepare_call_address already did it if it should be done. */
1705 if (GET_CODE (function) != SYMBOL_REF)
1706 function = memory_address (FUNCTION_MODE, function);
1707
1708 /* Generate the actual call instruction and save the return value. */
1709 #ifdef HAVE_untyped_call
1710 if (HAVE_untyped_call)
1711 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1712 result, result_vector (1, result)));
1713 else
1714 #endif
1715 #ifdef HAVE_call_value
1716 if (HAVE_call_value)
1717 {
1718 rtx valreg = 0;
1719
1720 /* Locate the unique return register. It is not possible to
1721 express a call that sets more than one return register using
1722 call_value; use untyped_call for that. In fact, untyped_call
1723 only needs to save the return registers in the given block. */
1724 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1725 if ((mode = apply_result_mode[regno]) != VOIDmode)
1726 {
1727 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1728
1729 valreg = gen_rtx_REG (mode, regno);
1730 }
1731
1732 emit_call_insn (GEN_CALL_VALUE (valreg,
1733 gen_rtx_MEM (FUNCTION_MODE, function),
1734 const0_rtx, NULL_RTX, const0_rtx));
1735
1736 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1737 }
1738 else
1739 #endif
1740 gcc_unreachable ();
1741
1742 /* Find the CALL insn we just emitted, and attach the register usage
1743 information. */
1744 call_insn = last_call_insn ();
1745 add_function_usage_to (call_insn, call_fusage);
1746
1747 /* Restore the stack. */
1748 #ifdef HAVE_save_stack_nonlocal
1749 if (HAVE_save_stack_nonlocal)
1750 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1751 else
1752 #endif
1753 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1754 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1755
1756 OK_DEFER_POP;
1757
1758 /* Return the address of the result block. */
1759 result = copy_addr_to_reg (XEXP (result, 0));
1760 return convert_memory_address (ptr_mode, result);
1761 }
1762
1763 /* Perform an untyped return. */
1764
1765 static void
1766 expand_builtin_return (rtx result)
1767 {
1768 int size, align, regno;
1769 machine_mode mode;
1770 rtx reg;
1771 rtx_insn *call_fusage = 0;
1772
1773 result = convert_memory_address (Pmode, result);
1774
1775 apply_result_size ();
1776 result = gen_rtx_MEM (BLKmode, result);
1777
1778 #ifdef HAVE_untyped_return
1779 if (HAVE_untyped_return)
1780 {
1781 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1782 emit_barrier ();
1783 return;
1784 }
1785 #endif
1786
1787 /* Restore the return value and note that each value is used. */
1788 size = 0;
1789 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1790 if ((mode = apply_result_mode[regno]) != VOIDmode)
1791 {
1792 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1793 if (size % align != 0)
1794 size = CEIL (size, align) * align;
1795 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1796 emit_move_insn (reg, adjust_address (result, mode, size));
1797
1798 push_to_sequence (call_fusage);
1799 emit_use (reg);
1800 call_fusage = get_insns ();
1801 end_sequence ();
1802 size += GET_MODE_SIZE (mode);
1803 }
1804
1805 /* Put the USE insns before the return. */
1806 emit_insn (call_fusage);
1807
1808 /* Return whatever values were restored by jumping directly to the end
1809 of the function. */
1810 expand_naked_return ();
1811 }
1812
1813 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1814
1815 static enum type_class
1816 type_to_class (tree type)
1817 {
1818 switch (TREE_CODE (type))
1819 {
1820 case VOID_TYPE: return void_type_class;
1821 case INTEGER_TYPE: return integer_type_class;
1822 case ENUMERAL_TYPE: return enumeral_type_class;
1823 case BOOLEAN_TYPE: return boolean_type_class;
1824 case POINTER_TYPE: return pointer_type_class;
1825 case REFERENCE_TYPE: return reference_type_class;
1826 case OFFSET_TYPE: return offset_type_class;
1827 case REAL_TYPE: return real_type_class;
1828 case COMPLEX_TYPE: return complex_type_class;
1829 case FUNCTION_TYPE: return function_type_class;
1830 case METHOD_TYPE: return method_type_class;
1831 case RECORD_TYPE: return record_type_class;
1832 case UNION_TYPE:
1833 case QUAL_UNION_TYPE: return union_type_class;
1834 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1835 ? string_type_class : array_type_class);
1836 case LANG_TYPE: return lang_type_class;
1837 default: return no_type_class;
1838 }
1839 }
1840
1841 /* Expand a call EXP to __builtin_classify_type. */
1842
1843 static rtx
1844 expand_builtin_classify_type (tree exp)
1845 {
1846 if (call_expr_nargs (exp))
1847 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1848 return GEN_INT (no_type_class);
1849 }
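/* Illustrative sketch (not part of the compiler): in user code the
   builtin folds to a constant describing the argument's type class,
   with the numeric values coming from the enum in typeclass.h, e.g.

     int i = __builtin_classify_type (42);          // integer_type_class
     int d = __builtin_classify_type (3.14);        // real_type_class
     int p = __builtin_classify_type ((void *) 0);  // pointer_type_class
*/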
1850
1851 /* This helper macro, meant to be used in mathfn_built_in below,
1852 determines which among a set of three builtin math functions is
1853 appropriate for a given type mode. The `F' and `L' cases are
1854 automatically generated from the `double' case. */
1855 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1856 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1857 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1858 fcodel = BUILT_IN_MATHFN##L ; break;
1859 /* Similar to above, but appends _R after any F/L suffix. */
1860 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1861 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1862 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1863 fcodel = BUILT_IN_MATHFN##L_R ; break;
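/* For reference, a single use such as CASE_MATHFN (BUILT_IN_SQRT)
   expands (roughly) to:

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;
*/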
1864
1865 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1866 if available. If IMPLICIT is true use the implicit builtin declaration,
1867 otherwise use the explicit declaration. If we can't do the conversion,
1868 return zero. */
1869
1870 static tree
1871 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1872 {
1873 enum built_in_function fcode, fcodef, fcodel, fcode2;
1874
1875 switch (fn)
1876 {
1877 CASE_MATHFN (BUILT_IN_ACOS)
1878 CASE_MATHFN (BUILT_IN_ACOSH)
1879 CASE_MATHFN (BUILT_IN_ASIN)
1880 CASE_MATHFN (BUILT_IN_ASINH)
1881 CASE_MATHFN (BUILT_IN_ATAN)
1882 CASE_MATHFN (BUILT_IN_ATAN2)
1883 CASE_MATHFN (BUILT_IN_ATANH)
1884 CASE_MATHFN (BUILT_IN_CBRT)
1885 CASE_MATHFN (BUILT_IN_CEIL)
1886 CASE_MATHFN (BUILT_IN_CEXPI)
1887 CASE_MATHFN (BUILT_IN_COPYSIGN)
1888 CASE_MATHFN (BUILT_IN_COS)
1889 CASE_MATHFN (BUILT_IN_COSH)
1890 CASE_MATHFN (BUILT_IN_DREM)
1891 CASE_MATHFN (BUILT_IN_ERF)
1892 CASE_MATHFN (BUILT_IN_ERFC)
1893 CASE_MATHFN (BUILT_IN_EXP)
1894 CASE_MATHFN (BUILT_IN_EXP10)
1895 CASE_MATHFN (BUILT_IN_EXP2)
1896 CASE_MATHFN (BUILT_IN_EXPM1)
1897 CASE_MATHFN (BUILT_IN_FABS)
1898 CASE_MATHFN (BUILT_IN_FDIM)
1899 CASE_MATHFN (BUILT_IN_FLOOR)
1900 CASE_MATHFN (BUILT_IN_FMA)
1901 CASE_MATHFN (BUILT_IN_FMAX)
1902 CASE_MATHFN (BUILT_IN_FMIN)
1903 CASE_MATHFN (BUILT_IN_FMOD)
1904 CASE_MATHFN (BUILT_IN_FREXP)
1905 CASE_MATHFN (BUILT_IN_GAMMA)
1906 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1907 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1908 CASE_MATHFN (BUILT_IN_HYPOT)
1909 CASE_MATHFN (BUILT_IN_ILOGB)
1910 CASE_MATHFN (BUILT_IN_ICEIL)
1911 CASE_MATHFN (BUILT_IN_IFLOOR)
1912 CASE_MATHFN (BUILT_IN_INF)
1913 CASE_MATHFN (BUILT_IN_IRINT)
1914 CASE_MATHFN (BUILT_IN_IROUND)
1915 CASE_MATHFN (BUILT_IN_ISINF)
1916 CASE_MATHFN (BUILT_IN_J0)
1917 CASE_MATHFN (BUILT_IN_J1)
1918 CASE_MATHFN (BUILT_IN_JN)
1919 CASE_MATHFN (BUILT_IN_LCEIL)
1920 CASE_MATHFN (BUILT_IN_LDEXP)
1921 CASE_MATHFN (BUILT_IN_LFLOOR)
1922 CASE_MATHFN (BUILT_IN_LGAMMA)
1923 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1924 CASE_MATHFN (BUILT_IN_LLCEIL)
1925 CASE_MATHFN (BUILT_IN_LLFLOOR)
1926 CASE_MATHFN (BUILT_IN_LLRINT)
1927 CASE_MATHFN (BUILT_IN_LLROUND)
1928 CASE_MATHFN (BUILT_IN_LOG)
1929 CASE_MATHFN (BUILT_IN_LOG10)
1930 CASE_MATHFN (BUILT_IN_LOG1P)
1931 CASE_MATHFN (BUILT_IN_LOG2)
1932 CASE_MATHFN (BUILT_IN_LOGB)
1933 CASE_MATHFN (BUILT_IN_LRINT)
1934 CASE_MATHFN (BUILT_IN_LROUND)
1935 CASE_MATHFN (BUILT_IN_MODF)
1936 CASE_MATHFN (BUILT_IN_NAN)
1937 CASE_MATHFN (BUILT_IN_NANS)
1938 CASE_MATHFN (BUILT_IN_NEARBYINT)
1939 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1940 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1941 CASE_MATHFN (BUILT_IN_POW)
1942 CASE_MATHFN (BUILT_IN_POWI)
1943 CASE_MATHFN (BUILT_IN_POW10)
1944 CASE_MATHFN (BUILT_IN_REMAINDER)
1945 CASE_MATHFN (BUILT_IN_REMQUO)
1946 CASE_MATHFN (BUILT_IN_RINT)
1947 CASE_MATHFN (BUILT_IN_ROUND)
1948 CASE_MATHFN (BUILT_IN_SCALB)
1949 CASE_MATHFN (BUILT_IN_SCALBLN)
1950 CASE_MATHFN (BUILT_IN_SCALBN)
1951 CASE_MATHFN (BUILT_IN_SIGNBIT)
1952 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1953 CASE_MATHFN (BUILT_IN_SIN)
1954 CASE_MATHFN (BUILT_IN_SINCOS)
1955 CASE_MATHFN (BUILT_IN_SINH)
1956 CASE_MATHFN (BUILT_IN_SQRT)
1957 CASE_MATHFN (BUILT_IN_TAN)
1958 CASE_MATHFN (BUILT_IN_TANH)
1959 CASE_MATHFN (BUILT_IN_TGAMMA)
1960 CASE_MATHFN (BUILT_IN_TRUNC)
1961 CASE_MATHFN (BUILT_IN_Y0)
1962 CASE_MATHFN (BUILT_IN_Y1)
1963 CASE_MATHFN (BUILT_IN_YN)
1964
1965 default:
1966 return NULL_TREE;
1967 }
1968
1969 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1970 fcode2 = fcode;
1971 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1972 fcode2 = fcodef;
1973 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1974 fcode2 = fcodel;
1975 else
1976 return NULL_TREE;
1977
1978 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1979 return NULL_TREE;
1980
1981 return builtin_decl_explicit (fcode2);
1982 }
1983
1984 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1985
1986 tree
1987 mathfn_built_in (tree type, enum built_in_function fn)
1988 {
1989 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1990 }
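/* Illustrative sketch (not part of the compiler): a caller that has a
   float-typed argument and wants the float variant of sin could write

     tree sinf_decl = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   which returns the BUILT_IN_SINF declaration, or NULL_TREE when no
   implicit declaration is available for the target.  */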
1991
1992 /* If errno must be maintained, expand the RTL to check if the result,
1993 TARGET, of a built-in function call, EXP, is NaN, and if so set
1994 errno to EDOM. */
1995
1996 static void
1997 expand_errno_check (tree exp, rtx target)
1998 {
1999 rtx_code_label *lab = gen_label_rtx ();
2000
2001 /* Test the result; if it is NaN, set errno=EDOM because
2002 the argument was not in the domain. */
2003 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
2004 NULL_RTX, NULL_RTX, lab,
2005 /* The jump is very likely. */
2006 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
2007
2008 #ifdef TARGET_EDOM
2009 /* If this built-in doesn't throw an exception, set errno directly. */
2010 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
2011 {
2012 #ifdef GEN_ERRNO_RTX
2013 rtx errno_rtx = GEN_ERRNO_RTX;
2014 #else
2015 rtx errno_rtx
2016 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2017 #endif
2018 emit_move_insn (errno_rtx,
2019 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2020 emit_label (lab);
2021 return;
2022 }
2023 #endif
2024
2025 /* Make sure the library call isn't expanded as a tail call. */
2026 CALL_EXPR_TAILCALL (exp) = 0;
2027
2028 /* We can't set errno=EDOM directly; let the library call do it.
2029 Pop the arguments right away in case the call gets deleted. */
2030 NO_DEFER_POP;
2031 expand_call (exp, target, 0);
2032 OK_DEFER_POP;
2033 emit_label (lab);
2034 }
2035
2036 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2037 Return NULL_RTX if a normal call should be emitted rather than expanding
2038 the function in-line. EXP is the expression that is a call to the builtin
2039 function; if convenient, the result should be placed in TARGET.
2040 SUBTARGET may be used as the target for computing one of EXP's operands. */
2041
2042 static rtx
2043 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2044 {
2045 optab builtin_optab;
2046 rtx op0;
2047 rtx_insn *insns;
2048 tree fndecl = get_callee_fndecl (exp);
2049 machine_mode mode;
2050 bool errno_set = false;
2051 bool try_widening = false;
2052 tree arg;
2053
2054 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2055 return NULL_RTX;
2056
2057 arg = CALL_EXPR_ARG (exp, 0);
2058
2059 switch (DECL_FUNCTION_CODE (fndecl))
2060 {
2061 CASE_FLT_FN (BUILT_IN_SQRT):
2062 errno_set = ! tree_expr_nonnegative_p (arg);
2063 try_widening = true;
2064 builtin_optab = sqrt_optab;
2065 break;
2066 CASE_FLT_FN (BUILT_IN_EXP):
2067 errno_set = true; builtin_optab = exp_optab; break;
2068 CASE_FLT_FN (BUILT_IN_EXP10):
2069 CASE_FLT_FN (BUILT_IN_POW10):
2070 errno_set = true; builtin_optab = exp10_optab; break;
2071 CASE_FLT_FN (BUILT_IN_EXP2):
2072 errno_set = true; builtin_optab = exp2_optab; break;
2073 CASE_FLT_FN (BUILT_IN_EXPM1):
2074 errno_set = true; builtin_optab = expm1_optab; break;
2075 CASE_FLT_FN (BUILT_IN_LOGB):
2076 errno_set = true; builtin_optab = logb_optab; break;
2077 CASE_FLT_FN (BUILT_IN_LOG):
2078 errno_set = true; builtin_optab = log_optab; break;
2079 CASE_FLT_FN (BUILT_IN_LOG10):
2080 errno_set = true; builtin_optab = log10_optab; break;
2081 CASE_FLT_FN (BUILT_IN_LOG2):
2082 errno_set = true; builtin_optab = log2_optab; break;
2083 CASE_FLT_FN (BUILT_IN_LOG1P):
2084 errno_set = true; builtin_optab = log1p_optab; break;
2085 CASE_FLT_FN (BUILT_IN_ASIN):
2086 builtin_optab = asin_optab; break;
2087 CASE_FLT_FN (BUILT_IN_ACOS):
2088 builtin_optab = acos_optab; break;
2089 CASE_FLT_FN (BUILT_IN_TAN):
2090 builtin_optab = tan_optab; break;
2091 CASE_FLT_FN (BUILT_IN_ATAN):
2092 builtin_optab = atan_optab; break;
2093 CASE_FLT_FN (BUILT_IN_FLOOR):
2094 builtin_optab = floor_optab; break;
2095 CASE_FLT_FN (BUILT_IN_CEIL):
2096 builtin_optab = ceil_optab; break;
2097 CASE_FLT_FN (BUILT_IN_TRUNC):
2098 builtin_optab = btrunc_optab; break;
2099 CASE_FLT_FN (BUILT_IN_ROUND):
2100 builtin_optab = round_optab; break;
2101 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2102 builtin_optab = nearbyint_optab;
2103 if (flag_trapping_math)
2104 break;
2105 /* Else fallthrough and expand as rint. */
2106 CASE_FLT_FN (BUILT_IN_RINT):
2107 builtin_optab = rint_optab; break;
2108 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2109 builtin_optab = significand_optab; break;
2110 default:
2111 gcc_unreachable ();
2112 }
2113
2114 /* Make a suitable register to place result in. */
2115 mode = TYPE_MODE (TREE_TYPE (exp));
2116
2117 if (! flag_errno_math || ! HONOR_NANS (mode))
2118 errno_set = false;
2119
2120 /* Before working hard, check whether the instruction is available, but try
2121 to widen the mode for specific operations. */
2122 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2123 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2124 && (!errno_set || !optimize_insn_for_size_p ()))
2125 {
2126 rtx result = gen_reg_rtx (mode);
2127
2128 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2129 need to expand the argument again. This way, we will not perform
2130 side-effects more than once. */
2131 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2132
2133 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2134
2135 start_sequence ();
2136
2137 /* Compute into RESULT.
2138 Set RESULT to wherever the result comes back. */
2139 result = expand_unop (mode, builtin_optab, op0, result, 0);
2140
2141 if (result != 0)
2142 {
2143 if (errno_set)
2144 expand_errno_check (exp, result);
2145
2146 /* Output the entire sequence. */
2147 insns = get_insns ();
2148 end_sequence ();
2149 emit_insn (insns);
2150 return result;
2151 }
2152
2153 /* If we were unable to expand via the builtin, stop the sequence
2154 (without outputting the insns) and call the library function
2155 with the stabilized argument list. */
2156 end_sequence ();
2157 }
2158
2159 return expand_call (exp, target, target == const0_rtx);
2160 }
2161
2162 /* Expand a call to the builtin binary math functions (pow and atan2).
2163 Return NULL_RTX if a normal call should be emitted rather than expanding the
2164 function in-line. EXP is the expression that is a call to the builtin
2165 function; if convenient, the result should be placed in TARGET.
2166 SUBTARGET may be used as the target for computing one of EXP's
2167 operands. */
2168
2169 static rtx
2170 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2171 {
2172 optab builtin_optab;
2173 rtx op0, op1, result;
2174 rtx_insn *insns;
2175 int op1_type = REAL_TYPE;
2176 tree fndecl = get_callee_fndecl (exp);
2177 tree arg0, arg1;
2178 machine_mode mode;
2179 bool errno_set = true;
2180
2181 switch (DECL_FUNCTION_CODE (fndecl))
2182 {
2183 CASE_FLT_FN (BUILT_IN_SCALBN):
2184 CASE_FLT_FN (BUILT_IN_SCALBLN):
2185 CASE_FLT_FN (BUILT_IN_LDEXP):
2186 op1_type = INTEGER_TYPE;
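/* FALLTHRU */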
2187 default:
2188 break;
2189 }
2190
2191 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2192 return NULL_RTX;
2193
2194 arg0 = CALL_EXPR_ARG (exp, 0);
2195 arg1 = CALL_EXPR_ARG (exp, 1);
2196
2197 switch (DECL_FUNCTION_CODE (fndecl))
2198 {
2199 CASE_FLT_FN (BUILT_IN_POW):
2200 builtin_optab = pow_optab; break;
2201 CASE_FLT_FN (BUILT_IN_ATAN2):
2202 builtin_optab = atan2_optab; break;
2203 CASE_FLT_FN (BUILT_IN_SCALB):
2204 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2205 return 0;
2206 builtin_optab = scalb_optab; break;
2207 CASE_FLT_FN (BUILT_IN_SCALBN):
2208 CASE_FLT_FN (BUILT_IN_SCALBLN):
2209 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2210 return 0;
2211 /* Fall through... */
2212 CASE_FLT_FN (BUILT_IN_LDEXP):
2213 builtin_optab = ldexp_optab; break;
2214 CASE_FLT_FN (BUILT_IN_FMOD):
2215 builtin_optab = fmod_optab; break;
2216 CASE_FLT_FN (BUILT_IN_REMAINDER):
2217 CASE_FLT_FN (BUILT_IN_DREM):
2218 builtin_optab = remainder_optab; break;
2219 default:
2220 gcc_unreachable ();
2221 }
2222
2223 /* Make a suitable register to place result in. */
2224 mode = TYPE_MODE (TREE_TYPE (exp));
2225
2226 /* Before working hard, check whether the instruction is available. */
2227 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2228 return NULL_RTX;
2229
2230 result = gen_reg_rtx (mode);
2231
2232 if (! flag_errno_math || ! HONOR_NANS (mode))
2233 errno_set = false;
2234
2235 if (errno_set && optimize_insn_for_size_p ())
2236 return 0;
2237
2238 /* Always stabilize the argument list. */
2239 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2240 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2241
2242 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2243 op1 = expand_normal (arg1);
2244
2245 start_sequence ();
2246
2247 /* Compute into RESULT.
2248 Set RESULT to wherever the result comes back. */
2249 result = expand_binop (mode, builtin_optab, op0, op1,
2250 result, 0, OPTAB_DIRECT);
2251
2252 /* If we were unable to expand via the builtin, stop the sequence
2253 (without outputting the insns) and call the library function
2254 with the stabilized argument list. */
2255 if (result == 0)
2256 {
2257 end_sequence ();
2258 return expand_call (exp, target, target == const0_rtx);
2259 }
2260
2261 if (errno_set)
2262 expand_errno_check (exp, result);
2263
2264 /* Output the entire sequence. */
2265 insns = get_insns ();
2266 end_sequence ();
2267 emit_insn (insns);
2268
2269 return result;
2270 }
2271
2272 /* Expand a call to the builtin ternary math functions (fma).
2273 Return NULL_RTX if a normal call should be emitted rather than expanding the
2274 function in-line. EXP is the expression that is a call to the builtin
2275 function; if convenient, the result should be placed in TARGET.
2276 SUBTARGET may be used as the target for computing one of EXP's
2277 operands. */
2278
2279 static rtx
2280 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2281 {
2282 optab builtin_optab;
2283 rtx op0, op1, op2, result;
2284 rtx_insn *insns;
2285 tree fndecl = get_callee_fndecl (exp);
2286 tree arg0, arg1, arg2;
2287 machine_mode mode;
2288
2289 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2290 return NULL_RTX;
2291
2292 arg0 = CALL_EXPR_ARG (exp, 0);
2293 arg1 = CALL_EXPR_ARG (exp, 1);
2294 arg2 = CALL_EXPR_ARG (exp, 2);
2295
2296 switch (DECL_FUNCTION_CODE (fndecl))
2297 {
2298 CASE_FLT_FN (BUILT_IN_FMA):
2299 builtin_optab = fma_optab; break;
2300 default:
2301 gcc_unreachable ();
2302 }
2303
2304 /* Make a suitable register to place result in. */
2305 mode = TYPE_MODE (TREE_TYPE (exp));
2306
2307 /* Before working hard, check whether the instruction is available. */
2308 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2309 return NULL_RTX;
2310
2311 result = gen_reg_rtx (mode);
2312
2313 /* Always stabilize the argument list. */
2314 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2315 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2316 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2317
2318 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2319 op1 = expand_normal (arg1);
2320 op2 = expand_normal (arg2);
2321
2322 start_sequence ();
2323
2324 /* Compute into RESULT.
2325 Set RESULT to wherever the result comes back. */
2326 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2327 result, 0);
2328
2329 /* If we were unable to expand via the builtin, stop the sequence
2330 (without outputting the insns) and call the library function
2331 with the stabilized argument list. */
2332 if (result == 0)
2333 {
2334 end_sequence ();
2335 return expand_call (exp, target, target == const0_rtx);
2336 }
2337
2338 /* Output the entire sequence. */
2339 insns = get_insns ();
2340 end_sequence ();
2341 emit_insn (insns);
2342
2343 return result;
2344 }
2345
2346 /* Expand a call to the builtin sin and cos math functions.
2347 Return NULL_RTX if a normal call should be emitted rather than expanding the
2348 function in-line. EXP is the expression that is a call to the builtin
2349 function; if convenient, the result should be placed in TARGET.
2350 SUBTARGET may be used as the target for computing one of EXP's
2351 operands. */
2352
2353 static rtx
2354 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2355 {
2356 optab builtin_optab;
2357 rtx op0;
2358 rtx_insn *insns;
2359 tree fndecl = get_callee_fndecl (exp);
2360 machine_mode mode;
2361 tree arg;
2362
2363 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2364 return NULL_RTX;
2365
2366 arg = CALL_EXPR_ARG (exp, 0);
2367
2368 switch (DECL_FUNCTION_CODE (fndecl))
2369 {
2370 CASE_FLT_FN (BUILT_IN_SIN):
2371 CASE_FLT_FN (BUILT_IN_COS):
2372 builtin_optab = sincos_optab; break;
2373 default:
2374 gcc_unreachable ();
2375 }
2376
2377 /* Make a suitable register to place result in. */
2378 mode = TYPE_MODE (TREE_TYPE (exp));
2379
2380 /* Check if the sincos insn is available; otherwise fall back
2381 to the sin or cos insn. */
2382 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2383 switch (DECL_FUNCTION_CODE (fndecl))
2384 {
2385 CASE_FLT_FN (BUILT_IN_SIN):
2386 builtin_optab = sin_optab; break;
2387 CASE_FLT_FN (BUILT_IN_COS):
2388 builtin_optab = cos_optab; break;
2389 default:
2390 gcc_unreachable ();
2391 }
2392
2393 /* Before working hard, check whether the instruction is available. */
2394 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2395 {
2396 rtx result = gen_reg_rtx (mode);
2397
2398 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2399 need to expand the argument again. This way, we will not perform
2400 side-effects more than once. */
2401 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2402
2403 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2404
2405 start_sequence ();
2406
2407 /* Compute into RESULT.
2408 Set RESULT to wherever the result comes back. */
2409 if (builtin_optab == sincos_optab)
2410 {
2411 int ok;
2412
2413 switch (DECL_FUNCTION_CODE (fndecl))
2414 {
2415 CASE_FLT_FN (BUILT_IN_SIN):
2416 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2417 break;
2418 CASE_FLT_FN (BUILT_IN_COS):
2419 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2420 break;
2421 default:
2422 gcc_unreachable ();
2423 }
2424 gcc_assert (ok);
2425 }
2426 else
2427 result = expand_unop (mode, builtin_optab, op0, result, 0);
2428
2429 if (result != 0)
2430 {
2431 /* Output the entire sequence. */
2432 insns = get_insns ();
2433 end_sequence ();
2434 emit_insn (insns);
2435 return result;
2436 }
2437
2438 /* If we were unable to expand via the builtin, stop the sequence
2439 (without outputting the insns) and call the library function
2440 with the stabilized argument list. */
2441 end_sequence ();
2442 }
2443
2444 return expand_call (exp, target, target == const0_rtx);
2445 }
2446
2447 /* Given an interclass math builtin decl FNDECL and its argument ARG
2448 return an RTL instruction code that implements the functionality.
2449 If that isn't possible or available return CODE_FOR_nothing. */
2450
2451 static enum insn_code
2452 interclass_mathfn_icode (tree arg, tree fndecl)
2453 {
2454 bool errno_set = false;
2455 optab builtin_optab = unknown_optab;
2456 machine_mode mode;
2457
2458 switch (DECL_FUNCTION_CODE (fndecl))
2459 {
2460 CASE_FLT_FN (BUILT_IN_ILOGB):
2461 errno_set = true; builtin_optab = ilogb_optab; break;
2462 CASE_FLT_FN (BUILT_IN_ISINF):
2463 builtin_optab = isinf_optab; break;
2464 case BUILT_IN_ISNORMAL:
2465 case BUILT_IN_ISFINITE:
2466 CASE_FLT_FN (BUILT_IN_FINITE):
2467 case BUILT_IN_FINITED32:
2468 case BUILT_IN_FINITED64:
2469 case BUILT_IN_FINITED128:
2470 case BUILT_IN_ISINFD32:
2471 case BUILT_IN_ISINFD64:
2472 case BUILT_IN_ISINFD128:
2473 /* These builtins have no optabs (yet). */
2474 break;
2475 default:
2476 gcc_unreachable ();
2477 }
2478
2479 /* There's no easy way to detect the case we need to set EDOM. */
2480 if (flag_errno_math && errno_set)
2481 return CODE_FOR_nothing;
2482
2483 /* Optab mode depends on the mode of the input argument. */
2484 mode = TYPE_MODE (TREE_TYPE (arg));
2485
2486 if (builtin_optab)
2487 return optab_handler (builtin_optab, mode);
2488 return CODE_FOR_nothing;
2489 }
2490
2491 /* Expand a call to one of the builtin math functions that operate on
2492 a floating point argument and output an integer result (ilogb, isinf,
2493 isnan, etc).
2494 Return 0 if a normal call should be emitted rather than expanding the
2495 function in-line. EXP is the expression that is a call to the builtin
2496 function; if convenient, the result should be placed in TARGET. */
2497
2498 static rtx
2499 expand_builtin_interclass_mathfn (tree exp, rtx target)
2500 {
2501 enum insn_code icode = CODE_FOR_nothing;
2502 rtx op0;
2503 tree fndecl = get_callee_fndecl (exp);
2504 machine_mode mode;
2505 tree arg;
2506
2507 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2508 return NULL_RTX;
2509
2510 arg = CALL_EXPR_ARG (exp, 0);
2511 icode = interclass_mathfn_icode (arg, fndecl);
2512 mode = TYPE_MODE (TREE_TYPE (arg));
2513
2514 if (icode != CODE_FOR_nothing)
2515 {
2516 struct expand_operand ops[1];
2517 rtx_insn *last = get_last_insn ();
2518 tree orig_arg = arg;
2519
2520 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2521 need to expand the argument again. This way, we will not perform
2522 side-effects more than once. */
2523 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2524
2525 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2526
2527 if (mode != GET_MODE (op0))
2528 op0 = convert_to_mode (mode, op0, 0);
2529
2530 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2531 if (maybe_legitimize_operands (icode, 0, 1, ops)
2532 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2533 return ops[0].value;
2534
2535 delete_insns_since (last);
2536 CALL_EXPR_ARG (exp, 0) = orig_arg;
2537 }
2538
2539 return NULL_RTX;
2540 }
2541
2542 /* Expand a call to the builtin sincos math function.
2543 Return NULL_RTX if a normal call should be emitted rather than expanding the
2544 function in-line. EXP is the expression that is a call to the builtin
2545 function. */
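/* Illustrative sketch (not part of the compiler): the builtin mirrors
   the GNU libc interface

     double s, c;
     __builtin_sincos (x, &s, &c);

   and is expanded here through the sincos optab when the target
   provides one; otherwise a normal library call is emitted.  */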
2546
2547 static rtx
2548 expand_builtin_sincos (tree exp)
2549 {
2550 rtx op0, op1, op2, target1, target2;
2551 machine_mode mode;
2552 tree arg, sinp, cosp;
2553 int result;
2554 location_t loc = EXPR_LOCATION (exp);
2555 tree alias_type, alias_off;
2556
2557 if (!validate_arglist (exp, REAL_TYPE,
2558 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2559 return NULL_RTX;
2560
2561 arg = CALL_EXPR_ARG (exp, 0);
2562 sinp = CALL_EXPR_ARG (exp, 1);
2563 cosp = CALL_EXPR_ARG (exp, 2);
2564
2565 /* Make a suitable register to place result in. */
2566 mode = TYPE_MODE (TREE_TYPE (arg));
2567
2568 /* Check if sincos insn is available, otherwise emit the call. */
2569 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2570 return NULL_RTX;
2571
2572 target1 = gen_reg_rtx (mode);
2573 target2 = gen_reg_rtx (mode);
2574
2575 op0 = expand_normal (arg);
2576 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2577 alias_off = build_int_cst (alias_type, 0);
2578 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2579 sinp, alias_off));
2580 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2581 cosp, alias_off));
2582
2583 /* Compute into target1 and target2.
2584 Set TARGET to wherever the result comes back. */
2585 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2586 gcc_assert (result);
2587
2588 /* Move target1 and target2 to the memory locations indicated
2589 by op1 and op2. */
2590 emit_move_insn (op1, target1);
2591 emit_move_insn (op2, target2);
2592
2593 return const0_rtx;
2594 }
2595
2596 /* Expand a call to the internal cexpi builtin to the sincos math function.
2597 EXP is the expression that is a call to the builtin function; if convenient,
2598 the result should be placed in TARGET. */
2599
2600 static rtx
2601 expand_builtin_cexpi (tree exp, rtx target)
2602 {
2603 tree fndecl = get_callee_fndecl (exp);
2604 tree arg, type;
2605 machine_mode mode;
2606 rtx op0, op1, op2;
2607 location_t loc = EXPR_LOCATION (exp);
2608
2609 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2610 return NULL_RTX;
2611
2612 arg = CALL_EXPR_ARG (exp, 0);
2613 type = TREE_TYPE (arg);
2614 mode = TYPE_MODE (TREE_TYPE (arg));
2615
2616 /* Try expanding via a sincos optab, fall back to emitting a libcall
2617 to sincos or cexp. We are sure one of them is available, because
2618 cexpi is only generated when sincos or cexp can be used. */
2619 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2620 {
2621 op1 = gen_reg_rtx (mode);
2622 op2 = gen_reg_rtx (mode);
2623
2624 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2625
2626 /* Compute into op1 and op2. */
2627 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2628 }
2629 else if (targetm.libc_has_function (function_sincos))
2630 {
2631 tree call, fn = NULL_TREE;
2632 tree top1, top2;
2633 rtx op1a, op2a;
2634
2635 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2636 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2637 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2638 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2639 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2640 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2641 else
2642 gcc_unreachable ();
2643
2644 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2645 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2646 op1a = copy_addr_to_reg (XEXP (op1, 0));
2647 op2a = copy_addr_to_reg (XEXP (op2, 0));
2648 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2649 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2650
2651 /* Make sure not to fold the sincos call again. */
2652 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2653 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2654 call, 3, arg, top1, top2));
2655 }
2656 else
2657 {
2658 tree call, fn = NULL_TREE, narg;
2659 tree ctype = build_complex_type (type);
2660
2661 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2662 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2663 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2664 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2665 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2666 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2667 else
2668 gcc_unreachable ();
2669
2670 /* If we don't have a decl for cexp, create one. This is the
2671 friendliest fallback if the user calls __builtin_cexpi
2672 on a target without full C99 function support. */
2673 if (fn == NULL_TREE)
2674 {
2675 tree fntype;
2676 const char *name = NULL;
2677
2678 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2679 name = "cexpf";
2680 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2681 name = "cexp";
2682 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2683 name = "cexpl";
2684
2685 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2686 fn = build_fn_decl (name, fntype);
2687 }
2688
2689 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2690 build_real (type, dconst0), arg);
2691
2692 /* Make sure not to fold the cexp call again. */
2693 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2694 return expand_expr (build_call_nary (ctype, call, 1, narg),
2695 target, VOIDmode, EXPAND_NORMAL);
2696 }
2697
2698 /* Now build the proper return type. */
2699 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2700 make_tree (TREE_TYPE (arg), op2),
2701 make_tree (TREE_TYPE (arg), op1)),
2702 target, VOIDmode, EXPAND_NORMAL);
2703 }
2704
2705 /* Conveniently construct a function call expression. FNDECL names the
2706 function to be called, N is the number of arguments, and the "..."
2707 parameters are the argument expressions. Unlike build_call_expr
2708 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2709
2710 static tree
2711 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2712 {
2713 va_list ap;
2714 tree fntype = TREE_TYPE (fndecl);
2715 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2716
2717 va_start (ap, n);
2718 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2719 va_end (ap);
2720 SET_EXPR_LOCATION (fn, loc);
2721 return fn;
2722 }
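/* For example (a sketch mirroring how this file uses it further below),
   lowering lfloor to a plain floor call looks like

     exp = build_call_nofold_loc (loc, floor_fndecl, 1, arg);

   where floor_fndecl and arg stand for the fallback declaration and the
   saved argument; the resulting CALL_EXPR is then expanded normally.  */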
2723
2724 /* Expand a call to one of the builtin rounding functions gcc defines
2725 as an extension (lfloor and lceil). As these are gcc extensions we
2726 do not need to worry about setting errno to EDOM.
2727 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2728 EXP is the expression that is a call to the builtin function;
2729 if convenient, the result should be placed in TARGET. */
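/* Illustrative sketch (not part of the compiler): __builtin_lfloor (x)
   behaves like

     long
     lfloor_equivalent (double x)
     {
       return (long) __builtin_floor (x);
     }

   and this function tries the lfloor optab first, falling back to the
   floor call plus a fix conversion as described above.  */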
2730
2731 static rtx
2732 expand_builtin_int_roundingfn (tree exp, rtx target)
2733 {
2734 convert_optab builtin_optab;
2735 rtx op0, tmp;
2736 rtx_insn *insns;
2737 tree fndecl = get_callee_fndecl (exp);
2738 enum built_in_function fallback_fn;
2739 tree fallback_fndecl;
2740 machine_mode mode;
2741 tree arg;
2742
2743 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2744 gcc_unreachable ();
2745
2746 arg = CALL_EXPR_ARG (exp, 0);
2747
2748 switch (DECL_FUNCTION_CODE (fndecl))
2749 {
2750 CASE_FLT_FN (BUILT_IN_ICEIL):
2751 CASE_FLT_FN (BUILT_IN_LCEIL):
2752 CASE_FLT_FN (BUILT_IN_LLCEIL):
2753 builtin_optab = lceil_optab;
2754 fallback_fn = BUILT_IN_CEIL;
2755 break;
2756
2757 CASE_FLT_FN (BUILT_IN_IFLOOR):
2758 CASE_FLT_FN (BUILT_IN_LFLOOR):
2759 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2760 builtin_optab = lfloor_optab;
2761 fallback_fn = BUILT_IN_FLOOR;
2762 break;
2763
2764 default:
2765 gcc_unreachable ();
2766 }
2767
2768 /* Make a suitable register to place result in. */
2769 mode = TYPE_MODE (TREE_TYPE (exp));
2770
2771 target = gen_reg_rtx (mode);
2772
2773 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2774 need to expand the argument again. This way, we will not perform
2775 side-effects more than once. */
2776 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2777
2778 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2779
2780 start_sequence ();
2781
2782 /* Compute into TARGET. */
2783 if (expand_sfix_optab (target, op0, builtin_optab))
2784 {
2785 /* Output the entire sequence. */
2786 insns = get_insns ();
2787 end_sequence ();
2788 emit_insn (insns);
2789 return target;
2790 }
2791
2792 /* If we were unable to expand via the builtin, stop the sequence
2793 (without outputting the insns). */
2794 end_sequence ();
2795
2796 /* Fall back to floating point rounding optab. */
2797 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2798
2799 /* For non-C99 targets we may end up without a fallback fndecl here
2800 if the user called __builtin_lfloor directly. In this case emit
2801 a call to the floor/ceil variants nevertheless. This should result
2802 in the best user experience for targets without full C99 support. */
2803 if (fallback_fndecl == NULL_TREE)
2804 {
2805 tree fntype;
2806 const char *name = NULL;
2807
2808 switch (DECL_FUNCTION_CODE (fndecl))
2809 {
2810 case BUILT_IN_ICEIL:
2811 case BUILT_IN_LCEIL:
2812 case BUILT_IN_LLCEIL:
2813 name = "ceil";
2814 break;
2815 case BUILT_IN_ICEILF:
2816 case BUILT_IN_LCEILF:
2817 case BUILT_IN_LLCEILF:
2818 name = "ceilf";
2819 break;
2820 case BUILT_IN_ICEILL:
2821 case BUILT_IN_LCEILL:
2822 case BUILT_IN_LLCEILL:
2823 name = "ceill";
2824 break;
2825 case BUILT_IN_IFLOOR:
2826 case BUILT_IN_LFLOOR:
2827 case BUILT_IN_LLFLOOR:
2828 name = "floor";
2829 break;
2830 case BUILT_IN_IFLOORF:
2831 case BUILT_IN_LFLOORF:
2832 case BUILT_IN_LLFLOORF:
2833 name = "floorf";
2834 break;
2835 case BUILT_IN_IFLOORL:
2836 case BUILT_IN_LFLOORL:
2837 case BUILT_IN_LLFLOORL:
2838 name = "floorl";
2839 break;
2840 default:
2841 gcc_unreachable ();
2842 }
2843
2844 fntype = build_function_type_list (TREE_TYPE (arg),
2845 TREE_TYPE (arg), NULL_TREE);
2846 fallback_fndecl = build_fn_decl (name, fntype);
2847 }
2848
2849 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2850
2851 tmp = expand_normal (exp);
2852 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2853
2854 /* Truncate the result of floating point optab to integer
2855 via expand_fix (). */
2856 target = gen_reg_rtx (mode);
2857 expand_fix (target, tmp, 0);
2858
2859 return target;
2860 }
2861
2862 /* Expand a call to one of the builtin math functions doing integer
2863 conversion (lrint).
2864 Return 0 if a normal call should be emitted rather than expanding the
2865 function in-line. EXP is the expression that is a call to the builtin
2866 function; if convenient, the result should be placed in TARGET. */
2867
2868 static rtx
2869 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2870 {
2871 convert_optab builtin_optab;
2872 rtx op0;
2873 rtx_insn *insns;
2874 tree fndecl = get_callee_fndecl (exp);
2875 tree arg;
2876 machine_mode mode;
2877 enum built_in_function fallback_fn = BUILT_IN_NONE;
2878
2879 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2880 gcc_unreachable ();
2881
2882 arg = CALL_EXPR_ARG (exp, 0);
2883
2884 switch (DECL_FUNCTION_CODE (fndecl))
2885 {
2886 CASE_FLT_FN (BUILT_IN_IRINT):
2887 fallback_fn = BUILT_IN_LRINT;
2888 /* FALLTHRU */
2889 CASE_FLT_FN (BUILT_IN_LRINT):
2890 CASE_FLT_FN (BUILT_IN_LLRINT):
2891 builtin_optab = lrint_optab;
2892 break;
2893
2894 CASE_FLT_FN (BUILT_IN_IROUND):
2895 fallback_fn = BUILT_IN_LROUND;
2896 /* FALLTHRU */
2897 CASE_FLT_FN (BUILT_IN_LROUND):
2898 CASE_FLT_FN (BUILT_IN_LLROUND):
2899 builtin_optab = lround_optab;
2900 break;
2901
2902 default:
2903 gcc_unreachable ();
2904 }
2905
2906 /* There's no easy way to detect the case we need to set EDOM. */
2907 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2908 return NULL_RTX;
2909
2910 /* Make a suitable register to place result in. */
2911 mode = TYPE_MODE (TREE_TYPE (exp));
2912
2913 /* Only expand inline when errno handling is not required (there is no easy way to detect the EDOM case). */
2914 if (!flag_errno_math)
2915 {
2916 rtx result = gen_reg_rtx (mode);
2917
2918 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2919 need to expand the argument again. This way, we will not perform
2920 side-effects more than once. */
2921 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2922
2923 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2924
2925 start_sequence ();
2926
2927 if (expand_sfix_optab (result, op0, builtin_optab))
2928 {
2929 /* Output the entire sequence. */
2930 insns = get_insns ();
2931 end_sequence ();
2932 emit_insn (insns);
2933 return result;
2934 }
2935
2936 /* If we were unable to expand via the builtin, stop the sequence
2937 (without outputting the insns) and call the library function
2938 with the stabilized argument list. */
2939 end_sequence ();
2940 }
2941
2942 if (fallback_fn != BUILT_IN_NONE)
2943 {
2944 /* Fall back to rounding to long int. Use implicit_p 0: on non-C99
2945 targets, (int) round (x) should never be transformed into
2946 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2947 a call to lround in the hope that the target provides at least some
2948 C99 functions. This should result in the best user experience for
2949 targets without full C99 support. */
2950 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2951 fallback_fn, 0);
2952
2953 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2954 fallback_fndecl, 1, arg);
2955
2956 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2957 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2958 return convert_to_mode (mode, target, 0);
2959 }
2960
2961 return expand_call (exp, target, target == const0_rtx);
2962 }
2963
2964 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2965 a normal call should be emitted rather than expanding the function
2966 in-line. EXP is the expression that is a call to the builtin
2967 function; if convenient, the result should be placed in TARGET. */
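/* Illustrative sketch (not part of the compiler): __builtin_powi (x, n)
   computes x raised to the integer power n, e.g.

     double y = __builtin_powi (x, 3);   // x * x * x

   and here it is always lowered to a libgcc libcall such as __powidf2
   for double.  */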
2968
2969 static rtx
2970 expand_builtin_powi (tree exp, rtx target)
2971 {
2972 tree arg0, arg1;
2973 rtx op0, op1;
2974 machine_mode mode;
2975 machine_mode mode2;
2976
2977 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2978 return NULL_RTX;
2979
2980 arg0 = CALL_EXPR_ARG (exp, 0);
2981 arg1 = CALL_EXPR_ARG (exp, 1);
2982 mode = TYPE_MODE (TREE_TYPE (exp));
2983
2984 /* Emit a libcall to libgcc. */
2985
2986 /* Mode of the 2nd argument must match that of an int. */
2987 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2988
2989 if (target == NULL_RTX)
2990 target = gen_reg_rtx (mode);
2991
2992 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2993 if (GET_MODE (op0) != mode)
2994 op0 = convert_to_mode (mode, op0, 0);
2995 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2996 if (GET_MODE (op1) != mode2)
2997 op1 = convert_to_mode (mode2, op1, 0);
2998
2999 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3000 target, LCT_CONST, mode, 2,
3001 op0, mode, op1, mode2);
3002
3003 return target;
3004 }
3005
3006 /* Expand expression EXP which is a call to the strlen builtin. Return
3007 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3008 try to get the result in TARGET, if convenient. */
3009
3010 static rtx
3011 expand_builtin_strlen (tree exp, rtx target,
3012 machine_mode target_mode)
3013 {
3014 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3015 return NULL_RTX;
3016 else
3017 {
3018 struct expand_operand ops[4];
3019 rtx pat;
3020 tree len;
3021 tree src = CALL_EXPR_ARG (exp, 0);
3022 rtx src_reg;
3023 rtx_insn *before_strlen;
3024 machine_mode insn_mode = target_mode;
3025 enum insn_code icode = CODE_FOR_nothing;
3026 unsigned int align;
3027
3028 /* If the length can be computed at compile-time, return it. */
3029 len = c_strlen (src, 0);
3030 if (len)
3031 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3032
3033 /* If the length can be computed at compile-time and is a constant
3034 integer, but there are side-effects in src, evaluate
3035 src for side-effects, then return len.
3036 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3037 can be optimized into: i++; x = 3; */
3038 len = c_strlen (src, 1);
3039 if (len && TREE_CODE (len) == INTEGER_CST)
3040 {
3041 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3042 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3043 }
3044
3045 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3046
3047 /* If SRC is not a pointer type, don't do this operation inline. */
3048 if (align == 0)
3049 return NULL_RTX;
3050
3051 /* Bail out if we can't compute strlen in the right mode. */
3052 while (insn_mode != VOIDmode)
3053 {
3054 icode = optab_handler (strlen_optab, insn_mode);
3055 if (icode != CODE_FOR_nothing)
3056 break;
3057
3058 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3059 }
3060 if (insn_mode == VOIDmode)
3061 return NULL_RTX;
3062
3063 /* Make a place to hold the source address. We will not expand
3064 the actual source until we are sure that the expansion will
3065 not fail -- there are trees that cannot be expanded twice. */
3066 src_reg = gen_reg_rtx (Pmode);
3067
3068 /* Mark the beginning of the strlen sequence so we can emit the
3069 source operand later. */
3070 before_strlen = get_last_insn ();
3071
3072 create_output_operand (&ops[0], target, insn_mode);
3073 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3074 create_integer_operand (&ops[2], 0);
3075 create_integer_operand (&ops[3], align);
3076 if (!maybe_expand_insn (icode, 4, ops))
3077 return NULL_RTX;
3078
3079 /* Now that we are assured of success, expand the source. */
3080 start_sequence ();
3081 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3082 if (pat != src_reg)
3083 {
3084 #ifdef POINTERS_EXTEND_UNSIGNED
3085 if (GET_MODE (pat) != Pmode)
3086 pat = convert_to_mode (Pmode, pat,
3087 POINTERS_EXTEND_UNSIGNED);
3088 #endif
3089 emit_move_insn (src_reg, pat);
3090 }
3091 pat = get_insns ();
3092 end_sequence ();
3093
3094 if (before_strlen)
3095 emit_insn_after (pat, before_strlen);
3096 else
3097 emit_insn_before (pat, get_insns ());
3098
3099 /* Return the value in the proper mode for this function. */
3100 if (GET_MODE (ops[0].value) == target_mode)
3101 target = ops[0].value;
3102 else if (target != 0)
3103 convert_move (target, ops[0].value, 0);
3104 else
3105 target = convert_to_mode (target_mode, ops[0].value, 0);
3106
3107 return target;
3108 }
3109 }
3110
3111 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3112 bytes from constant string DATA + OFFSET and return it as target
3113 constant. */
3114
3115 static rtx
3116 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3117 machine_mode mode)
3118 {
3119 const char *str = (const char *) data;
3120
3121 gcc_assert (offset >= 0
3122 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3123 <= strlen (str) + 1));
3124
3125 return c_readstr (str + offset, mode);
3126 }
3127
3128 /* LEN specifies the length of the block for the memcpy/memset operation.
3129 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3130 In some cases we can make a very likely guess at the maximum size;
3131 when we can, we store it in PROBABLE_MAX_SIZE. */
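/* Illustrative sketch (not part of the compiler): for source such as

     void
     copy (char *a, const char *b, unsigned int n)
     {
       if (n < 100)
         __builtin_memcpy (a, b, n);
     }

   the SSA range information on N in the guarded branch gives a VR_RANGE
   of [0, 99] (assuming range info has been computed), so this function
   would report min_size 0 and max_size 99 instead of the full range of
   the length's mode.  */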
3132
3133 static void
3134 determine_block_size (tree len, rtx len_rtx,
3135 unsigned HOST_WIDE_INT *min_size,
3136 unsigned HOST_WIDE_INT *max_size,
3137 unsigned HOST_WIDE_INT *probable_max_size)
3138 {
3139 if (CONST_INT_P (len_rtx))
3140 {
3141 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3142 return;
3143 }
3144 else
3145 {
3146 wide_int min, max;
3147 enum value_range_type range_type = VR_UNDEFINED;
3148
3149 /* Determine bounds from the type. */
3150 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3151 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3152 else
3153 *min_size = 0;
3154 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3155 *probable_max_size = *max_size
3156 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3157 else
3158 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3159
3160 if (TREE_CODE (len) == SSA_NAME)
3161 range_type = get_range_info (len, &min, &max);
3162 if (range_type == VR_RANGE)
3163 {
3164 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3165 *min_size = min.to_uhwi ();
3166 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3167 *probable_max_size = *max_size = max.to_uhwi ();
3168 }
3169 else if (range_type == VR_ANTI_RANGE)
3170 {
3171 /* An anti range 0...N lets us determine that the minimal size is N+1. */
3172 if (min == 0)
3173 {
3174 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3175 *min_size = max.to_uhwi () + 1;
3176 }
3177 /* Code like
3178
3179 int n;
3180 if (n < 100)
3181 memcpy (a, b, n)
3182
3183 produces an anti range allowing negative values of N. We can
3184 still use that information to guess that N is not negative.
3185 */
3186 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3187 *probable_max_size = min.to_uhwi () - 1;
3188 }
3189 }
3190 gcc_checking_assert (*max_size <=
3191 (unsigned HOST_WIDE_INT)
3192 GET_MODE_MASK (GET_MODE (len_rtx)));
3193 }
3194
3195 /* Helper function to do the actual work for expand_builtin_memcpy. */
3196
3197 static rtx
3198 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3199 {
3200 const char *src_str;
3201 unsigned int src_align = get_pointer_alignment (src);
3202 unsigned int dest_align = get_pointer_alignment (dest);
3203 rtx dest_mem, src_mem, dest_addr, len_rtx;
3204 HOST_WIDE_INT expected_size = -1;
3205 unsigned int expected_align = 0;
3206 unsigned HOST_WIDE_INT min_size;
3207 unsigned HOST_WIDE_INT max_size;
3208 unsigned HOST_WIDE_INT probable_max_size;
3209
3210 /* If DEST is not a pointer type, call the normal function. */
3211 if (dest_align == 0)
3212 return NULL_RTX;
3213
3214 /* If SRC is not a pointer type, don't do this
3215 operation in-line. */
3216 if (src_align == 0)
3217 return NULL_RTX;
3218
3219 if (currently_expanding_gimple_stmt)
3220 stringop_block_profile (currently_expanding_gimple_stmt,
3221 &expected_align, &expected_size);
3222
3223 if (expected_align < dest_align)
3224 expected_align = dest_align;
3225 dest_mem = get_memory_rtx (dest, len);
3226 set_mem_align (dest_mem, dest_align);
3227 len_rtx = expand_normal (len);
3228 determine_block_size (len, len_rtx, &min_size, &max_size,
3229 &probable_max_size);
3230 src_str = c_getstr (src);
3231
3232 /* If SRC is a string constant and block move would be done
3233 by pieces, we can avoid loading the string from memory
3234 and only store the computed constants. */
3235 if (src_str
3236 && CONST_INT_P (len_rtx)
3237 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3238 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3239 CONST_CAST (char *, src_str),
3240 dest_align, false))
3241 {
3242 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3243 builtin_memcpy_read_str,
3244 CONST_CAST (char *, src_str),
3245 dest_align, false, 0);
3246 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3247 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3248 return dest_mem;
3249 }
3250
3251 src_mem = get_memory_rtx (src, len);
3252 set_mem_align (src_mem, src_align);
3253
3254 /* Copy word part most expediently. */
3255 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3256 CALL_EXPR_TAILCALL (exp)
3257 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3258 expected_align, expected_size,
3259 min_size, max_size, probable_max_size);
3260
3261 if (dest_addr == 0)
3262 {
3263 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3264 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3265 }
3266
3267 return dest_addr;
3268 }
3269
3270 /* Expand a call EXP to the memcpy builtin.
3271 Return NULL_RTX if we failed; the caller should emit a normal call,
3272 otherwise try to get the result in TARGET, if convenient (and in
3273 mode MODE if that's convenient). */
3274
3275 static rtx
3276 expand_builtin_memcpy (tree exp, rtx target)
3277 {
3278 if (!validate_arglist (exp,
3279 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3280 return NULL_RTX;
3281 else
3282 {
3283 tree dest = CALL_EXPR_ARG (exp, 0);
3284 tree src = CALL_EXPR_ARG (exp, 1);
3285 tree len = CALL_EXPR_ARG (exp, 2);
3286 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3287 }
3288 }
3289
3290 /* Expand an instrumented call EXP to the memcpy builtin.
3291 Return NULL_RTX if we failed; the caller should emit a normal call,
3292 otherwise try to get the result in TARGET, if convenient (and in
3293 mode MODE if that's convenient). */
3294
3295 static rtx
3296 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3297 {
3298 if (!validate_arglist (exp,
3299 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3300 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3301 INTEGER_TYPE, VOID_TYPE))
3302 return NULL_RTX;
3303 else
3304 {
3305 tree dest = CALL_EXPR_ARG (exp, 0);
3306 tree src = CALL_EXPR_ARG (exp, 2);
3307 tree len = CALL_EXPR_ARG (exp, 4);
3308 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3309
3310 /* Return src bounds with the result. */
3311 if (res)
3312 {
3313 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3314 expand_normal (CALL_EXPR_ARG (exp, 1)));
3315 res = chkp_join_splitted_slot (res, bnd);
3316 }
3317 return res;
3318 }
3319 }
3320
3321 /* Expand a call EXP to the mempcpy builtin.
3322 Return NULL_RTX if we failed; the caller should emit a normal call,
3323 otherwise try to get the result in TARGET, if convenient (and in
3324 mode MODE if that's convenient). If ENDP is 0 return the
3325 destination pointer, if ENDP is 1 return the end pointer ala
3326 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3327 stpcpy. */
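/* As a worked example of the ENDP values above, for a copy of N bytes
   from SRC to DEST the expansion returns

     ENDP == 0:  dest            (memcpy-style)
     ENDP == 1:  dest + n        (mempcpy-style)
     ENDP == 2:  dest + n - 1    (stpcpy-style)
*/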
3328
3329 static rtx
3330 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3331 {
3332 if (!validate_arglist (exp,
3333 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3334 return NULL_RTX;
3335 else
3336 {
3337 tree dest = CALL_EXPR_ARG (exp, 0);
3338 tree src = CALL_EXPR_ARG (exp, 1);
3339 tree len = CALL_EXPR_ARG (exp, 2);
3340 return expand_builtin_mempcpy_args (dest, src, len,
3341 target, mode, /*endp=*/ 1,
3342 exp);
3343 }
3344 }
3345
3346 /* Expand an instrumented call EXP to the mempcpy builtin.
3347 Return NULL_RTX if we failed; the caller should emit a normal call,
3348 otherwise try to get the result in TARGET, if convenient (and in
3349 mode MODE if that's convenient). */
3350
3351 static rtx
3352 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3353 {
3354 if (!validate_arglist (exp,
3355 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3356 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3357 INTEGER_TYPE, VOID_TYPE))
3358 return NULL_RTX;
3359 else
3360 {
3361 tree dest = CALL_EXPR_ARG (exp, 0);
3362 tree src = CALL_EXPR_ARG (exp, 2);
3363 tree len = CALL_EXPR_ARG (exp, 4);
3364 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3365 mode, 1, exp);
3366
3367 /* Return DEST bounds with the result. */
3368 if (res)
3369 {
3370 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3371 expand_normal (CALL_EXPR_ARG (exp, 1)));
3372 res = chkp_join_splitted_slot (res, bnd);
3373 }
3374 return res;
3375 }
3376 }
3377
3378 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3379 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3380 so that this can also be called without constructing an actual CALL_EXPR.
3381 The other arguments and return value are the same as for
3382 expand_builtin_mempcpy. */
3383
3384 static rtx
3385 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3386 rtx target, machine_mode mode, int endp,
3387 tree orig_exp)
3388 {
3389 tree fndecl = get_callee_fndecl (orig_exp);
3390
3391 /* If return value is ignored, transform mempcpy into memcpy. */
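  /* E.g. "(void) mempcpy (d, s, n)" is expanded as "memcpy (d, s, n)",
     using the instrumented CHKP variant when one is available. */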
3392 if (target == const0_rtx
3393 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3394 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3395 {
3396 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3397 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3398 dest, src, len);
3399 return expand_expr (result, target, mode, EXPAND_NORMAL);
3400 }
3401 else if (target == const0_rtx
3402 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3403 {
3404 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3405 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3406 dest, src, len);
3407 return expand_expr (result, target, mode, EXPAND_NORMAL);
3408 }
3409 else
3410 {
3411 const char *src_str;
3412 unsigned int src_align = get_pointer_alignment (src);
3413 unsigned int dest_align = get_pointer_alignment (dest);
3414 rtx dest_mem, src_mem, len_rtx;
3415
3416 /* If either SRC or DEST is not a pointer type, don't do this
3417 operation in-line. */
3418 if (dest_align == 0 || src_align == 0)
3419 return NULL_RTX;
3420
3421 /* If LEN is not constant, call the normal function. */
3422 if (! tree_fits_uhwi_p (len))
3423 return NULL_RTX;
3424
3425 len_rtx = expand_normal (len);
3426 src_str = c_getstr (src);
3427
3428 /* If SRC is a string constant and block move would be done
3429 by pieces, we can avoid loading the string from memory
3430 and only store the computed constants. */
3431 if (src_str
3432 && CONST_INT_P (len_rtx)
3433 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3434 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3435 CONST_CAST (char *, src_str),
3436 dest_align, false))
3437 {
3438 dest_mem = get_memory_rtx (dest, len);
3439 set_mem_align (dest_mem, dest_align);
3440 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3441 builtin_memcpy_read_str,
3442 CONST_CAST (char *, src_str),
3443 dest_align, false, endp);
3444 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3445 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3446 return dest_mem;
3447 }
3448
3449 if (CONST_INT_P (len_rtx)
3450 && can_move_by_pieces (INTVAL (len_rtx),
3451 MIN (dest_align, src_align)))
3452 {
3453 dest_mem = get_memory_rtx (dest, len);
3454 set_mem_align (dest_mem, dest_align);
3455 src_mem = get_memory_rtx (src, len);
3456 set_mem_align (src_mem, src_align);
3457 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3458 MIN (dest_align, src_align), endp);
3459 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3460 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3461 return dest_mem;
3462 }
3463
3464 return NULL_RTX;
3465 }
3466 }
3467
3468 #ifndef HAVE_movstr
3469 # define HAVE_movstr 0
3470 # define CODE_FOR_movstr CODE_FOR_nothing
3471 #endif
3472
3473 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3474 we failed; the caller should emit a normal call, otherwise try to
3475 get the result in TARGET, if convenient. If ENDP is 0 return the
3476 destination pointer, if ENDP is 1 return the end pointer ala
3477 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3478 stpcpy. */
3479
3480 static rtx
3481 expand_movstr (tree dest, tree src, rtx target, int endp)
3482 {
3483 struct expand_operand ops[3];
3484 rtx dest_mem;
3485 rtx src_mem;
3486
3487 if (!HAVE_movstr)
3488 return NULL_RTX;
3489
3490 dest_mem = get_memory_rtx (dest, NULL);
3491 src_mem = get_memory_rtx (src, NULL);
3492 if (!endp)
3493 {
3494 target = force_reg (Pmode, XEXP (dest_mem, 0));
3495 dest_mem = replace_equiv_address (dest_mem, target);
3496 }
3497
3498 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3499 create_fixed_operand (&ops[1], dest_mem);
3500 create_fixed_operand (&ops[2], src_mem);
3501 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3502 return NULL_RTX;
3503
3504 if (endp && target != const0_rtx)
3505 {
3506 target = ops[0].value;
3507 /* movstr is supposed to set end to the address of the NUL
3508 terminator. If the caller requested a mempcpy-like return value,
3509 adjust it. */
3510 if (endp == 1)
3511 {
3512 rtx tem = plus_constant (GET_MODE (target),
3513 gen_lowpart (GET_MODE (target), target), 1);
3514 emit_move_insn (target, force_operand (tem, NULL_RTX));
3515 }
3516 }
3517 return target;
3518 }
3519
3520 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3521 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3522 try to get the result in TARGET, if convenient (and in mode MODE if that's
3523 convenient). */
3524
3525 static rtx
3526 expand_builtin_strcpy (tree exp, rtx target)
3527 {
3528 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3529 {
3530 tree dest = CALL_EXPR_ARG (exp, 0);
3531 tree src = CALL_EXPR_ARG (exp, 1);
3532 return expand_builtin_strcpy_args (dest, src, target);
3533 }
3534 return NULL_RTX;
3535 }
3536
3537 /* Helper function to do the actual work for expand_builtin_strcpy. The
3538 arguments to the builtin_strcpy call DEST and SRC are broken out
3539 so that this can also be called without constructing an actual CALL_EXPR.
3540 The other arguments and return value are the same as for
3541 expand_builtin_strcpy. */
3542
3543 static rtx
3544 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3545 {
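  /* The only inline strategy available here is a movstr insn; if the
     target lacks one, expand_movstr returns NULL_RTX and the caller
     falls back to a library call. */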
3546 return expand_movstr (dest, src, target, /*endp=*/0);
3547 }
3548
3549 /* Expand a call EXP to the stpcpy builtin.
3550 Return NULL_RTX if we failed; the caller should emit a normal call,
3551 otherwise try to get the result in TARGET, if convenient (and in
3552 mode MODE if that's convenient). */
3553
3554 static rtx
3555 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3556 {
3557 tree dst, src;
3558 location_t loc = EXPR_LOCATION (exp);
3559
3560 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3561 return NULL_RTX;
3562
3563 dst = CALL_EXPR_ARG (exp, 0);
3564 src = CALL_EXPR_ARG (exp, 1);
3565
3566 /* If return value is ignored, transform stpcpy into strcpy. */
3567 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3568 {
3569 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3570 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3571 return expand_expr (result, target, mode, EXPAND_NORMAL);
3572 }
3573 else
3574 {
3575 tree len, lenp1;
3576 rtx ret;
3577
3578 /* Ensure we get an actual string whose length can be evaluated at
3579 compile-time, not an expression containing a string. This is
3580 because the latter will potentially produce pessimized code
3581 when used to produce the return value. */
3582 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3583 return expand_movstr (dst, src, target, /*endp=*/2);
3584
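      /* stpcpy (D, S) returns &D[strlen (S)], i.e. mempcpy's end pointer
         minus one; hence the LEN + 1 byte count and ENDP == 2 below. */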
3585 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3586 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3587 target, mode, /*endp=*/2,
3588 exp);
3589
3590 if (ret)
3591 return ret;
3592
3593 if (TREE_CODE (len) == INTEGER_CST)
3594 {
3595 rtx len_rtx = expand_normal (len);
3596
3597 if (CONST_INT_P (len_rtx))
3598 {
3599 ret = expand_builtin_strcpy_args (dst, src, target);
3600
3601 if (ret)
3602 {
3603 if (! target)
3604 {
3605 if (mode != VOIDmode)
3606 target = gen_reg_rtx (mode);
3607 else
3608 target = gen_reg_rtx (GET_MODE (ret));
3609 }
3610 if (GET_MODE (target) != GET_MODE (ret))
3611 ret = gen_lowpart (GET_MODE (target), ret);
3612
3613 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3614 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3615 gcc_assert (ret);
3616
3617 return target;
3618 }
3619 }
3620 }
3621
3622 return expand_movstr (dst, src, target, /*endp=*/2);
3623 }
3624 }
3625
3626 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3627 bytes from constant string DATA + OFFSET and return it as a target
3628 constant. */
3629
3630 rtx
3631 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3632 machine_mode mode)
3633 {
3634 const char *str = (const char *) data;
3635
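  /* Offsets past the end of STR read as zeros, which supplies the
     trailing NUL padding that strncpy requires. */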
3636 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3637 return const0_rtx;
3638
3639 return c_readstr (str + offset, mode);
3640 }
3641
3642 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3643 NULL_RTX if we failed; the caller should emit a normal call. */
3644
3645 static rtx
3646 expand_builtin_strncpy (tree exp, rtx target)
3647 {
3648 location_t loc = EXPR_LOCATION (exp);
3649
3650 if (validate_arglist (exp,
3651 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3652 {
3653 tree dest = CALL_EXPR_ARG (exp, 0);
3654 tree src = CALL_EXPR_ARG (exp, 1);
3655 tree len = CALL_EXPR_ARG (exp, 2);
3656 tree slen = c_strlen (src, 1);
3657
3658 /* We must be passed a constant LEN and a SRC whose length is constant. */
3659 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3660 return NULL_RTX;
3661
3662 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3663
3664 /* We're required to pad with trailing zeros if the requested
3665 len is greater than strlen(s2)+1. In that case try to
3666 use store_by_pieces; if that fails, punt. */
3667 if (tree_int_cst_lt (slen, len))
3668 {
3669 unsigned int dest_align = get_pointer_alignment (dest);
3670 const char *p = c_getstr (src);
3671 rtx dest_mem;
3672
3673 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3674 || !can_store_by_pieces (tree_to_uhwi (len),
3675 builtin_strncpy_read_str,
3676 CONST_CAST (char *, p),
3677 dest_align, false))
3678 return NULL_RTX;
3679
3680 dest_mem = get_memory_rtx (dest, len);
3681 store_by_pieces (dest_mem, tree_to_uhwi (len),
3682 builtin_strncpy_read_str,
3683 CONST_CAST (char *, p), dest_align, false, 0);
3684 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3685 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3686 return dest_mem;
3687 }
3688 }
3689 return NULL_RTX;
3690 }
3691
3692 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3693 bytes from constant string DATA + OFFSET and return it as a target
3694 constant. */
3695
3696 rtx
3697 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3698 machine_mode mode)
3699 {
3700 const char *c = (const char *) data;
3701 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3702
3703 memset (p, *c, GET_MODE_SIZE (mode));
3704
3705 return c_readstr (p, mode);
3706 }
3707
3708 /* Callback routine for store_by_pieces. Return the RTL of a register
3709 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3710 char value given in the RTL register data. For example, if mode is
3711 4 bytes wide, return the RTL for 0x01010101*data. */
3712
3713 static rtx
3714 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3715 machine_mode mode)
3716 {
3717 rtx target, coeff;
3718 size_t size;
3719 char *p;
3720
3721 size = GET_MODE_SIZE (mode);
3722 if (size == 1)
3723 return (rtx) data;
3724
3725 p = XALLOCAVEC (char, size);
3726 memset (p, 1, size);
3727 coeff = c_readstr (p, mode);
3728
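  /* COEFF is now the constant 0x0101...01 for MODE; multiplying the
     zero-extended byte by it replicates that byte into every byte
     position of the result. */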
3729 target = convert_to_mode (mode, (rtx) data, 1);
3730 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3731 return force_reg (mode, target);
3732 }
3733
3734 /* Expand expression EXP, which is a call to the memset builtin. Return
3735 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3736 try to get the result in TARGET, if convenient (and in mode MODE if that's
3737 convenient). */
3738
3739 static rtx
3740 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3741 {
3742 if (!validate_arglist (exp,
3743 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3744 return NULL_RTX;
3745 else
3746 {
3747 tree dest = CALL_EXPR_ARG (exp, 0);
3748 tree val = CALL_EXPR_ARG (exp, 1);
3749 tree len = CALL_EXPR_ARG (exp, 2);
3750 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3751 }
3752 }
3753
3754 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3755 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3756 try to get the result in TARGET, if convenient (and in mode MODE if that's
3757 convenient). */
3758
3759 static rtx
3760 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3761 {
3762 if (!validate_arglist (exp,
3763 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3764 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3765 return NULL_RTX;
3766 else
3767 {
3768 tree dest = CALL_EXPR_ARG (exp, 0);
3769 tree val = CALL_EXPR_ARG (exp, 2);
3770 tree len = CALL_EXPR_ARG (exp, 3);
3771 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3772
3773 /* Return DEST bounds with the result. */
3774 if (res)
3775 {
3776 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3777 expand_normal (CALL_EXPR_ARG (exp, 1)));
3778 res = chkp_join_splitted_slot (res, bnd);
3779 }
3780 return res;
3781 }
3782 }
3783
3784 /* Helper function to do the actual work for expand_builtin_memset. The
3785 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3786 so that this can also be called without constructing an actual CALL_EXPR.
3787 The other arguments and return value are the same as for
3788 expand_builtin_memset. */
3789
3790 static rtx
3791 expand_builtin_memset_args (tree dest, tree val, tree len,
3792 rtx target, machine_mode mode, tree orig_exp)
3793 {
3794 tree fndecl, fn;
3795 enum built_in_function fcode;
3796 machine_mode val_mode;
3797 char c;
3798 unsigned int dest_align;
3799 rtx dest_mem, dest_addr, len_rtx;
3800 HOST_WIDE_INT expected_size = -1;
3801 unsigned int expected_align = 0;
3802 unsigned HOST_WIDE_INT min_size;
3803 unsigned HOST_WIDE_INT max_size;
3804 unsigned HOST_WIDE_INT probable_max_size;
3805
3806 dest_align = get_pointer_alignment (dest);
3807
3808 /* If DEST is not a pointer type, don't do this operation in-line. */
3809 if (dest_align == 0)
3810 return NULL_RTX;
3811
3812 if (currently_expanding_gimple_stmt)
3813 stringop_block_profile (currently_expanding_gimple_stmt,
3814 &expected_align, &expected_size);
3815
3816 if (expected_align < dest_align)
3817 expected_align = dest_align;
3818
3819 /* If the LEN parameter is zero, return DEST. */
3820 if (integer_zerop (len))
3821 {
3822 /* Evaluate and ignore VAL in case it has side-effects. */
3823 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3824 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3825 }
3826
3827 /* Stabilize the arguments in case we fail. */
3828 dest = builtin_save_expr (dest);
3829 val = builtin_save_expr (val);
3830 len = builtin_save_expr (len);
3831
3832 len_rtx = expand_normal (len);
3833 determine_block_size (len, len_rtx, &min_size, &max_size,
3834 &probable_max_size);
3835 dest_mem = get_memory_rtx (dest, len);
3836 val_mode = TYPE_MODE (unsigned_char_type_node);
3837
3838 if (TREE_CODE (val) != INTEGER_CST)
3839 {
3840 rtx val_rtx;
3841
3842 val_rtx = expand_normal (val);
3843 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3844
3845 /* Assume that we can memset by pieces if we can store
3846 the coefficients by pieces (in the required modes).
3847 We can't pass builtin_memset_gen_str as that emits RTL. */
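      /* The probe value below is a dummy; only the ability to store
         constants of the required modes matters, not the value stored. */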
3848 c = 1;
3849 if (tree_fits_uhwi_p (len)
3850 && can_store_by_pieces (tree_to_uhwi (len),
3851 builtin_memset_read_str, &c, dest_align,
3852 true))
3853 {
3854 val_rtx = force_reg (val_mode, val_rtx);
3855 store_by_pieces (dest_mem, tree_to_uhwi (len),
3856 builtin_memset_gen_str, val_rtx, dest_align,
3857 true, 0);
3858 }
3859 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3860 dest_align, expected_align,
3861 expected_size, min_size, max_size,
3862 probable_max_size))
3863 goto do_libcall;
3864
3865 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3866 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3867 return dest_mem;
3868 }
3869
3870 if (target_char_cast (val, &c))
3871 goto do_libcall;
3872
3873 if (c)
3874 {
3875 if (tree_fits_uhwi_p (len)
3876 && can_store_by_pieces (tree_to_uhwi (len),
3877 builtin_memset_read_str, &c, dest_align,
3878 true))
3879 store_by_pieces (dest_mem, tree_to_uhwi (len),
3880 builtin_memset_read_str, &c, dest_align, true, 0);
3881 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3882 gen_int_mode (c, val_mode),
3883 dest_align, expected_align,
3884 expected_size, min_size, max_size,
3885 probable_max_size))
3886 goto do_libcall;
3887
3888 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3889 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3890 return dest_mem;
3891 }
3892
3893 set_mem_align (dest_mem, dest_align);
3894 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3895 CALL_EXPR_TAILCALL (orig_exp)
3896 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3897 expected_align, expected_size,
3898 min_size, max_size,
3899 probable_max_size);
3900
3901 if (dest_addr == 0)
3902 {
3903 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3904 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3905 }
3906
3907 return dest_addr;
3908
3909 do_libcall:
3910 fndecl = get_callee_fndecl (orig_exp);
3911 fcode = DECL_FUNCTION_CODE (fndecl);
3912 if (fcode == BUILT_IN_MEMSET
3913 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3914 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3915 dest, val, len);
3916 else if (fcode == BUILT_IN_BZERO)
3917 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3918 dest, len);
3919 else
3920 gcc_unreachable ();
3921 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3922 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3923 return expand_call (fn, target, target == const0_rtx);
3924 }
3925
3926 /* Expand expression EXP, which is a call to the bzero builtin. Return
3927 NULL_RTX if we failed; the caller should emit a normal call. */
3928
3929 static rtx
3930 expand_builtin_bzero (tree exp)
3931 {
3932 tree dest, size;
3933 location_t loc = EXPR_LOCATION (exp);
3934
3935 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3936 return NULL_RTX;
3937
3938 dest = CALL_EXPR_ARG (exp, 0);
3939 size = CALL_EXPR_ARG (exp, 1);
3940
3941 /* New argument list transforming bzero(ptr x, int y) to
3942 memset(ptr x, int 0, size_t y). This is done this way
3943 so that if it isn't expanded inline, we fall back to
3944 calling bzero instead of memset. */
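/* E.g. bzero (buf, n) is expanded like memset (buf, 0, (size_t) n),
   but ORIG_EXP still names bzero, so the do_libcall fallback in
   expand_builtin_memset_args re-emits a bzero call. */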
3945
3946 return expand_builtin_memset_args (dest, integer_zero_node,
3947 fold_convert_loc (loc,
3948 size_type_node, size),
3949 const0_rtx, VOIDmode, exp);
3950 }
3951
3952 /* Expand expression EXP, which is a call to the memcmp built-in function.
3953 Return NULL_RTX if we failed and the caller should emit a normal call,
3954 otherwise try to get the result in TARGET, if convenient (and in mode
3955 MODE, if that's convenient). */
3956
3957 static rtx
3958 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3959 ATTRIBUTE_UNUSED machine_mode mode)
3960 {
3961 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3962
3963 if (!validate_arglist (exp,
3964 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3965 return NULL_RTX;
3966
3967 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3968 implementing memcmp because it will stop if it encounters two
3969 zero bytes. */
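/* E.g. memcmp ("a\0b", "a\0c", 3) must compare all three bytes and
   return nonzero, whereas a string compare would stop at the embedded
   NULs and wrongly report equality. */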
3970 #if defined HAVE_cmpmemsi
3971 {
3972 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3973 rtx result;
3974 rtx insn;
3975 tree arg1 = CALL_EXPR_ARG (exp, 0);
3976 tree arg2 = CALL_EXPR_ARG (exp, 1);
3977 tree len = CALL_EXPR_ARG (exp, 2);
3978
3979 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3980 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3981 machine_mode insn_mode;
3982
3983 if (HAVE_cmpmemsi)
3984 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3985 else
3986 return NULL_RTX;
3987
3988 /* If we don't know the alignment of either pointer, call the function. */
3989 if (arg1_align == 0 || arg2_align == 0)
3990 return NULL_RTX;
3991
3992 /* Make a place to write the result of the instruction. */
3993 result = target;
3994 if (! (result != 0
3995 && REG_P (result) && GET_MODE (result) == insn_mode
3996 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3997 result = gen_reg_rtx (insn_mode);
3998
3999 arg1_rtx = get_memory_rtx (arg1, len);
4000 arg2_rtx = get_memory_rtx (arg2, len);
4001 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4002
4003 /* Set MEM_SIZE as appropriate. */
4004 if (CONST_INT_P (arg3_rtx))
4005 {
4006 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
4007 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
4008 }
4009
4010 if (HAVE_cmpmemsi)
4011 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4012 GEN_INT (MIN (arg1_align, arg2_align)));
4013 else
4014 gcc_unreachable ();
4015
4016 if (insn)
4017 emit_insn (insn);
4018 else
4019 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4020 TYPE_MODE (integer_type_node), 3,
4021 XEXP (arg1_rtx, 0), Pmode,
4022 XEXP (arg2_rtx, 0), Pmode,
4023 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4024 TYPE_UNSIGNED (sizetype)),
4025 TYPE_MODE (sizetype));
4026
4027 /* Return the value in the proper mode for this function. */
4028 mode = TYPE_MODE (TREE_TYPE (exp));
4029 if (GET_MODE (result) == mode)
4030 return result;
4031 else if (target != 0)
4032 {
4033 convert_move (target, result, 0);
4034 return target;
4035 }
4036 else
4037 return convert_to_mode (mode, result, 0);
4038 }
4039 #endif /* HAVE_cmpmemsi. */
4040
4041 return NULL_RTX;
4042 }
4043
4044 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4045 if we failed; the caller should emit a normal call, otherwise try to get
4046 the result in TARGET, if convenient. */
4047
4048 static rtx
4049 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4050 {
4051 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4052 return NULL_RTX;
4053
4054 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4055 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4056 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4057 {
4058 rtx arg1_rtx, arg2_rtx;
4059 rtx result, insn = NULL_RTX;
4060 tree fndecl, fn;
4061 tree arg1 = CALL_EXPR_ARG (exp, 0);
4062 tree arg2 = CALL_EXPR_ARG (exp, 1);
4063
4064 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4065 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4066
4067 /* If we don't know the alignment of either pointer, call the function. */
4068 if (arg1_align == 0 || arg2_align == 0)
4069 return NULL_RTX;
4070
4071 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4072 arg1 = builtin_save_expr (arg1);
4073 arg2 = builtin_save_expr (arg2);
4074
4075 arg1_rtx = get_memory_rtx (arg1, NULL);
4076 arg2_rtx = get_memory_rtx (arg2, NULL);
4077
4078 #ifdef HAVE_cmpstrsi
4079 /* Try to call cmpstrsi. */
4080 if (HAVE_cmpstrsi)
4081 {
4082 machine_mode insn_mode
4083 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4084
4085 /* Make a place to write the result of the instruction. */
4086 result = target;
4087 if (! (result != 0
4088 && REG_P (result) && GET_MODE (result) == insn_mode
4089 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4090 result = gen_reg_rtx (insn_mode);
4091
4092 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4093 GEN_INT (MIN (arg1_align, arg2_align)));
4094 }
4095 #endif
4096 #ifdef HAVE_cmpstrnsi
4097 /* Try to determine at least one length and call cmpstrnsi. */
4098 if (!insn && HAVE_cmpstrnsi)
4099 {
4100 tree len;
4101 rtx arg3_rtx;
4102
4103 machine_mode insn_mode
4104 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4105 tree len1 = c_strlen (arg1, 1);
4106 tree len2 = c_strlen (arg2, 1);
4107
4108 if (len1)
4109 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4110 if (len2)
4111 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4112
4113 /* If we don't have a constant length for the first, use the length
4114 of the second, if we know it. We don't require a constant for
4115 this case; some cost analysis could be done if both are available
4116 but neither is constant. For now, assume they're equally cheap,
4117 unless one has side effects. If both strings have constant lengths,
4118 use the smaller. */
4119
4120 if (!len1)
4121 len = len2;
4122 else if (!len2)
4123 len = len1;
4124 else if (TREE_SIDE_EFFECTS (len1))
4125 len = len2;
4126 else if (TREE_SIDE_EFFECTS (len2))
4127 len = len1;
4128 else if (TREE_CODE (len1) != INTEGER_CST)
4129 len = len2;
4130 else if (TREE_CODE (len2) != INTEGER_CST)
4131 len = len1;
4132 else if (tree_int_cst_lt (len1, len2))
4133 len = len1;
4134 else
4135 len = len2;
4136
4137 /* If both arguments have side effects, we cannot optimize. */
4138 if (!len || TREE_SIDE_EFFECTS (len))
4139 goto do_libcall;
4140
4141 arg3_rtx = expand_normal (len);
4142
4143 /* Make a place to write the result of the instruction. */
4144 result = target;
4145 if (! (result != 0
4146 && REG_P (result) && GET_MODE (result) == insn_mode
4147 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4148 result = gen_reg_rtx (insn_mode);
4149
4150 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4151 GEN_INT (MIN (arg1_align, arg2_align)));
4152 }
4153 #endif
4154
4155 if (insn)
4156 {
4157 machine_mode mode;
4158 emit_insn (insn);
4159
4160 /* Return the value in the proper mode for this function. */
4161 mode = TYPE_MODE (TREE_TYPE (exp));
4162 if (GET_MODE (result) == mode)
4163 return result;
4164 if (target == 0)
4165 return convert_to_mode (mode, result, 0);
4166 convert_move (target, result, 0);
4167 return target;
4168 }
4169
4170 /* Expand the library call ourselves using a stabilized argument
4171 list to avoid evaluating the function's arguments twice. */
4172 #ifdef HAVE_cmpstrnsi
4173 do_libcall:
4174 #endif
4175 fndecl = get_callee_fndecl (exp);
4176 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4177 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4178 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4179 return expand_call (fn, target, target == const0_rtx);
4180 }
4181 #endif
4182 return NULL_RTX;
4183 }
4184
4185 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4186 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4187 the result in TARGET, if convenient. */
4188
4189 static rtx
4190 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4191 ATTRIBUTE_UNUSED machine_mode mode)
4192 {
4193 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4194
4195 if (!validate_arglist (exp,
4196 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4197 return NULL_RTX;
4198
4199 /* If c_strlen can determine an expression for one of the string
4200 lengths, and it doesn't have side effects, then emit cmpstrnsi
4201 using length MIN(strlen(string)+1, arg3). */
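/* E.g. for strncmp (s, "hi", n) the insn is handed length MIN (3, n),
   since bytes past the terminating NUL cannot affect the result. */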
4202 #ifdef HAVE_cmpstrnsi
4203 if (HAVE_cmpstrnsi)
4204 {
4205 tree len, len1, len2;
4206 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4207 rtx result, insn;
4208 tree fndecl, fn;
4209 tree arg1 = CALL_EXPR_ARG (exp, 0);
4210 tree arg2 = CALL_EXPR_ARG (exp, 1);
4211 tree arg3 = CALL_EXPR_ARG (exp, 2);
4212
4213 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4214 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4215 machine_mode insn_mode
4216 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4217
4218 len1 = c_strlen (arg1, 1);
4219 len2 = c_strlen (arg2, 1);
4220
4221 if (len1)
4222 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4223 if (len2)
4224 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4225
4226 /* If we don't have a constant length for the first, use the length
4227 of the second, if we know it. We don't require a constant for
4228 this case; some cost analysis could be done if both are available
4229 but neither is constant. For now, assume they're equally cheap,
4230 unless one has side effects. If both strings have constant lengths,
4231 use the smaller. */
4232
4233 if (!len1)
4234 len = len2;
4235 else if (!len2)
4236 len = len1;
4237 else if (TREE_SIDE_EFFECTS (len1))
4238 len = len2;
4239 else if (TREE_SIDE_EFFECTS (len2))
4240 len = len1;
4241 else if (TREE_CODE (len1) != INTEGER_CST)
4242 len = len2;
4243 else if (TREE_CODE (len2) != INTEGER_CST)
4244 len = len1;
4245 else if (tree_int_cst_lt (len1, len2))
4246 len = len1;
4247 else
4248 len = len2;
4249
4250 /* If both arguments have side effects, we cannot optimize. */
4251 if (!len || TREE_SIDE_EFFECTS (len))
4252 return NULL_RTX;
4253
4254 /* The actual new length parameter is MIN(len,arg3). */
4255 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4256 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4257
4258 /* If we don't know the alignment of either pointer, call the function. */
4259 if (arg1_align == 0 || arg2_align == 0)
4260 return NULL_RTX;
4261
4262 /* Make a place to write the result of the instruction. */
4263 result = target;
4264 if (! (result != 0
4265 && REG_P (result) && GET_MODE (result) == insn_mode
4266 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4267 result = gen_reg_rtx (insn_mode);
4268
4269 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4270 arg1 = builtin_save_expr (arg1);
4271 arg2 = builtin_save_expr (arg2);
4272 len = builtin_save_expr (len);
4273
4274 arg1_rtx = get_memory_rtx (arg1, len);
4275 arg2_rtx = get_memory_rtx (arg2, len);
4276 arg3_rtx = expand_normal (len);
4277 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4278 GEN_INT (MIN (arg1_align, arg2_align)));
4279 if (insn)
4280 {
4281 emit_insn (insn);
4282
4283 /* Return the value in the proper mode for this function. */
4284 mode = TYPE_MODE (TREE_TYPE (exp));
4285 if (GET_MODE (result) == mode)
4286 return result;
4287 if (target == 0)
4288 return convert_to_mode (mode, result, 0);
4289 convert_move (target, result, 0);
4290 return target;
4291 }
4292
4293 /* Expand the library call ourselves using a stabilized argument
4294 list to avoid evaluating the function's arguments twice. */
4295 fndecl = get_callee_fndecl (exp);
4296 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4297 arg1, arg2, len);
4298 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4299 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4300 return expand_call (fn, target, target == const0_rtx);
4301 }
4302 #endif
4303 return NULL_RTX;
4304 }
4305
4306 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4307 if that's convenient. */
4308
4309 rtx
4310 expand_builtin_saveregs (void)
4311 {
4312 rtx val;
4313 rtx_insn *seq;
4314
4315 /* Don't do __builtin_saveregs more than once in a function.
4316 Save the result of the first call and reuse it. */
4317 if (saveregs_value != 0)
4318 return saveregs_value;
4319
4320 /* When this function is called, it means that registers must be
4321 saved on entry to this function. So we migrate the call to the
4322 first insn of this function. */
4323
4324 start_sequence ();
4325
4326 /* Do whatever the machine needs done in this case. */
4327 val = targetm.calls.expand_builtin_saveregs ();
4328
4329 seq = get_insns ();
4330 end_sequence ();
4331
4332 saveregs_value = val;
4333
4334 /* Put the insns after the NOTE that starts the function. If this
4335 is inside a start_sequence, make the outer-level insn chain current, so
4336 the code is placed at the start of the function. */
4337 push_topmost_sequence ();
4338 emit_insn_after (seq, entry_of_function ());
4339 pop_topmost_sequence ();
4340
4341 return val;
4342 }
4343
4344 /* Expand a call to __builtin_next_arg. */
4345
4346 static rtx
4347 expand_builtin_next_arg (void)
4348 {
4349 /* Checking arguments is already done in fold_builtin_next_arg
4350 which must be called before this function. */
4351 return expand_binop (ptr_mode, add_optab,
4352 crtl->args.internal_arg_pointer,
4353 crtl->args.arg_offset_rtx,
4354 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4355 }
4356
4357 /* Make it easier for the backends by protecting the valist argument
4358 from multiple evaluations. */
4359
4360 static tree
4361 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4362 {
4363 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4364
4365 /* The current way of determining the type of valist is completely
4366 bogus. We should have the information on the va builtin instead. */
4367 if (!vatype)
4368 vatype = targetm.fn_abi_va_list (cfun->decl);
4369
4370 if (TREE_CODE (vatype) == ARRAY_TYPE)
4371 {
4372 if (TREE_SIDE_EFFECTS (valist))
4373 valist = save_expr (valist);
4374
4375 /* For this case, the backends will be expecting a pointer to
4376 vatype, but it's possible we've actually been given an array
4377 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4378 So fix it. */
4379 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4380 {
4381 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4382 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4383 }
4384 }
4385 else
4386 {
4387 tree pt = build_pointer_type (vatype);
4388
4389 if (! needs_lvalue)
4390 {
4391 if (! TREE_SIDE_EFFECTS (valist))
4392 return valist;
4393
4394 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4395 TREE_SIDE_EFFECTS (valist) = 1;
4396 }
4397
4398 if (TREE_SIDE_EFFECTS (valist))
4399 valist = save_expr (valist);
4400 valist = fold_build2_loc (loc, MEM_REF,
4401 vatype, valist, build_int_cst (pt, 0));
4402 }
4403
4404 return valist;
4405 }
4406
4407 /* The "standard" definition of va_list is void*. */
4408
4409 tree
4410 std_build_builtin_va_list (void)
4411 {
4412 return ptr_type_node;
4413 }
4414
4415 /* The "standard" abi va_list is va_list_type_node. */
4416
4417 tree
4418 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4419 {
4420 return va_list_type_node;
4421 }
4422
4423 /* The "standard" type of va_list is va_list_type_node. */
4424
4425 tree
4426 std_canonical_va_list_type (tree type)
4427 {
4428 tree wtype, htype;
4429
4430 if (INDIRECT_REF_P (type))
4431 type = TREE_TYPE (type);
4432 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4433 type = TREE_TYPE (type);
4434 wtype = va_list_type_node;
4435 htype = type;
4436 /* Handle structure va_list types. */
4437 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4438 htype = TREE_TYPE (htype);
4439 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4440 {
4441 /* If va_list is an array type, the argument may have decayed
4442 to a pointer type, e.g. by being passed to another function.
4443 In that case, unwrap both types so that we can compare the
4444 underlying records. */
4445 if (TREE_CODE (htype) == ARRAY_TYPE
4446 || POINTER_TYPE_P (htype))
4447 {
4448 wtype = TREE_TYPE (wtype);
4449 htype = TREE_TYPE (htype);
4450 }
4451 }
4452 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4453 return va_list_type_node;
4454
4455 return NULL_TREE;
4456 }
4457
4458 /* The "standard" implementation of va_start: just assign `nextarg' to
4459 the variable. */
4460
4461 void
4462 std_expand_builtin_va_start (tree valist, rtx nextarg)
4463 {
4464 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4465 convert_move (va_r, nextarg, 0);
4466
4467 /* We do not have any valid bounds for the pointer, so
4468 just store zero bounds for it. */
4469 if (chkp_function_instrumented_p (current_function_decl))
4470 chkp_expand_bounds_reset_for_mem (valist,
4471 make_tree (TREE_TYPE (valist),
4472 nextarg));
4473 }
4474
4475 /* Expand EXP, a call to __builtin_va_start. */
4476
4477 static rtx
4478 expand_builtin_va_start (tree exp)
4479 {
4480 rtx nextarg;
4481 tree valist;
4482 location_t loc = EXPR_LOCATION (exp);
4483
4484 if (call_expr_nargs (exp) < 2)
4485 {
4486 error_at (loc, "too few arguments to function %<va_start%>");
4487 return const0_rtx;
4488 }
4489
4490 if (fold_builtin_next_arg (exp, true))
4491 return const0_rtx;
4492
4493 nextarg = expand_builtin_next_arg ();
4494 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4495
4496 if (targetm.expand_builtin_va_start)
4497 targetm.expand_builtin_va_start (valist, nextarg);
4498 else
4499 std_expand_builtin_va_start (valist, nextarg);
4500
4501 return const0_rtx;
4502 }
4503
4504 /* Expand EXP, a call to __builtin_va_end. */
4505
4506 static rtx
4507 expand_builtin_va_end (tree exp)
4508 {
4509 tree valist = CALL_EXPR_ARG (exp, 0);
4510
4511 /* Evaluate for side effects, if needed. I hate macros that don't
4512 do that. */
4513 if (TREE_SIDE_EFFECTS (valist))
4514 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4515
4516 return const0_rtx;
4517 }
4518
4519 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4520 builtin rather than just as an assignment in stdarg.h because of the
4521 nastiness of array-type va_list types. */
4522
4523 static rtx
4524 expand_builtin_va_copy (tree exp)
4525 {
4526 tree dst, src, t;
4527 location_t loc = EXPR_LOCATION (exp);
4528
4529 dst = CALL_EXPR_ARG (exp, 0);
4530 src = CALL_EXPR_ARG (exp, 1);
4531
4532 dst = stabilize_va_list_loc (loc, dst, 1);
4533 src = stabilize_va_list_loc (loc, src, 0);
4534
4535 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4536
4537 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4538 {
4539 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4540 TREE_SIDE_EFFECTS (t) = 1;
4541 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4542 }
4543 else
4544 {
4545 rtx dstb, srcb, size;
4546
4547 /* Evaluate to pointers. */
4548 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4549 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4550 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4551 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4552
4553 dstb = convert_memory_address (Pmode, dstb);
4554 srcb = convert_memory_address (Pmode, srcb);
4555
4556 /* "Dereference" to BLKmode memories. */
4557 dstb = gen_rtx_MEM (BLKmode, dstb);
4558 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4559 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4560 srcb = gen_rtx_MEM (BLKmode, srcb);
4561 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4562 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4563
4564 /* Copy. */
4565 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4566 }
4567
4568 return const0_rtx;
4569 }
4570
4571 /* Expand a call to one of the builtin functions __builtin_frame_address or
4572 __builtin_return_address. */
4573
4574 static rtx
4575 expand_builtin_frame_address (tree fndecl, tree exp)
4576 {
4577 /* The argument must be a nonnegative integer constant.
4578 It counts the number of frames to scan up the stack. The value is the
4579 saved return address of that frame, or its address for __builtin_frame_address. */
4580 if (call_expr_nargs (exp) == 0)
4581 /* Warning about missing arg was already issued. */
4582 return const0_rtx;
4583 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4584 {
4585 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4586 error ("invalid argument to %<__builtin_frame_address%>");
4587 else
4588 error ("invalid argument to %<__builtin_return_address%>");
4589 return const0_rtx;
4590 }
4591 else
4592 {
4593 rtx tem
4594 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4595 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4596
4597 /* Some ports cannot access arbitrary stack frames. */
4598 if (tem == NULL)
4599 {
4600 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4601 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4602 else
4603 warning (0, "unsupported argument to %<__builtin_return_address%>");
4604 return const0_rtx;
4605 }
4606
4607 /* For __builtin_frame_address, return what we've got. */
4608 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4609 return tem;
4610
4611 if (!REG_P (tem)
4612 && ! CONSTANT_P (tem))
4613 tem = copy_addr_to_reg (tem);
4614 return tem;
4615 }
4616 }
4617
4618 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4619 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4620 is the same as for allocate_dynamic_stack_space. */
4621
4622 static rtx
4623 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4624 {
4625 rtx op0;
4626 rtx result;
4627 bool valid_arglist;
4628 unsigned int align;
4629 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4630 == BUILT_IN_ALLOCA_WITH_ALIGN);
4631
4632 valid_arglist
4633 = (alloca_with_align
4634 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4635 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4636
4637 if (!valid_arglist)
4638 return NULL_RTX;
4639
4640 /* Compute the argument. */
4641 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4642
4643 /* Compute the alignment. */
4644 align = (alloca_with_align
4645 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4646 : BIGGEST_ALIGNMENT);
4647
4648 /* Allocate the desired space. */
4649 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4650 result = convert_memory_address (ptr_mode, result);
4651
4652 return result;
4653 }
4654
4655 /* Expand a call to bswap builtin in EXP.
4656 Return NULL_RTX if a normal call should be emitted rather than expanding the
4657 function in-line. If convenient, the result should be placed in TARGET.
4658 SUBTARGET may be used as the target for computing one of EXP's operands. */
4659
4660 static rtx
4661 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4662 rtx subtarget)
4663 {
4664 tree arg;
4665 rtx op0;
4666
4667 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4668 return NULL_RTX;
4669
4670 arg = CALL_EXPR_ARG (exp, 0);
4671 op0 = expand_expr (arg,
4672 subtarget && GET_MODE (subtarget) == target_mode
4673 ? subtarget : NULL_RTX,
4674 target_mode, EXPAND_NORMAL);
4675 if (GET_MODE (op0) != target_mode)
4676 op0 = convert_to_mode (target_mode, op0, 1);
4677
4678 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4679
4680 gcc_assert (target);
4681
4682 return convert_to_mode (target_mode, target, 1);
4683 }
4684
4685 /* Expand a call to a unary builtin in EXP.
4686 Return NULL_RTX if a normal call should be emitted rather than expanding the
4687 function in-line. If convenient, the result should be placed in TARGET.
4688 SUBTARGET may be used as the target for computing one of EXP's operands. */
4689
4690 static rtx
4691 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4692 rtx subtarget, optab op_optab)
4693 {
4694 rtx op0;
4695
4696 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4697 return NULL_RTX;
4698
4699 /* Compute the argument. */
4700 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4701 (subtarget
4702 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4703 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4704 VOIDmode, EXPAND_NORMAL);
4705 /* Compute op, into TARGET if possible.
4706 Set TARGET to wherever the result comes back. */
4707 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4708 op_optab, op0, target, op_optab != clrsb_optab);
4709 gcc_assert (target);
4710
4711 return convert_to_mode (target_mode, target, 0);
4712 }
4713
4714 /* Expand a call to __builtin_expect. We just return our argument
4715 as the builtin_expect semantics should have already been handled by
4716 the tree branch prediction pass. */
4717
4718 static rtx
4719 expand_builtin_expect (tree exp, rtx target)
4720 {
4721 tree arg;
4722
4723 if (call_expr_nargs (exp) < 2)
4724 return const0_rtx;
4725 arg = CALL_EXPR_ARG (exp, 0);
4726
4727 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4728 /* When guessing was done, the hints should already have been stripped away. */
4729 gcc_assert (!flag_guess_branch_prob
4730 || optimize == 0 || seen_error ());
4731 return target;
4732 }
4733
4734 /* Expand a call to __builtin_assume_aligned. We just return our first
4735 argument as the builtin_assume_aligned semantics should have already
4736 been handled by CCP. */
4737
4738 static rtx
4739 expand_builtin_assume_aligned (tree exp, rtx target)
4740 {
4741 if (call_expr_nargs (exp) < 2)
4742 return const0_rtx;
4743 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4744 EXPAND_NORMAL);
4745 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4746 && (call_expr_nargs (exp) < 3
4747 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4748 return target;
4749 }
4750
4751 void
4752 expand_builtin_trap (void)
4753 {
4754 #ifdef HAVE_trap
4755 if (HAVE_trap)
4756 {
4757 rtx_insn *insn = emit_insn (gen_trap ());
4758 /* For trap insns when not accumulating outgoing args force
4759 REG_ARGS_SIZE note to prevent crossjumping of calls with
4760 different args sizes. */
4761 if (!ACCUMULATE_OUTGOING_ARGS)
4762 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4763 }
4764 else
4765 #endif
4766 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4767 emit_barrier ();
4768 }
4769
4770 /* Expand a call to __builtin_unreachable. We do nothing except emit
4771 a barrier saying that control flow will not pass here.
4772
4773 It is the responsibility of the program being compiled to ensure
4774 that control flow never reaches __builtin_unreachable. */
4775 static void
4776 expand_builtin_unreachable (void)
4777 {
4778 emit_barrier ();
4779 }
4780
4781 /* Expand EXP, a call to fabs, fabsf or fabsl.
4782 Return NULL_RTX if a normal call should be emitted rather than expanding
4783 the function inline. If convenient, the result should be placed
4784 in TARGET. SUBTARGET may be used as the target for computing
4785 the operand. */
4786
4787 static rtx
4788 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4789 {
4790 machine_mode mode;
4791 tree arg;
4792 rtx op0;
4793
4794 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4795 return NULL_RTX;
4796
4797 arg = CALL_EXPR_ARG (exp, 0);
4798 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4799 mode = TYPE_MODE (TREE_TYPE (arg));
4800 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4801 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4802 }
4803
4804 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4805 Return NULL_RTX if a normal call should be emitted rather than expanding the
4806 function inline. If convenient, the result should be placed in TARGET.
4807 SUBTARGET may be used as the target for computing the operand. */
4808
4809 static rtx
4810 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4811 {
4812 rtx op0, op1;
4813 tree arg;
4814
4815 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4816 return NULL_RTX;
4817
4818 arg = CALL_EXPR_ARG (exp, 0);
4819 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4820
4821 arg = CALL_EXPR_ARG (exp, 1);
4822 op1 = expand_normal (arg);
4823
4824 return expand_copysign (op0, op1, target);
4825 }
4826
4827 /* Expand a call to __builtin___clear_cache. */
4828
4829 static rtx
4830 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4831 {
4832 #ifndef HAVE_clear_cache
4833 #ifdef CLEAR_INSN_CACHE
4834 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4835 does something. Just do the default expansion to a call to
4836 __clear_cache(). */
4837 return NULL_RTX;
4838 #else
4839 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4840 does nothing. There is no need to call it. Do nothing. */
4841 return const0_rtx;
4842 #endif /* CLEAR_INSN_CACHE */
4843 #else
4844 /* We have a "clear_cache" insn, and it will handle everything. */
4845 tree begin, end;
4846 rtx begin_rtx, end_rtx;
4847
4848 /* We must not expand to a library call. If we did, any
4849 fallback library function in libgcc that might contain a call to
4850 __builtin___clear_cache() would recurse infinitely. */
4851 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4852 {
4853 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4854 return const0_rtx;
4855 }
4856
4857 if (HAVE_clear_cache)
4858 {
4859 struct expand_operand ops[2];
4860
4861 begin = CALL_EXPR_ARG (exp, 0);
4862 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4863
4864 end = CALL_EXPR_ARG (exp, 1);
4865 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4866
4867 create_address_operand (&ops[0], begin_rtx);
4868 create_address_operand (&ops[1], end_rtx);
4869 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4870 return const0_rtx;
4871 }
4872 return const0_rtx;
4873 #endif /* HAVE_clear_cache */
4874 }
4875
4876 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4877
4878 static rtx
4879 round_trampoline_addr (rtx tramp)
4880 {
4881 rtx temp, addend, mask;
4882
4883 /* If we don't need too much alignment, we'll have been guaranteed
4884 proper alignment by get_trampoline_type. */
4885 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4886 return tramp;
4887
4888 /* Round address up to desired boundary. */
4889 temp = gen_reg_rtx (Pmode);
4890 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4891 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4892
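  /* I.e. compute TRAMP = (TRAMP + UNITS - 1) & -UNITS, where UNITS is
     TRAMPOLINE_ALIGNMENT in bytes. */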
4893 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4894 temp, 0, OPTAB_LIB_WIDEN);
4895 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4896 temp, 0, OPTAB_LIB_WIDEN);
4897
4898 return tramp;
4899 }
4900
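/* Expand a call to __builtin_init_trampoline (ONSTACK true) or
   __builtin_init_heap_trampoline (ONSTACK false). */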
4901 static rtx
4902 expand_builtin_init_trampoline (tree exp, bool onstack)
4903 {
4904 tree t_tramp, t_func, t_chain;
4905 rtx m_tramp, r_tramp, r_chain, tmp;
4906
4907 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4908 POINTER_TYPE, VOID_TYPE))
4909 return NULL_RTX;
4910
4911 t_tramp = CALL_EXPR_ARG (exp, 0);
4912 t_func = CALL_EXPR_ARG (exp, 1);
4913 t_chain = CALL_EXPR_ARG (exp, 2);
4914
4915 r_tramp = expand_normal (t_tramp);
4916 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4917 MEM_NOTRAP_P (m_tramp) = 1;
4918
4919 /* If ONSTACK, the TRAMP argument should be the address of a field
4920 within the local function's FRAME decl. Either way, let's see if
4921 we can fill in the MEM_ATTRs for this memory. */
4922 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4923 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4924
4925 /* Creator of a heap trampoline is responsible for making sure the
4926 address is aligned to at least STACK_BOUNDARY. Normally malloc
4927 will ensure this anyhow. */
4928 tmp = round_trampoline_addr (r_tramp);
4929 if (tmp != r_tramp)
4930 {
4931 m_tramp = change_address (m_tramp, BLKmode, tmp);
4932 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4933 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4934 }
4935
4936 /* The FUNC argument should be the address of the nested function.
4937 Extract the actual function decl to pass to the hook. */
4938 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4939 t_func = TREE_OPERAND (t_func, 0);
4940 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4941
4942 r_chain = expand_normal (t_chain);
4943
4944 /* Generate insns to initialize the trampoline. */
4945 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4946
4947 if (onstack)
4948 {
4949 trampolines_created = 1;
4950
4951 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4952 "trampoline generated for nested function %qD", t_func);
4953 }
4954
4955 return const0_rtx;
4956 }
4957
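/* Expand a call to __builtin_adjust_trampoline. */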
4958 static rtx
4959 expand_builtin_adjust_trampoline (tree exp)
4960 {
4961 rtx tramp;
4962
4963 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4964 return NULL_RTX;
4965
4966 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4967 tramp = round_trampoline_addr (tramp);
4968 if (targetm.calls.trampoline_adjust_address)
4969 tramp = targetm.calls.trampoline_adjust_address (tramp);
4970
4971 return tramp;
4972 }
4973
4974 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4975 function. The function first checks whether the back end provides
4976 an insn to implement signbit for the respective mode. If not, it
4977 checks whether the floating point format of the value is such that
4978 the sign bit can be extracted. If that is not the case, the
4979 function returns NULL_RTX to indicate that a normal call should be
4980 emitted rather than expanding the function in-line. EXP is the
4981 expression that is a call to the builtin function; if convenient,
4982 the result should be placed in TARGET. */
4983 static rtx
4984 expand_builtin_signbit (tree exp, rtx target)
4985 {
4986 const struct real_format *fmt;
4987 machine_mode fmode, imode, rmode;
4988 tree arg;
4989 int word, bitpos;
4990 enum insn_code icode;
4991 rtx temp;
4992 location_t loc = EXPR_LOCATION (exp);
4993
4994 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4995 return NULL_RTX;
4996
4997 arg = CALL_EXPR_ARG (exp, 0);
4998 fmode = TYPE_MODE (TREE_TYPE (arg));
4999 rmode = TYPE_MODE (TREE_TYPE (exp));
5000 fmt = REAL_MODE_FORMAT (fmode);
5001
5002 arg = builtin_save_expr (arg);
5003
5004 /* Expand the argument yielding an RTX expression. */
5005 temp = expand_normal (arg);
5006
5007 /* Check if the back end provides an insn that handles signbit for the
5008 argument's mode. */
5009 icode = optab_handler (signbit_optab, fmode);
5010 if (icode != CODE_FOR_nothing)
5011 {
5012 rtx_insn *last = get_last_insn ();
5013 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5014 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5015 return target;
5016 delete_insns_since (last);
5017 }
5018
5019 /* For floating point formats without a sign bit, implement signbit
5020 as "ARG < 0.0". */
5021 bitpos = fmt->signbit_ro;
5022 if (bitpos < 0)
5023 {
5024 /* But we can't do this if the format supports signed zero. */
5025 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5026 return NULL_RTX;
5027
5028 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5029 build_real (TREE_TYPE (arg), dconst0));
5030 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5031 }
5032
5033 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5034 {
5035 imode = int_mode_for_mode (fmode);
5036 if (imode == BLKmode)
5037 return NULL_RTX;
5038 temp = gen_lowpart (imode, temp);
5039 }
5040 else
5041 {
5042 imode = word_mode;
5043 /* Handle targets with different FP word orders. */
5044 if (FLOAT_WORDS_BIG_ENDIAN)
5045 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5046 else
5047 word = bitpos / BITS_PER_WORD;
5048 temp = operand_subword_force (temp, word, fmode);
5049 bitpos = bitpos % BITS_PER_WORD;
5050 }
5051
5052 /* Force the intermediate word_mode (or narrower) result into a
5053 register. This avoids attempting to create paradoxical SUBREGs
5054 of floating point modes below. */
5055 temp = force_reg (imode, temp);
5056
5057 /* If the bitpos is within the "result mode" lowpart, the operation
5058 can be implemented with a single bitwise AND. Otherwise, we need
5059 a right shift and an AND. */
5060
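  /* E.g. on a typical 32-bit target, bit 31 of an IEEE single extracted
     into SImode takes the single-AND path, while bit 63 of a double with
     64-bit words needs the shift first. */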
5061 if (bitpos < GET_MODE_BITSIZE (rmode))
5062 {
5063 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5064
5065 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5066 temp = gen_lowpart (rmode, temp);
5067 temp = expand_binop (rmode, and_optab, temp,
5068 immed_wide_int_const (mask, rmode),
5069 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5070 }
5071 else
5072 {
5073 /* Perform a logical right shift to place the signbit in the least
5074 significant bit, then truncate the result to the desired mode
5075 and mask just this bit. */
5076 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5077 temp = gen_lowpart (rmode, temp);
5078 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5079 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5080 }
5081
5082 return temp;
5083 }
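/* For example, for IEEE double (signbit_ro == 63) on a typical 32-bit
   target with little-endian FP word order, the code above selects word 1
   and reduces bitpos to 31; bit 31 lies within the 32-bit result mode, so
   a single mask of bit 31 suffices.  On a typical 64-bit target the value
   fits in one word, so bit 63 is shifted down to bit 0 and masked with 1
   instead.  */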
5084
5085 /* Expand fork or exec calls. TARGET is the desired target of the
5086 call. EXP is the call. FN is the declaration of the
5087 actual function. IGNORE is nonzero if the
5088 value is to be ignored. */
5089
5090 static rtx
5091 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5092 {
5093 tree id, decl;
5094 tree call;
5095
5096 /* If we are not profiling, just call the function. */
5097 if (!profile_arc_flag)
5098 return NULL_RTX;
5099
5100 /* Otherwise call the wrapper. This should be equivalent for the rest of
5101 the compiler, so the code does not diverge, and the wrapper may run the
5102 code necessary for keeping the profiling data sane. */
5103
5104 switch (DECL_FUNCTION_CODE (fn))
5105 {
5106 case BUILT_IN_FORK:
5107 id = get_identifier ("__gcov_fork");
5108 break;
5109
5110 case BUILT_IN_EXECL:
5111 id = get_identifier ("__gcov_execl");
5112 break;
5113
5114 case BUILT_IN_EXECV:
5115 id = get_identifier ("__gcov_execv");
5116 break;
5117
5118 case BUILT_IN_EXECLP:
5119 id = get_identifier ("__gcov_execlp");
5120 break;
5121
5122 case BUILT_IN_EXECLE:
5123 id = get_identifier ("__gcov_execle");
5124 break;
5125
5126 case BUILT_IN_EXECVP:
5127 id = get_identifier ("__gcov_execvp");
5128 break;
5129
5130 case BUILT_IN_EXECVE:
5131 id = get_identifier ("__gcov_execve");
5132 break;
5133
5134 default:
5135 gcc_unreachable ();
5136 }
5137
5138 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5139 FUNCTION_DECL, id, TREE_TYPE (fn));
5140 DECL_EXTERNAL (decl) = 1;
5141 TREE_PUBLIC (decl) = 1;
5142 DECL_ARTIFICIAL (decl) = 1;
5143 TREE_NOTHROW (decl) = 1;
5144 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5145 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5146 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5147 return expand_call (call, target, ignore);
5148 }
5149
5150
5151 \f
5152 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5153 the pointer in these functions is void*, the tree optimizers may remove
5154 casts. The mode computed in expand_builtin isn't reliable either, due
5155 to __sync_bool_compare_and_swap.
5156
5157 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5158 group of builtins. This gives us log2 of the mode size. */
5159
5160 static inline machine_mode
5161 get_builtin_sync_mode (int fcode_diff)
5162 {
5163 /* The size is not negotiable, so ask not to get BLKmode in return
5164 if the target indicates that a smaller size would be better. */
5165 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5166 }
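/* For example, BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1
   gives FCODE_DIFF == 2, so the request is for a MODE_INT mode of
   BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on typical targets.  */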
5167
5168 /* Expand the memory expression LOC and return the appropriate memory operand
5169 for the builtin_sync operations. */
5170
5171 static rtx
5172 get_builtin_sync_mem (tree loc, machine_mode mode)
5173 {
5174 rtx addr, mem;
5175
5176 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5177 addr = convert_memory_address (Pmode, addr);
5178
5179 /* Note that we explicitly do not want any alias information for this
5180 memory, so that we kill all other live memories. Otherwise we don't
5181 satisfy the full barrier semantics of the intrinsic. */
5182 mem = validize_mem (gen_rtx_MEM (mode, addr));
5183
5184 /* The alignment needs to be at least that of the mode. */
5185 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5186 get_pointer_alignment (loc)));
5187 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5188 MEM_VOLATILE_P (mem) = 1;
5189
5190 return mem;
5191 }
5192
5193 /* Make sure an argument is in the right mode.
5194 EXP is the tree argument.
5195 MODE is the mode it should be in. */
5196
5197 static rtx
5198 expand_expr_force_mode (tree exp, machine_mode mode)
5199 {
5200 rtx val;
5201 machine_mode old_mode;
5202
5203 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5204 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5205 of CONST_INTs, where we know the old_mode only from the call argument. */
5206
5207 old_mode = GET_MODE (val);
5208 if (old_mode == VOIDmode)
5209 old_mode = TYPE_MODE (TREE_TYPE (exp));
5210 val = convert_modes (mode, old_mode, val, 1);
5211 return val;
5212 }
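/* For example, the constant 1 in __sync_fetch_and_add (p, 1) expands to a
   CONST_INT, which carries VOIDmode; its real mode is recovered from the
   type of the tree argument before the conversion above.  */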
5213
5214
5215 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5216 EXP is the CALL_EXPR. CODE is the rtx code
5217 that corresponds to the arithmetic or logical operation from the name;
5218 an exception here is that NOT actually means NAND. TARGET is an optional
5219 place for us to store the results; AFTER is true if this is the
5220 fetch_and_xxx form. */
5221
5222 static rtx
5223 expand_builtin_sync_operation (machine_mode mode, tree exp,
5224 enum rtx_code code, bool after,
5225 rtx target)
5226 {
5227 rtx val, mem;
5228 location_t loc = EXPR_LOCATION (exp);
5229
5230 if (code == NOT && warn_sync_nand)
5231 {
5232 tree fndecl = get_callee_fndecl (exp);
5233 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5234
5235 static bool warned_f_a_n, warned_n_a_f;
5236
5237 switch (fcode)
5238 {
5239 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5240 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5241 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5242 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5243 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5244 if (warned_f_a_n)
5245 break;
5246
5247 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5248 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5249 warned_f_a_n = true;
5250 break;
5251
5252 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5253 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5254 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5255 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5256 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5257 if (warned_n_a_f)
5258 break;
5259
5260 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5261 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5262 warned_n_a_f = true;
5263 break;
5264
5265 default:
5266 gcc_unreachable ();
5267 }
5268 }
5269
5270 /* Expand the operands. */
5271 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5272 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5273
5274 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5275 after);
5276 }
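/* For reference, the GCC 4.4 semantic change warned about above: since
   4.4, __sync_fetch_and_nand (p, v) stores ~(*p & v) and returns the old
   value, whereas earlier releases stored ~*p & v.  */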
5277
5278 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5279 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5280 true if this is the boolean form. TARGET is a place for us to store the
5281 results; this is NOT optional if IS_BOOL is true. */
5282
5283 static rtx
5284 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5285 bool is_bool, rtx target)
5286 {
5287 rtx old_val, new_val, mem;
5288 rtx *pbool, *poval;
5289
5290 /* Expand the operands. */
5291 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5292 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5293 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5294
5295 pbool = poval = NULL;
5296 if (target != const0_rtx)
5297 {
5298 if (is_bool)
5299 pbool = &target;
5300 else
5301 poval = &target;
5302 }
5303 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5304 false, MEMMODEL_SEQ_CST,
5305 MEMMODEL_SEQ_CST))
5306 return NULL_RTX;
5307
5308 return target;
5309 }
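/* For example, __sync_bool_compare_and_swap returns whether the swap
   happened, so only PBOOL is wired to TARGET above, whereas
   __sync_val_compare_and_swap returns the prior contents of the memory,
   so only POVAL is.  A TARGET of const0_rtx means neither result is
   wanted and the operation is expanded for its side effect alone.  */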
5310
5311 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5312 general form is actually an atomic exchange, and some targets only
5313 support a reduced form with the second argument being a constant 1.
5314 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5315 the results. */
5316
5317 static rtx
5318 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5319 rtx target)
5320 {
5321 rtx val, mem;
5322
5323 /* Expand the operands. */
5324 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5325 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5326
5327 return expand_sync_lock_test_and_set (target, mem, val);
5328 }
5329
5330 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5331
5332 static void
5333 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5334 {
5335 rtx mem;
5336
5337 /* Expand the operands. */
5338 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5339
5340 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5341 }
5342
5343 /* Given an integer representing an ``enum memmodel'', verify its
5344 correctness and return the memory model enum. */
5345
5346 static enum memmodel
5347 get_memmodel (tree exp)
5348 {
5349 rtx op;
5350 unsigned HOST_WIDE_INT val;
5351
5352 /* If the parameter is not a constant, it's a run time value so we'll just
5353 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5354 if (TREE_CODE (exp) != INTEGER_CST)
5355 return MEMMODEL_SEQ_CST;
5356
5357 op = expand_normal (exp);
5358
5359 val = INTVAL (op);
5360 if (targetm.memmodel_check)
5361 val = targetm.memmodel_check (val);
5362 else if (val & ~MEMMODEL_MASK)
5363 {
5364 warning (OPT_Winvalid_memory_model,
5365 "Unknown architecture specifier in memory model to builtin.");
5366 return MEMMODEL_SEQ_CST;
5367 }
5368
5369 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5370 {
5371 warning (OPT_Winvalid_memory_model,
5372 "invalid memory model argument to builtin");
5373 return MEMMODEL_SEQ_CST;
5374 }
5375
5376 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5377 be conservative and promote consume to acquire. */
5378 if (val == MEMMODEL_CONSUME)
5379 val = MEMMODEL_ACQUIRE;
5380
5381 return (enum memmodel) val;
5382 }
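/* For example, __atomic_load_n (p, __ATOMIC_CONSUME) reaches this point
   with an INTEGER_CST argument and is promoted to MEMMODEL_ACQUIRE as
   above, while __atomic_load_n (p, runtime_model) with a non-constant
   argument is conservatively treated as MEMMODEL_SEQ_CST.  */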
5383
5384 /* Expand the __atomic_exchange intrinsic:
5385 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5386 EXP is the CALL_EXPR.
5387 TARGET is an optional place for us to store the results. */
5388
5389 static rtx
5390 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5391 {
5392 rtx val, mem;
5393 enum memmodel model;
5394
5395 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5396
5397 if (!flag_inline_atomics)
5398 return NULL_RTX;
5399
5400 /* Expand the operands. */
5401 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5402 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5403
5404 return expand_atomic_exchange (target, mem, val, model);
5405 }
5406
5407 /* Expand the __atomic_compare_exchange intrinsic:
5408 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5409 TYPE desired, BOOL weak,
5410 enum memmodel success,
5411 enum memmodel failure)
5412 EXP is the CALL_EXPR.
5413 TARGET is an optional place for us to store the results. */
5414
5415 static rtx
5416 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5417 rtx target)
5418 {
5419 rtx expect, desired, mem, oldval;
5420 rtx_code_label *label;
5421 enum memmodel success, failure;
5422 tree weak;
5423 bool is_weak;
5424
5425 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5426 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5427
5428 if (failure > success)
5429 {
5430 warning (OPT_Winvalid_memory_model,
5431 "failure memory model cannot be stronger than success memory "
5432 "model for %<__atomic_compare_exchange%>");
5433 success = MEMMODEL_SEQ_CST;
5434 }
5435
5436 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5437 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5438 {
5439 warning (OPT_Winvalid_memory_model,
5440 "invalid failure memory model for "
5441 "%<__atomic_compare_exchange%>");
5442 failure = MEMMODEL_SEQ_CST;
5443 success = MEMMODEL_SEQ_CST;
5444 }
5445
5446
5447 if (!flag_inline_atomics)
5448 return NULL_RTX;
5449
5450 /* Expand the operands. */
5451 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5452
5453 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5454 expect = convert_memory_address (Pmode, expect);
5455 expect = gen_rtx_MEM (mode, expect);
5456 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5457
5458 weak = CALL_EXPR_ARG (exp, 3);
5459 is_weak = false;
5460 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5461 is_weak = true;
5462
5463 if (target == const0_rtx)
5464 target = NULL;
5465
5466 /* Lest the rtl backend create a race condition with an improper store
5467 to memory, always create a new pseudo for OLDVAL. */
5468 oldval = NULL;
5469
5470 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5471 is_weak, success, failure))
5472 return NULL_RTX;
5473
5474 /* Conditionally store back to EXPECT, lest we create a race condition
5475 with an improper store to memory. */
5476 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5477 the normal case where EXPECT is totally private, i.e. a register, at
5478 which point the store can be unconditional. */
5479 label = gen_label_rtx ();
5480 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5481 emit_move_insn (expect, oldval);
5482 emit_label (label);
5483
5484 return target;
5485 }
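/* A rough C-level sketch of the sequence emitted above (the compare and
   swap itself is a single atomic operation, not the separate steps
   shown):
     ok = CAS (object, *expect, desired);
     if (!ok)
       *expect = value observed in *object;
   with the store back to *expect guarded by the branch on the result.  */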
5486
5487 /* Expand the __atomic_load intrinsic:
5488 TYPE __atomic_load (TYPE *object, enum memmodel)
5489 EXP is the CALL_EXPR.
5490 TARGET is an optional place for us to store the results. */
5491
5492 static rtx
5493 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5494 {
5495 rtx mem;
5496 enum memmodel model;
5497
5498 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5499 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5500 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5501 {
5502 warning (OPT_Winvalid_memory_model,
5503 "invalid memory model for %<__atomic_load%>");
5504 model = MEMMODEL_SEQ_CST;
5505 }
5506
5507 if (!flag_inline_atomics)
5508 return NULL_RTX;
5509
5510 /* Expand the operand. */
5511 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5512
5513 return expand_atomic_load (target, mem, model);
5514 }
5515
5516
5517 /* Expand the __atomic_store intrinsic:
5518 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5519 EXP is the CALL_EXPR.
5520 Returns NULL_RTX if the store cannot be expanded inline. */
5521
5522 static rtx
5523 expand_builtin_atomic_store (machine_mode mode, tree exp)
5524 {
5525 rtx mem, val;
5526 enum memmodel model;
5527
5528 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5529 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5530 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5531 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5532 {
5533 warning (OPT_Winvalid_memory_model,
5534 "invalid memory model for %<__atomic_store%>");
5535 model = MEMMODEL_SEQ_CST;
5536 }
5537
5538 if (!flag_inline_atomics)
5539 return NULL_RTX;
5540
5541 /* Expand the operands. */
5542 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5543 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5544
5545 return expand_atomic_store (mem, val, model, false);
5546 }
5547
5548 /* Expand the __atomic_fetch_XXX intrinsic:
5549 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5550 EXP is the CALL_EXPR.
5551 TARGET is an optional place for us to store the results.
5552 CODE is the operation: PLUS, MINUS, AND, NOT (meaning NAND), XOR, or IOR.
5553 FETCH_AFTER is true if returning the result of the operation.
5554 FETCH_AFTER is false if returning the value before the operation.
5555 IGNORE is true if the result is not used.
5556 EXT_CALL is the correct builtin for an external call if this cannot be
5557 resolved to an instruction sequence. */
5558
5559 static rtx
5560 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5561 enum rtx_code code, bool fetch_after,
5562 bool ignore, enum built_in_function ext_call)
5563 {
5564 rtx val, mem, ret;
5565 enum memmodel model;
5566 tree fndecl;
5567 tree addr;
5568
5569 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5570
5571 /* Expand the operands. */
5572 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5573 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5574
5575 /* Only try generating instructions if inlining is turned on. */
5576 if (flag_inline_atomics)
5577 {
5578 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5579 if (ret)
5580 return ret;
5581 }
5582
5583 /* If no alternate library routine is needed, return and let a normal call be emitted. */
5584 if (ext_call == BUILT_IN_NONE)
5585 return NULL_RTX;
5586
5587 /* Change the call to the specified function. */
5588 fndecl = get_callee_fndecl (exp);
5589 addr = CALL_EXPR_FN (exp);
5590 STRIP_NOPS (addr);
5591
5592 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5593 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5594
5595 /* Expand the call here so we can emit trailing code. */
5596 ret = expand_call (exp, target, ignore);
5597
5598 /* Replace the original function just in case it matters. */
5599 TREE_OPERAND (addr, 0) = fndecl;
5600
5601 /* Then issue the arithmetic correction to return the right result. */
5602 if (!ignore)
5603 {
5604 if (code == NOT)
5605 {
5606 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5607 OPTAB_LIB_WIDEN);
5608 ret = expand_simple_unop (mode, NOT, ret, target, true);
5609 }
5610 else
5611 ret = expand_simple_binop (mode, code, ret, val, target, true,
5612 OPTAB_LIB_WIDEN);
5613 }
5614 return ret;
5615 }
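/* For example, if __atomic_add_fetch_4 cannot be expanded inline, the
   call is redirected to __atomic_fetch_add_4, which returns the value
   before the addition; the correction above then computes ret + val to
   recover the after value.  For NAND the correction is ret = ~(ret & val),
   matching the post-4.4 NAND semantics.  */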
5616
5617
5618 #ifndef HAVE_atomic_clear
5619 # define HAVE_atomic_clear 0
5620 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5621 #endif
5622
5623 /* Expand an atomic clear operation.
5624 void __atomic_clear (BOOL *obj, enum memmodel)
5625 EXP is the call expression. */
5626
5627 static rtx
5628 expand_builtin_atomic_clear (tree exp)
5629 {
5630 machine_mode mode;
5631 rtx mem, ret;
5632 enum memmodel model;
5633
5634 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5635 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5636 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5637
5638 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME
5639 || (model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5640 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5641 {
5642 warning (OPT_Winvalid_memory_model,
5643 "invalid memory model for %<__atomic_store%>");
5644 model = MEMMODEL_SEQ_CST;
5645 }
5646
5647 if (HAVE_atomic_clear)
5648 {
5649 emit_insn (gen_atomic_clear (mem, model));
5650 return const0_rtx;
5651 }
5652
5653 /* Try issuing an __atomic_store, allowing fallback to __sync_lock_release;
5654 failing both, a plain store is issued below. The only way this can
5655 fail is if the bool type is larger than a word size. Unlikely, but
5656 handle it anyway for completeness. Assume a single threaded model since
5657 there is no atomic support in this case, and no barriers are required. */
5658 ret = expand_atomic_store (mem, const0_rtx, model, true);
5659 if (!ret)
5660 emit_move_insn (mem, const0_rtx);
5661 return const0_rtx;
5662 }
5663
5664 /* Expand an atomic test_and_set operation.
5665 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5666 EXP is the call expression. */
5667
5668 static rtx
5669 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5670 {
5671 rtx mem;
5672 enum memmodel model;
5673 machine_mode mode;
5674
5675 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5676 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5677 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5678
5679 return expand_atomic_test_and_set (target, mem, model);
5680 }
5681
5682
5683 /* Return true if an object of size ARG0, optionally pointed to by ARG1, is
5684 always lock free on this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5685
5686 static tree
5687 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5688 {
5689 int size;
5690 machine_mode mode;
5691 unsigned int mode_align, type_align;
5692
5693 if (TREE_CODE (arg0) != INTEGER_CST)
5694 return NULL_TREE;
5695
5696 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5697 mode = mode_for_size (size, MODE_INT, 0);
5698 mode_align = GET_MODE_ALIGNMENT (mode);
5699
5700 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5701 type_align = mode_align;
5702 else
5703 {
5704 tree ttype = TREE_TYPE (arg1);
5705
5706 /* This function is usually invoked and folded immediately by the front
5707 end before anything else has a chance to look at it. The pointer
5708 parameter at this point is usually cast to a void *, so check for that
5709 and look past the cast. */
5710 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5711 && VOID_TYPE_P (TREE_TYPE (ttype)))
5712 arg1 = TREE_OPERAND (arg1, 0);
5713
5714 ttype = TREE_TYPE (arg1);
5715 gcc_assert (POINTER_TYPE_P (ttype));
5716
5717 /* Get the underlying type of the object. */
5718 ttype = TREE_TYPE (ttype);
5719 type_align = TYPE_ALIGN (ttype);
5720 }
5721
5722 /* If the object has smaller alignment, the lock free routines cannot
5723 be used. */
5724 if (type_align < mode_align)
5725 return boolean_false_node;
5726
5727 /* Check if a compare_and_swap pattern exists for the mode which represents
5728 the required size. The pattern is not allowed to fail, so the existence
5729 of the pattern indicates support is present. */
5730 if (can_compare_and_swap_p (mode, true))
5731 return boolean_true_node;
5732 else
5733 return boolean_false_node;
5734 }
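/* For example, __atomic_always_lock_free (4, 0) folds to true on a target
   with a 32-bit compare-and-swap pattern: a null ARG1 means "assume the
   mode's natural alignment", and can_compare_and_swap_p succeeds for
   SImode.  A pointer to an underaligned (e.g. packed) object fails the
   alignment check above instead.  */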
5735
5736 /* Return true if the parameters to call EXP represent an object which will
5737 always generate lock free instructions. The first argument represents the
5738 size of the object, and the second parameter is a pointer to the object
5739 itself. If NULL is passed for the object, then the result is based on
5740 typical alignment for an object of the specified size. Otherwise return
5741 false. */
5742
5743 static rtx
5744 expand_builtin_atomic_always_lock_free (tree exp)
5745 {
5746 tree size;
5747 tree arg0 = CALL_EXPR_ARG (exp, 0);
5748 tree arg1 = CALL_EXPR_ARG (exp, 1);
5749
5750 if (TREE_CODE (arg0) != INTEGER_CST)
5751 {
5752 error ("non-constant argument 1 to __atomic_always_lock_free");
5753 return const0_rtx;
5754 }
5755
5756 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5757 if (size == boolean_true_node)
5758 return const1_rtx;
5759 return const0_rtx;
5760 }
5761
5762 /* Return boolean_true_node if it can be determined that object ARG1 of size
5763 ARG0 is lock free on this architecture; otherwise return NULL_TREE. */
5764
5765 static tree
5766 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5767 {
5768 if (!flag_inline_atomics)
5769 return NULL_TREE;
5770
5771 /* If it isn't always lock free, don't generate a result. */
5772 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5773 return boolean_true_node;
5774
5775 return NULL_TREE;
5776 }
5777
5778 /* Return true if the parameters to call EXP represent an object which will
5779 always generate lock free instructions. The first argument represents the
5780 size of the object, and the second parameter is a pointer to the object
5781 itself. If NULL is passed for the object, then the result is based on
5782 typical alignment for an object of the specified size. Otherwise return
5783 NULL. */
5784
5785 static rtx
5786 expand_builtin_atomic_is_lock_free (tree exp)
5787 {
5788 tree size;
5789 tree arg0 = CALL_EXPR_ARG (exp, 0);
5790 tree arg1 = CALL_EXPR_ARG (exp, 1);
5791
5792 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5793 {
5794 error ("non-integer argument 1 to __atomic_is_lock_free");
5795 return NULL_RTX;
5796 }
5797
5798 if (!flag_inline_atomics)
5799 return NULL_RTX;
5800
5801 /* If the value is known at compile time, return the RTX for it. */
5802 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5803 if (size == boolean_true_node)
5804 return const1_rtx;
5805
5806 return NULL_RTX;
5807 }
5808
5809 /* Expand the __atomic_thread_fence intrinsic:
5810 void __atomic_thread_fence (enum memmodel)
5811 EXP is the CALL_EXPR. */
5812
5813 static void
5814 expand_builtin_atomic_thread_fence (tree exp)
5815 {
5816 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5817 expand_mem_thread_fence (model);
5818 }
5819
5820 /* Expand the __atomic_signal_fence intrinsic:
5821 void __atomic_signal_fence (enum memmodel)
5822 EXP is the CALL_EXPR. */
5823
5824 static void
5825 expand_builtin_atomic_signal_fence (tree exp)
5826 {
5827 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5828 expand_mem_signal_fence (model);
5829 }
5830
5831 /* Expand the __sync_synchronize intrinsic. */
5832
5833 static void
5834 expand_builtin_sync_synchronize (void)
5835 {
5836 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5837 }
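/* Note that this is the same fence as __atomic_thread_fence
   (__ATOMIC_SEQ_CST): a full barrier ordering all earlier and later
   memory accesses.  */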
5838
5839 static rtx
5840 expand_builtin_thread_pointer (tree exp, rtx target)
5841 {
5842 enum insn_code icode;
5843 if (!validate_arglist (exp, VOID_TYPE))
5844 return const0_rtx;
5845 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5846 if (icode != CODE_FOR_nothing)
5847 {
5848 struct expand_operand op;
5849 /* If the target is not suitable then create a new target. */
5850 if (target == NULL_RTX
5851 || !REG_P (target)
5852 || GET_MODE (target) != Pmode)
5853 target = gen_reg_rtx (Pmode);
5854 create_output_operand (&op, target, Pmode);
5855 expand_insn (icode, 1, &op);
5856 return target;
5857 }
5858 error ("__builtin_thread_pointer is not supported on this target");
5859 return const0_rtx;
5860 }
5861
5862 static void
5863 expand_builtin_set_thread_pointer (tree exp)
5864 {
5865 enum insn_code icode;
5866 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5867 return;
5868 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5869 if (icode != CODE_FOR_nothing)
5870 {
5871 struct expand_operand op;
5872 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5873 Pmode, EXPAND_NORMAL);
5874 create_input_operand (&op, val, Pmode);
5875 expand_insn (icode, 1, &op);
5876 return;
5877 }
5878 error ("__builtin_set_thread_pointer is not supported on this target");
5879 }
5880
5881 \f
5882 /* Emit code to restore the current value of the stack pointer. */
5883
5884 static void
5885 expand_stack_restore (tree var)
5886 {
5887 rtx_insn *prev;
5888 rtx sa = expand_normal (var);
5889
5890 sa = convert_memory_address (Pmode, sa);
5891
5892 prev = get_last_insn ();
5893 emit_stack_restore (SAVE_BLOCK, sa);
5894 fixup_args_size_notes (prev, get_last_insn (), 0);
5895 }
5896
5897
5898 /* Emit code to save the current value of the stack pointer. */
5899
5900 static rtx
5901 expand_stack_save (void)
5902 {
5903 rtx ret = NULL_RTX;
5904
5905 do_pending_stack_adjust ();
5906 emit_stack_save (SAVE_BLOCK, &ret);
5907 return ret;
5908 }
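/* These two expanders implement the stack save/restore pairing that
   brackets, for example, the lifetime of a variable-length array: the
   stack pointer is saved before the VLA's allocation and restored when
   the array goes out of scope, releasing the space.  */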
5909
5910
5911 /* Expand OpenACC acc_on_device.
5912
5913 This has to happen late (that is, not in early folding; expand_builtin_*,
5914 rather than fold_builtin_*), as we have to act differently for host and
5915 acceleration device (ACCEL_COMPILER conditional). */
5916
5917 static rtx
5918 expand_builtin_acc_on_device (tree exp, rtx target)
5919 {
5920 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5921 return NULL_RTX;
5922
5923 tree arg = CALL_EXPR_ARG (exp, 0);
5924
5925 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5926 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5927 rtx v = expand_normal (arg), v1, v2;
5928 #ifdef ACCEL_COMPILER
5929 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5930 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
5931 #else
5932 v1 = GEN_INT (GOMP_DEVICE_NONE);
5933 v2 = GEN_INT (GOMP_DEVICE_HOST);
5934 #endif
5935 machine_mode target_mode = TYPE_MODE (integer_type_node);
5936 if (!target || !register_operand (target, target_mode))
5937 target = gen_reg_rtx (target_mode);
5938 emit_move_insn (target, const1_rtx);
5939 rtx_code_label *done_label = gen_label_rtx ();
5940 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
5941 NULL_RTX, done_label, PROB_EVEN);
5942 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
5943 NULL_RTX, done_label, PROB_EVEN);
5944 emit_move_insn (target, const0_rtx);
5945 emit_label (done_label);
5946
5947 return target;
5948 }
5949
5950
5951 /* Expand an expression EXP that calls a built-in function,
5952 with result going to TARGET if that's convenient
5953 (and in mode MODE if that's convenient).
5954 SUBTARGET may be used as the target for computing one of EXP's operands.
5955 IGNORE is nonzero if the value is to be ignored. */
5956
5957 rtx
5958 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5959 int ignore)
5960 {
5961 tree fndecl = get_callee_fndecl (exp);
5962 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5963 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5964 int flags;
5965
5966 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5967 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5968
5969 /* When ASan is enabled, we don't want to expand some memory/string
5970 builtins and rely on libsanitizer's hooks. This allows us to avoid
5971 redundant checks and be sure that possible overflow will be detected
5972 by ASan. */
5973
5974 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5975 return expand_call (exp, target, ignore);
5976
5977 /* When not optimizing, generate calls to library functions for a certain
5978 set of builtins. */
5979 if (!optimize
5980 && !called_as_built_in (fndecl)
5981 && fcode != BUILT_IN_FORK
5982 && fcode != BUILT_IN_EXECL
5983 && fcode != BUILT_IN_EXECV
5984 && fcode != BUILT_IN_EXECLP
5985 && fcode != BUILT_IN_EXECLE
5986 && fcode != BUILT_IN_EXECVP
5987 && fcode != BUILT_IN_EXECVE
5988 && fcode != BUILT_IN_ALLOCA
5989 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5990 && fcode != BUILT_IN_FREE
5991 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5992 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5993 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5994 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5995 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5996 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5997 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5998 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5999 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6000 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6001 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6002 && fcode != BUILT_IN_CHKP_BNDRET)
6003 return expand_call (exp, target, ignore);
6004
6005 /* The built-in function expanders test for target == const0_rtx
6006 to determine whether the function's result will be ignored. */
6007 if (ignore)
6008 target = const0_rtx;
6009
6010 /* If the result of a pure or const built-in function is ignored, and
6011 none of its arguments are volatile, we can avoid expanding the
6012 built-in call and just evaluate the arguments for side-effects. */
6013 if (target == const0_rtx
6014 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6015 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6016 {
6017 bool volatilep = false;
6018 tree arg;
6019 call_expr_arg_iterator iter;
6020
6021 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6022 if (TREE_THIS_VOLATILE (arg))
6023 {
6024 volatilep = true;
6025 break;
6026 }
6027
6028 if (! volatilep)
6029 {
6030 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6031 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6032 return const0_rtx;
6033 }
6034 }
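/* For example, an ignored call to the pure __builtin_strlen (p++) is not
   expanded at all: P++ is evaluated for its side effect and const0_rtx
   is returned in place of the call's value.  */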
6035
6036 /* expand_builtin_with_bounds is supposed to be used for
6037 instrumented builtin calls. */
6038 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6039
6040 switch (fcode)
6041 {
6042 CASE_FLT_FN (BUILT_IN_FABS):
6043 case BUILT_IN_FABSD32:
6044 case BUILT_IN_FABSD64:
6045 case BUILT_IN_FABSD128:
6046 target = expand_builtin_fabs (exp, target, subtarget);
6047 if (target)
6048 return target;
6049 break;
6050
6051 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6052 target = expand_builtin_copysign (exp, target, subtarget);
6053 if (target)
6054 return target;
6055 break;
6056
6057 /* Just do a normal library call if we were unable to fold
6058 the values. */
6059 CASE_FLT_FN (BUILT_IN_CABS):
6060 break;
6061
6062 CASE_FLT_FN (BUILT_IN_EXP):
6063 CASE_FLT_FN (BUILT_IN_EXP10):
6064 CASE_FLT_FN (BUILT_IN_POW10):
6065 CASE_FLT_FN (BUILT_IN_EXP2):
6066 CASE_FLT_FN (BUILT_IN_EXPM1):
6067 CASE_FLT_FN (BUILT_IN_LOGB):
6068 CASE_FLT_FN (BUILT_IN_LOG):
6069 CASE_FLT_FN (BUILT_IN_LOG10):
6070 CASE_FLT_FN (BUILT_IN_LOG2):
6071 CASE_FLT_FN (BUILT_IN_LOG1P):
6072 CASE_FLT_FN (BUILT_IN_TAN):
6073 CASE_FLT_FN (BUILT_IN_ASIN):
6074 CASE_FLT_FN (BUILT_IN_ACOS):
6075 CASE_FLT_FN (BUILT_IN_ATAN):
6076 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6077 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6078 because of possible accuracy problems. */
6079 if (! flag_unsafe_math_optimizations)
6080 break;
6081 CASE_FLT_FN (BUILT_IN_SQRT):
6082 CASE_FLT_FN (BUILT_IN_FLOOR):
6083 CASE_FLT_FN (BUILT_IN_CEIL):
6084 CASE_FLT_FN (BUILT_IN_TRUNC):
6085 CASE_FLT_FN (BUILT_IN_ROUND):
6086 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6087 CASE_FLT_FN (BUILT_IN_RINT):
6088 target = expand_builtin_mathfn (exp, target, subtarget);
6089 if (target)
6090 return target;
6091 break;
6092
6093 CASE_FLT_FN (BUILT_IN_FMA):
6094 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6095 if (target)
6096 return target;
6097 break;
6098
6099 CASE_FLT_FN (BUILT_IN_ILOGB):
6100 if (! flag_unsafe_math_optimizations)
6101 break;
6102 CASE_FLT_FN (BUILT_IN_ISINF):
6103 CASE_FLT_FN (BUILT_IN_FINITE):
6104 case BUILT_IN_ISFINITE:
6105 case BUILT_IN_ISNORMAL:
6106 target = expand_builtin_interclass_mathfn (exp, target);
6107 if (target)
6108 return target;
6109 break;
6110
6111 CASE_FLT_FN (BUILT_IN_ICEIL):
6112 CASE_FLT_FN (BUILT_IN_LCEIL):
6113 CASE_FLT_FN (BUILT_IN_LLCEIL):
6114 CASE_FLT_FN (BUILT_IN_LFLOOR):
6115 CASE_FLT_FN (BUILT_IN_IFLOOR):
6116 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6117 target = expand_builtin_int_roundingfn (exp, target);
6118 if (target)
6119 return target;
6120 break;
6121
6122 CASE_FLT_FN (BUILT_IN_IRINT):
6123 CASE_FLT_FN (BUILT_IN_LRINT):
6124 CASE_FLT_FN (BUILT_IN_LLRINT):
6125 CASE_FLT_FN (BUILT_IN_IROUND):
6126 CASE_FLT_FN (BUILT_IN_LROUND):
6127 CASE_FLT_FN (BUILT_IN_LLROUND):
6128 target = expand_builtin_int_roundingfn_2 (exp, target);
6129 if (target)
6130 return target;
6131 break;
6132
6133 CASE_FLT_FN (BUILT_IN_POWI):
6134 target = expand_builtin_powi (exp, target);
6135 if (target)
6136 return target;
6137 break;
6138
6139 CASE_FLT_FN (BUILT_IN_ATAN2):
6140 CASE_FLT_FN (BUILT_IN_LDEXP):
6141 CASE_FLT_FN (BUILT_IN_SCALB):
6142 CASE_FLT_FN (BUILT_IN_SCALBN):
6143 CASE_FLT_FN (BUILT_IN_SCALBLN):
6144 if (! flag_unsafe_math_optimizations)
6145 break;
6146
6147 CASE_FLT_FN (BUILT_IN_FMOD):
6148 CASE_FLT_FN (BUILT_IN_REMAINDER):
6149 CASE_FLT_FN (BUILT_IN_DREM):
6150 CASE_FLT_FN (BUILT_IN_POW):
6151 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6152 if (target)
6153 return target;
6154 break;
6155
6156 CASE_FLT_FN (BUILT_IN_CEXPI):
6157 target = expand_builtin_cexpi (exp, target);
6158 gcc_assert (target);
6159 return target;
6160
6161 CASE_FLT_FN (BUILT_IN_SIN):
6162 CASE_FLT_FN (BUILT_IN_COS):
6163 if (! flag_unsafe_math_optimizations)
6164 break;
6165 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6166 if (target)
6167 return target;
6168 break;
6169
6170 CASE_FLT_FN (BUILT_IN_SINCOS):
6171 if (! flag_unsafe_math_optimizations)
6172 break;
6173 target = expand_builtin_sincos (exp);
6174 if (target)
6175 return target;
6176 break;
6177
6178 case BUILT_IN_APPLY_ARGS:
6179 return expand_builtin_apply_args ();
6180
6181 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6182 FUNCTION with a copy of the parameters described by
6183 ARGUMENTS, and ARGSIZE. It returns a block of memory
6184 allocated on the stack into which is stored all the registers
6185 that might possibly be used for returning the result of a
6186 function. ARGUMENTS is the value returned by
6187 __builtin_apply_args. ARGSIZE is the number of bytes of
6188 arguments that must be copied. ??? How should this value be
6189 computed? We'll also need a safe worst case value for varargs
6190 functions. */
6191 case BUILT_IN_APPLY:
6192 if (!validate_arglist (exp, POINTER_TYPE,
6193 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6194 && !validate_arglist (exp, REFERENCE_TYPE,
6195 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6196 return const0_rtx;
6197 else
6198 {
6199 rtx ops[3];
6200
6201 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6202 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6203 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6204
6205 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6206 }
6207
6208 /* __builtin_return (RESULT) causes the function to return the
6209 value described by RESULT. RESULT is address of the block of
6210 memory returned by __builtin_apply. */
6211 case BUILT_IN_RETURN:
6212 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6213 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6214 return const0_rtx;
6215
6216 case BUILT_IN_SAVEREGS:
6217 return expand_builtin_saveregs ();
6218
6219 case BUILT_IN_VA_ARG_PACK:
6220 /* All valid uses of __builtin_va_arg_pack () are removed during
6221 inlining. */
6222 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6223 return const0_rtx;
6224
6225 case BUILT_IN_VA_ARG_PACK_LEN:
6226 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6227 inlining. */
6228 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6229 return const0_rtx;
6230
6231 /* Return the address of the first anonymous stack arg. */
6232 case BUILT_IN_NEXT_ARG:
6233 if (fold_builtin_next_arg (exp, false))
6234 return const0_rtx;
6235 return expand_builtin_next_arg ();
6236
6237 case BUILT_IN_CLEAR_CACHE:
6238 target = expand_builtin___clear_cache (exp);
6239 if (target)
6240 return target;
6241 break;
6242
6243 case BUILT_IN_CLASSIFY_TYPE:
6244 return expand_builtin_classify_type (exp);
6245
6246 case BUILT_IN_CONSTANT_P:
6247 return const0_rtx;
6248
6249 case BUILT_IN_FRAME_ADDRESS:
6250 case BUILT_IN_RETURN_ADDRESS:
6251 return expand_builtin_frame_address (fndecl, exp);
6252
6253 /* Returns the address of the area where the structure is returned.
6254 0 otherwise. */
6255 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6256 if (call_expr_nargs (exp) != 0
6257 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6258 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6259 return const0_rtx;
6260 else
6261 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6262
6263 case BUILT_IN_ALLOCA:
6264 case BUILT_IN_ALLOCA_WITH_ALIGN:
6265 /* If the allocation stems from the declaration of a variable-sized
6266 object, it cannot accumulate. */
6267 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6268 if (target)
6269 return target;
6270 break;
6271
6272 case BUILT_IN_STACK_SAVE:
6273 return expand_stack_save ();
6274
6275 case BUILT_IN_STACK_RESTORE:
6276 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6277 return const0_rtx;
6278
6279 case BUILT_IN_BSWAP16:
6280 case BUILT_IN_BSWAP32:
6281 case BUILT_IN_BSWAP64:
6282 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6283 if (target)
6284 return target;
6285 break;
6286
6287 CASE_INT_FN (BUILT_IN_FFS):
6288 target = expand_builtin_unop (target_mode, exp, target,
6289 subtarget, ffs_optab);
6290 if (target)
6291 return target;
6292 break;
6293
6294 CASE_INT_FN (BUILT_IN_CLZ):
6295 target = expand_builtin_unop (target_mode, exp, target,
6296 subtarget, clz_optab);
6297 if (target)
6298 return target;
6299 break;
6300
6301 CASE_INT_FN (BUILT_IN_CTZ):
6302 target = expand_builtin_unop (target_mode, exp, target,
6303 subtarget, ctz_optab);
6304 if (target)
6305 return target;
6306 break;
6307
6308 CASE_INT_FN (BUILT_IN_CLRSB):
6309 target = expand_builtin_unop (target_mode, exp, target,
6310 subtarget, clrsb_optab);
6311 if (target)
6312 return target;
6313 break;
6314
6315 CASE_INT_FN (BUILT_IN_POPCOUNT):
6316 target = expand_builtin_unop (target_mode, exp, target,
6317 subtarget, popcount_optab);
6318 if (target)
6319 return target;
6320 break;
6321
6322 CASE_INT_FN (BUILT_IN_PARITY):
6323 target = expand_builtin_unop (target_mode, exp, target,
6324 subtarget, parity_optab);
6325 if (target)
6326 return target;
6327 break;
6328
6329 case BUILT_IN_STRLEN:
6330 target = expand_builtin_strlen (exp, target, target_mode);
6331 if (target)
6332 return target;
6333 break;
6334
6335 case BUILT_IN_STRCPY:
6336 target = expand_builtin_strcpy (exp, target);
6337 if (target)
6338 return target;
6339 break;
6340
6341 case BUILT_IN_STRNCPY:
6342 target = expand_builtin_strncpy (exp, target);
6343 if (target)
6344 return target;
6345 break;
6346
6347 case BUILT_IN_STPCPY:
6348 target = expand_builtin_stpcpy (exp, target, mode);
6349 if (target)
6350 return target;
6351 break;
6352
6353 case BUILT_IN_MEMCPY:
6354 target = expand_builtin_memcpy (exp, target);
6355 if (target)
6356 return target;
6357 break;
6358
6359 case BUILT_IN_MEMPCPY:
6360 target = expand_builtin_mempcpy (exp, target, mode);
6361 if (target)
6362 return target;
6363 break;
6364
6365 case BUILT_IN_MEMSET:
6366 target = expand_builtin_memset (exp, target, mode);
6367 if (target)
6368 return target;
6369 break;
6370
6371 case BUILT_IN_BZERO:
6372 target = expand_builtin_bzero (exp);
6373 if (target)
6374 return target;
6375 break;
6376
6377 case BUILT_IN_STRCMP:
6378 target = expand_builtin_strcmp (exp, target);
6379 if (target)
6380 return target;
6381 break;
6382
6383 case BUILT_IN_STRNCMP:
6384 target = expand_builtin_strncmp (exp, target, mode);
6385 if (target)
6386 return target;
6387 break;
6388
6389 case BUILT_IN_BCMP:
6390 case BUILT_IN_MEMCMP:
6391 target = expand_builtin_memcmp (exp, target, mode);
6392 if (target)
6393 return target;
6394 break;
6395
6396 case BUILT_IN_SETJMP:
6397 /* This should have been lowered to the builtins below. */
6398 gcc_unreachable ();
6399
6400 case BUILT_IN_SETJMP_SETUP:
6401 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6402 and the receiver label. */
6403 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6404 {
6405 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6406 VOIDmode, EXPAND_NORMAL);
6407 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6408 rtx label_r = label_rtx (label);
6409
6410 /* This is copied from the handling of non-local gotos. */
6411 expand_builtin_setjmp_setup (buf_addr, label_r);
6412 nonlocal_goto_handler_labels
6413 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6414 nonlocal_goto_handler_labels);
6415 /* ??? Do not let expand_label treat us as such since we would
6416 not want to be both on the list of non-local labels and on
6417 the list of forced labels. */
6418 FORCED_LABEL (label) = 0;
6419 return const0_rtx;
6420 }
6421 break;
6422
6423 case BUILT_IN_SETJMP_RECEIVER:
6424 /* __builtin_setjmp_receiver is passed the receiver label. */
6425 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6426 {
6427 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6428 rtx label_r = label_rtx (label);
6429
6430 expand_builtin_setjmp_receiver (label_r);
6431 return const0_rtx;
6432 }
6433 break;
6434
6435 /* __builtin_longjmp is passed a pointer to an array of five words.
6436 It's similar to the C library longjmp function but works with
6437 __builtin_setjmp above. */
6438 case BUILT_IN_LONGJMP:
6439 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6440 {
6441 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6442 VOIDmode, EXPAND_NORMAL);
6443 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6444
6445 if (value != const1_rtx)
6446 {
6447 error ("%<__builtin_longjmp%> second argument must be 1");
6448 return const0_rtx;
6449 }
6450
6451 expand_builtin_longjmp (buf_addr, value);
6452 return const0_rtx;
6453 }
6454 break;
6455
6456 case BUILT_IN_NONLOCAL_GOTO:
6457 target = expand_builtin_nonlocal_goto (exp);
6458 if (target)
6459 return target;
6460 break;
6461
6462 /* This updates the setjmp buffer that is its argument with the value
6463 of the current stack pointer. */
6464 case BUILT_IN_UPDATE_SETJMP_BUF:
6465 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6466 {
6467 rtx buf_addr
6468 = expand_normal (CALL_EXPR_ARG (exp, 0));
6469
6470 expand_builtin_update_setjmp_buf (buf_addr);
6471 return const0_rtx;
6472 }
6473 break;
6474
6475 case BUILT_IN_TRAP:
6476 expand_builtin_trap ();
6477 return const0_rtx;
6478
6479 case BUILT_IN_UNREACHABLE:
6480 expand_builtin_unreachable ();
6481 return const0_rtx;
6482
6483 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6484 case BUILT_IN_SIGNBITD32:
6485 case BUILT_IN_SIGNBITD64:
6486 case BUILT_IN_SIGNBITD128:
6487 target = expand_builtin_signbit (exp, target);
6488 if (target)
6489 return target;
6490 break;
6491
6492 /* Various hooks for the DWARF 2 __throw routine. */
6493 case BUILT_IN_UNWIND_INIT:
6494 expand_builtin_unwind_init ();
6495 return const0_rtx;
6496 case BUILT_IN_DWARF_CFA:
6497 return virtual_cfa_rtx;
6498 #ifdef DWARF2_UNWIND_INFO
6499 case BUILT_IN_DWARF_SP_COLUMN:
6500 return expand_builtin_dwarf_sp_column ();
6501 case BUILT_IN_INIT_DWARF_REG_SIZES:
6502 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6503 return const0_rtx;
6504 #endif
6505 case BUILT_IN_FROB_RETURN_ADDR:
6506 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6507 case BUILT_IN_EXTRACT_RETURN_ADDR:
6508 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6509 case BUILT_IN_EH_RETURN:
6510 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6511 CALL_EXPR_ARG (exp, 1));
6512 return const0_rtx;
6513 case BUILT_IN_EH_RETURN_DATA_REGNO:
6514 return expand_builtin_eh_return_data_regno (exp);
6515 case BUILT_IN_EXTEND_POINTER:
6516 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6517 case BUILT_IN_EH_POINTER:
6518 return expand_builtin_eh_pointer (exp);
6519 case BUILT_IN_EH_FILTER:
6520 return expand_builtin_eh_filter (exp);
6521 case BUILT_IN_EH_COPY_VALUES:
6522 return expand_builtin_eh_copy_values (exp);
6523
6524 case BUILT_IN_VA_START:
6525 return expand_builtin_va_start (exp);
6526 case BUILT_IN_VA_END:
6527 return expand_builtin_va_end (exp);
6528 case BUILT_IN_VA_COPY:
6529 return expand_builtin_va_copy (exp);
6530 case BUILT_IN_EXPECT:
6531 return expand_builtin_expect (exp, target);
6532 case BUILT_IN_ASSUME_ALIGNED:
6533 return expand_builtin_assume_aligned (exp, target);
6534 case BUILT_IN_PREFETCH:
6535 expand_builtin_prefetch (exp);
6536 return const0_rtx;
6537
6538 case BUILT_IN_INIT_TRAMPOLINE:
6539 return expand_builtin_init_trampoline (exp, true);
6540 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6541 return expand_builtin_init_trampoline (exp, false);
6542 case BUILT_IN_ADJUST_TRAMPOLINE:
6543 return expand_builtin_adjust_trampoline (exp);
6544
6545 case BUILT_IN_FORK:
6546 case BUILT_IN_EXECL:
6547 case BUILT_IN_EXECV:
6548 case BUILT_IN_EXECLP:
6549 case BUILT_IN_EXECLE:
6550 case BUILT_IN_EXECVP:
6551 case BUILT_IN_EXECVE:
6552 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6553 if (target)
6554 return target;
6555 break;
6556
6557 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6558 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6559 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6560 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6561 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6562 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6563 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6564 if (target)
6565 return target;
6566 break;
6567
6568 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6569 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6570 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6571 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6572 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6573 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6574 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6575 if (target)
6576 return target;
6577 break;
6578
6579 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6580 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6581 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6582 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6583 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6584 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6585 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6586 if (target)
6587 return target;
6588 break;
6589
6590 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6591 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6592 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6593 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6594 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6595 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6596 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6597 if (target)
6598 return target;
6599 break;
6600
6601 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6602 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6603 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6604 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6605 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6606 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6607 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6608 if (target)
6609 return target;
6610 break;
6611
6612 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6613 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6614 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6615 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6616 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6617 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6618 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6619 if (target)
6620 return target;
6621 break;
6622
6623 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6624 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6625 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6626 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6627 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6628 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6629 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6630 if (target)
6631 return target;
6632 break;
6633
6634 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6635 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6636 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6637 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6638 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6639 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6640 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6641 if (target)
6642 return target;
6643 break;
6644
6645 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6646 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6647 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6648 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6649 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6650 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6651 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6652 if (target)
6653 return target;
6654 break;
6655
6656 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6657 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6658 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6659 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6660 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6661 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6662 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6663 if (target)
6664 return target;
6665 break;
6666
6667 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6668 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6669 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6670 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6671 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6672 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6673 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6674 if (target)
6675 return target;
6676 break;
6677
6678 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6679 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6680 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6681 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6682 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6683 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6684 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6685 if (target)
6686 return target;
6687 break;
6688
6689 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6690 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6691 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6692 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6693 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6694 if (mode == VOIDmode)
6695 mode = TYPE_MODE (boolean_type_node);
6696 if (!target || !register_operand (target, mode))
6697 target = gen_reg_rtx (mode);
6698
6699 mode = get_builtin_sync_mode
6700 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6701 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6702 if (target)
6703 return target;
6704 break;
6705
6706 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6707 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6708 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6709 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6710 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6711 mode = get_builtin_sync_mode
6712 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6713 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6714 if (target)
6715 return target;
6716 break;
6717
6718 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6719 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6720 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6721 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6722 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6723 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6724 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6725 if (target)
6726 return target;
6727 break;
6728
6729 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6730 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6731 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6732 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6733 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6734 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6735 expand_builtin_sync_lock_release (mode, exp);
6736 return const0_rtx;
6737
6738 case BUILT_IN_SYNC_SYNCHRONIZE:
6739 expand_builtin_sync_synchronize ();
6740 return const0_rtx;
6741
6742 case BUILT_IN_ATOMIC_EXCHANGE_1:
6743 case BUILT_IN_ATOMIC_EXCHANGE_2:
6744 case BUILT_IN_ATOMIC_EXCHANGE_4:
6745 case BUILT_IN_ATOMIC_EXCHANGE_8:
6746 case BUILT_IN_ATOMIC_EXCHANGE_16:
6747 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6748 target = expand_builtin_atomic_exchange (mode, exp, target);
6749 if (target)
6750 return target;
6751 break;
6752
6753 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6754 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6755 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6756 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6757 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6758 {
6759 unsigned int nargs, z;
6760 vec<tree, va_gc> *vec;
6761
6762 mode =
6763 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6764 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6765 if (target)
6766 return target;
6767
6768 /* If this is turned into an external library call, the weak parameter
6769 must be dropped to match the expected parameter list. */
6770 nargs = call_expr_nargs (exp);
6771 vec_alloc (vec, nargs - 1);
6772 for (z = 0; z < 3; z++)
6773 vec->quick_push (CALL_EXPR_ARG (exp, z));
6774 /* Skip the boolean weak parameter. */
6775 for (z = 4; z < 6; z++)
6776 vec->quick_push (CALL_EXPR_ARG (exp, z));
6777 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6778 break;
6779 }
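/* For example, a 4-byte __atomic_compare_exchange that cannot be inlined
   becomes a call to the external routine __atomic_compare_exchange_4,
   whose parameter list is the same minus the boolean WEAK argument
   skipped above.  */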
6780
6781 case BUILT_IN_ATOMIC_LOAD_1:
6782 case BUILT_IN_ATOMIC_LOAD_2:
6783 case BUILT_IN_ATOMIC_LOAD_4:
6784 case BUILT_IN_ATOMIC_LOAD_8:
6785 case BUILT_IN_ATOMIC_LOAD_16:
6786 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6787 target = expand_builtin_atomic_load (mode, exp, target);
6788 if (target)
6789 return target;
6790 break;
6791
6792 case BUILT_IN_ATOMIC_STORE_1:
6793 case BUILT_IN_ATOMIC_STORE_2:
6794 case BUILT_IN_ATOMIC_STORE_4:
6795 case BUILT_IN_ATOMIC_STORE_8:
6796 case BUILT_IN_ATOMIC_STORE_16:
6797 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6798 target = expand_builtin_atomic_store (mode, exp);
6799 if (target)
6800 return const0_rtx;
6801 break;
6802
6803 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6804 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6805 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6806 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6807 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6808 {
6809 enum built_in_function lib;
6810 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6811 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6812 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6813 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6814 ignore, lib);
6815 if (target)
6816 return target;
6817 break;
6818 }
6819 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6820 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6821 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6822 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6823 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6824 {
6825 enum built_in_function lib;
6826 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6827 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6828 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6829 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6830 ignore, lib);
6831 if (target)
6832 return target;
6833 break;
6834 }
6835 case BUILT_IN_ATOMIC_AND_FETCH_1:
6836 case BUILT_IN_ATOMIC_AND_FETCH_2:
6837 case BUILT_IN_ATOMIC_AND_FETCH_4:
6838 case BUILT_IN_ATOMIC_AND_FETCH_8:
6839 case BUILT_IN_ATOMIC_AND_FETCH_16:
6840 {
6841 enum built_in_function lib;
6842 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6843 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6844 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6845 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6846 ignore, lib);
6847 if (target)
6848 return target;
6849 break;
6850 }
6851 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6852 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6853 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6854 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6855 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6856 {
6857 enum built_in_function lib;
6858 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6859 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6860 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6861 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6862 ignore, lib);
6863 if (target)
6864 return target;
6865 break;
6866 }
6867 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6868 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6869 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6870 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6871 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6872 {
6873 enum built_in_function lib;
6874 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6875 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6876 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6877 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6878 ignore, lib);
6879 if (target)
6880 return target;
6881 break;
6882 }
6883 case BUILT_IN_ATOMIC_OR_FETCH_1:
6884 case BUILT_IN_ATOMIC_OR_FETCH_2:
6885 case BUILT_IN_ATOMIC_OR_FETCH_4:
6886 case BUILT_IN_ATOMIC_OR_FETCH_8:
6887 case BUILT_IN_ATOMIC_OR_FETCH_16:
6888 {
6889 enum built_in_function lib;
6890 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6891 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6892 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6893 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6894 ignore, lib);
6895 if (target)
6896 return target;
6897 break;
6898 }
6899 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6900 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6901 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6902 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6903 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6904 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6905 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6906 ignore, BUILT_IN_NONE);
6907 if (target)
6908 return target;
6909 break;
6910
6911 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6912 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6913 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6914 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6915 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6916 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6917 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6918 ignore, BUILT_IN_NONE);
6919 if (target)
6920 return target;
6921 break;
6922
6923 case BUILT_IN_ATOMIC_FETCH_AND_1:
6924 case BUILT_IN_ATOMIC_FETCH_AND_2:
6925 case BUILT_IN_ATOMIC_FETCH_AND_4:
6926 case BUILT_IN_ATOMIC_FETCH_AND_8:
6927 case BUILT_IN_ATOMIC_FETCH_AND_16:
6928 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6929 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6930 ignore, BUILT_IN_NONE);
6931 if (target)
6932 return target;
6933 break;
6934
6935 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6936 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6937 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6938 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6939 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6940 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6941 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6942 ignore, BUILT_IN_NONE);
6943 if (target)
6944 return target;
6945 break;
6946
6947 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6948 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6949 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6950 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6951 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6952 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6953 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6954 ignore, BUILT_IN_NONE);
6955 if (target)
6956 return target;
6957 break;
6958
6959 case BUILT_IN_ATOMIC_FETCH_OR_1:
6960 case BUILT_IN_ATOMIC_FETCH_OR_2:
6961 case BUILT_IN_ATOMIC_FETCH_OR_4:
6962 case BUILT_IN_ATOMIC_FETCH_OR_8:
6963 case BUILT_IN_ATOMIC_FETCH_OR_16:
6964 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6965 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6966 ignore, BUILT_IN_NONE);
6967 if (target)
6968 return target;
6969 break;
6970
6971 case BUILT_IN_ATOMIC_TEST_AND_SET:
6972 return expand_builtin_atomic_test_and_set (exp, target);
6973
6974 case BUILT_IN_ATOMIC_CLEAR:
6975 return expand_builtin_atomic_clear (exp);
6976
6977 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6978 return expand_builtin_atomic_always_lock_free (exp);
6979
6980 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6981 target = expand_builtin_atomic_is_lock_free (exp);
6982 if (target)
6983 return target;
6984 break;
6985
6986 case BUILT_IN_ATOMIC_THREAD_FENCE:
6987 expand_builtin_atomic_thread_fence (exp);
6988 return const0_rtx;
6989
6990 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6991 expand_builtin_atomic_signal_fence (exp);
6992 return const0_rtx;
6993
6994 case BUILT_IN_OBJECT_SIZE:
6995 return expand_builtin_object_size (exp);
6996
6997 case BUILT_IN_MEMCPY_CHK:
6998 case BUILT_IN_MEMPCPY_CHK:
6999 case BUILT_IN_MEMMOVE_CHK:
7000 case BUILT_IN_MEMSET_CHK:
7001 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7002 if (target)
7003 return target;
7004 break;
7005
7006 case BUILT_IN_STRCPY_CHK:
7007 case BUILT_IN_STPCPY_CHK:
7008 case BUILT_IN_STRNCPY_CHK:
7009 case BUILT_IN_STPNCPY_CHK:
7010 case BUILT_IN_STRCAT_CHK:
7011 case BUILT_IN_STRNCAT_CHK:
7012 case BUILT_IN_SNPRINTF_CHK:
7013 case BUILT_IN_VSNPRINTF_CHK:
7014 maybe_emit_chk_warning (exp, fcode);
7015 break;
7016
7017 case BUILT_IN_SPRINTF_CHK:
7018 case BUILT_IN_VSPRINTF_CHK:
7019 maybe_emit_sprintf_chk_warning (exp, fcode);
7020 break;
7021
7022 case BUILT_IN_FREE:
7023 if (warn_free_nonheap_object)
7024 maybe_emit_free_warning (exp);
7025 break;
7026
7027 case BUILT_IN_THREAD_POINTER:
7028 return expand_builtin_thread_pointer (exp, target);
7029
7030 case BUILT_IN_SET_THREAD_POINTER:
7031 expand_builtin_set_thread_pointer (exp);
7032 return const0_rtx;
7033
7034 case BUILT_IN_CILK_DETACH:
7035 expand_builtin_cilk_detach (exp);
7036 return const0_rtx;
7037
7038 case BUILT_IN_CILK_POP_FRAME:
7039 expand_builtin_cilk_pop_frame (exp);
7040 return const0_rtx;
7041
7042 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7043 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7044 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7045 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7046 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7047 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7048 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7049 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7050 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7051 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7052 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7053 /* The user-visible CHKP builtins are still accepted when the Pointer
7054 Bounds Checker is off; in that case they expand to trivial values. */
7055 if (!chkp_function_instrumented_p (current_function_decl))
7056 {
7057 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7058 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7059 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7060 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7061 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7062 return expand_normal (CALL_EXPR_ARG (exp, 0));
7063 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7064 return expand_normal (size_zero_node);
7065 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7066 return expand_normal (size_int (-1));
7067 else
7068 return const0_rtx;
7069 }
7070 /* FALLTHROUGH */
7071
7072 case BUILT_IN_CHKP_BNDMK:
7073 case BUILT_IN_CHKP_BNDSTX:
7074 case BUILT_IN_CHKP_BNDCL:
7075 case BUILT_IN_CHKP_BNDCU:
7076 case BUILT_IN_CHKP_BNDLDX:
7077 case BUILT_IN_CHKP_BNDRET:
7078 case BUILT_IN_CHKP_INTERSECT:
7079 case BUILT_IN_CHKP_NARROW:
7080 case BUILT_IN_CHKP_EXTRACT_LOWER:
7081 case BUILT_IN_CHKP_EXTRACT_UPPER:
7082 /* A software implementation of the Pointer Bounds Checker is not yet
7083 implemented; target support is required. */
7084 error ("Your target platform does not support -fcheck-pointer-bounds");
7085 break;
7086
7087 case BUILT_IN_ACC_ON_DEVICE:
7088 target = expand_builtin_acc_on_device (exp, target);
7089 if (target)
7090 return target;
7091 break;
7092
7093 default: /* just do library call, if unknown builtin */
7094 break;
7095 }
7096
7097 /* The switch statement above can drop through to cause the function
7098 to be called normally. */
7099 return expand_call (exp, target, ignore);
7100 }
7101
7102 /* Similar to expand_builtin but is used for instrumented calls. */
7103
7104 rtx
7105 expand_builtin_with_bounds (tree exp, rtx target,
7106 rtx subtarget ATTRIBUTE_UNUSED,
7107 machine_mode mode, int ignore)
7108 {
7109 tree fndecl = get_callee_fndecl (exp);
7110 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7111
7112 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7113
7114 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7115 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7116
7117 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7118 && fcode < END_CHKP_BUILTINS);
7119
7120 switch (fcode)
7121 {
7122 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7123 target = expand_builtin_memcpy_with_bounds (exp, target);
7124 if (target)
7125 return target;
7126 break;
7127
7128 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7129 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7130 if (target)
7131 return target;
7132 break;
7133
7134 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7135 target = expand_builtin_memset_with_bounds (exp, target, mode);
7136 if (target)
7137 return target;
7138 break;
7139
7140 default:
7141 break;
7142 }
7143
7144 /* The switch statement above can drop through to cause the function
7145 to be called normally. */
7146 return expand_call (exp, target, ignore);
7147 }
7148
7149 /* Determine whether a tree node represents a call to a built-in
7150 function. If the tree T is a call to a built-in function with
7151 the right number of arguments of the appropriate types, return
7152 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7153 Otherwise the return value is END_BUILTINS. */
7154
7155 enum built_in_function
7156 builtin_mathfn_code (const_tree t)
7157 {
7158 const_tree fndecl, arg, parmlist;
7159 const_tree argtype, parmtype;
7160 const_call_expr_arg_iterator iter;
7161
7162 if (TREE_CODE (t) != CALL_EXPR
7163 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7164 return END_BUILTINS;
7165
7166 fndecl = get_callee_fndecl (t);
7167 if (fndecl == NULL_TREE
7168 || TREE_CODE (fndecl) != FUNCTION_DECL
7169 || ! DECL_BUILT_IN (fndecl)
7170 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7171 return END_BUILTINS;
7172
7173 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7174 init_const_call_expr_arg_iterator (t, &iter);
7175 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7176 {
7177 /* If a function doesn't take a variable number of arguments,
7178 the last element in the list will have type `void'. */
7179 parmtype = TREE_VALUE (parmlist);
7180 if (VOID_TYPE_P (parmtype))
7181 {
7182 if (more_const_call_expr_args_p (&iter))
7183 return END_BUILTINS;
7184 return DECL_FUNCTION_CODE (fndecl);
7185 }
7186
7187 if (! more_const_call_expr_args_p (&iter))
7188 return END_BUILTINS;
7189
7190 arg = next_const_call_expr_arg (&iter);
7191 argtype = TREE_TYPE (arg);
7192
7193 if (SCALAR_FLOAT_TYPE_P (parmtype))
7194 {
7195 if (! SCALAR_FLOAT_TYPE_P (argtype))
7196 return END_BUILTINS;
7197 }
7198 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7199 {
7200 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7201 return END_BUILTINS;
7202 }
7203 else if (POINTER_TYPE_P (parmtype))
7204 {
7205 if (! POINTER_TYPE_P (argtype))
7206 return END_BUILTINS;
7207 }
7208 else if (INTEGRAL_TYPE_P (parmtype))
7209 {
7210 if (! INTEGRAL_TYPE_P (argtype))
7211 return END_BUILTINS;
7212 }
7213 else
7214 return END_BUILTINS;
7215 }
7216
7217 /* Variable-length argument list. */
7218 return DECL_FUNCTION_CODE (fndecl);
7219 }
7220
7221 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7222 evaluate to a constant. */
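/* A rough illustration (hypothetical inputs, not from the sources): an
   argument that is already an INTEGER_CST, or the address of a string
   literal, folds to 1 below; an argument with side effects, of pointer or
   aggregate type, or seen where a definite answer is required (e.g. while
   folding an initializer) folds to 0; anything else is left alone so a
   later pass can retry.  */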
7223
7224 static tree
7225 fold_builtin_constant_p (tree arg)
7226 {
7227 /* We return 1 for a numeric type that's known to be a constant
7228 value at compile-time or for an aggregate type that's a
7229 literal constant. */
7230 STRIP_NOPS (arg);
7231
7232 /* If we know this is a constant, return the constant one. */
7233 if (CONSTANT_CLASS_P (arg)
7234 || (TREE_CODE (arg) == CONSTRUCTOR
7235 && TREE_CONSTANT (arg)))
7236 return integer_one_node;
7237 if (TREE_CODE (arg) == ADDR_EXPR)
7238 {
7239 tree op = TREE_OPERAND (arg, 0);
7240 if (TREE_CODE (op) == STRING_CST
7241 || (TREE_CODE (op) == ARRAY_REF
7242 && integer_zerop (TREE_OPERAND (op, 1))
7243 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7244 return integer_one_node;
7245 }
7246
7247 /* If this expression has side effects, show we don't know it to be a
7248 constant. Likewise if it's a pointer or aggregate type, since in
7249 those cases we only want literals; they are only optimized
7250 when generating RTL, not later.
7251 And finally, if we are compiling an initializer, not code, we
7252 need to return a definite result now; there's not going to be any
7253 more optimization done. */
7254 if (TREE_SIDE_EFFECTS (arg)
7255 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7256 || POINTER_TYPE_P (TREE_TYPE (arg))
7257 || cfun == 0
7258 || folding_initializer
7259 || force_folding_builtin_constant_p)
7260 return integer_zero_node;
7261
7262 return NULL_TREE;
7263 }
7264
7265 /* Create a call to builtin_expect with PRED and EXPECTED (and, when
7266 non-NULL, PREDICTOR) as its arguments and return it as a truthvalue. */
7267
7268 static tree
7269 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7270 tree predictor)
7271 {
7272 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7273
7274 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7275 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7276 ret_type = TREE_TYPE (TREE_TYPE (fn));
7277 pred_type = TREE_VALUE (arg_types);
7278 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7279
7280 pred = fold_convert_loc (loc, pred_type, pred);
7281 expected = fold_convert_loc (loc, expected_type, expected);
7282 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7283 predictor);
7284
7285 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7286 build_int_cst (ret_type, 0));
7287 }
7288
7289 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
7290 Return NULL_TREE if no simplification is possible. */
7291
7292 tree
7293 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7294 {
7295 tree inner, fndecl, inner_arg0;
7296 enum tree_code code;
7297
7298 /* Distribute the expected value over short-circuiting operators.
7299 See through the cast from truthvalue_type_node to long. */
7300 inner_arg0 = arg0;
7301 while (CONVERT_EXPR_P (inner_arg0)
7302 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7303 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7304 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7305
7306 /* If this is a builtin_expect within a builtin_expect, keep the
7307 inner one. See through a comparison against a constant; it
7308 might have been added to create a truthvalue. */
7309 inner = inner_arg0;
7310
7311 if (COMPARISON_CLASS_P (inner)
7312 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7313 inner = TREE_OPERAND (inner, 0);
7314
7315 if (TREE_CODE (inner) == CALL_EXPR
7316 && (fndecl = get_callee_fndecl (inner))
7317 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7318 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7319 return arg0;
7320
7321 inner = inner_arg0;
7322 code = TREE_CODE (inner);
7323 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7324 {
7325 tree op0 = TREE_OPERAND (inner, 0);
7326 tree op1 = TREE_OPERAND (inner, 1);
7327
7328 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7329 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7330 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7331
7332 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7333 }
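  /* Illustrative sketch (hypothetical source, not from GCC itself): the
     block above rewrites
	 __builtin_expect (a && b, 1)
     as
	 (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)
     converted back to the original type, so the hint reaches both
     branch conditions.  */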
7334
7335 /* If the argument isn't invariant then there's nothing else we can do. */
7336 if (!TREE_CONSTANT (inner_arg0))
7337 return NULL_TREE;
7338
7339 /* If we expect that a comparison against the argument will fold to
7340 a constant return the constant. In practice, this means a true
7341 constant or the address of a non-weak symbol. */
7342 inner = inner_arg0;
7343 STRIP_NOPS (inner);
7344 if (TREE_CODE (inner) == ADDR_EXPR)
7345 {
7346 do
7347 {
7348 inner = TREE_OPERAND (inner, 0);
7349 }
7350 while (TREE_CODE (inner) == COMPONENT_REF
7351 || TREE_CODE (inner) == ARRAY_REF);
7352 if ((TREE_CODE (inner) == VAR_DECL
7353 || TREE_CODE (inner) == FUNCTION_DECL)
7354 && DECL_WEAK (inner))
7355 return NULL_TREE;
7356 }
7357
7358 /* Otherwise, ARG0 already has the proper type for the return value. */
7359 return arg0;
7360 }
7361
7362 /* Fold a call to __builtin_classify_type with argument ARG. */
7363
7364 static tree
7365 fold_builtin_classify_type (tree arg)
7366 {
7367 if (arg == 0)
7368 return build_int_cst (integer_type_node, no_type_class);
7369
7370 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7371 }
7372
7373 /* Fold a call to __builtin_strlen with argument ARG. */
7374
7375 static tree
7376 fold_builtin_strlen (location_t loc, tree type, tree arg)
7377 {
7378 if (!validate_arg (arg, POINTER_TYPE))
7379 return NULL_TREE;
7380 else
7381 {
7382 tree len = c_strlen (arg, 0);
7383
7384 if (len)
7385 return fold_convert_loc (loc, type, len);
7386
7387 return NULL_TREE;
7388 }
7389 }
7390
7391 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7392
7393 static tree
7394 fold_builtin_inf (location_t loc, tree type, int warn)
7395 {
7396 REAL_VALUE_TYPE real;
7397
7398 /* __builtin_inff is intended to be usable to define INFINITY on all
7399 targets. If an infinity is not available, INFINITY expands "to a
7400 positive constant of type float that overflows at translation
7401 time", footnote "In this case, using INFINITY will violate the
7402 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7403 Thus we pedwarn to ensure this constraint violation is
7404 diagnosed. */
7405 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7406 pedwarn (loc, 0, "target format does not support infinity");
7407
7408 real_inf (&real);
7409 return build_real (type, real);
7410 }
7411
7412 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7413
7414 static tree
7415 fold_builtin_nan (tree arg, tree type, int quiet)
7416 {
7417 REAL_VALUE_TYPE real;
7418 const char *str;
7419
7420 if (!validate_arg (arg, POINTER_TYPE))
7421 return NULL_TREE;
7422 str = c_getstr (arg);
7423 if (!str)
7424 return NULL_TREE;
7425
7426 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7427 return NULL_TREE;
7428
7429 return build_real (type, real);
7430 }
7431
7432 /* Return true if the floating point expression T has an integer value.
7433 We also allow +Inf, -Inf and NaN to be considered integer values. */
7434
7435 static bool
7436 integer_valued_real_p (tree t)
7437 {
7438 switch (TREE_CODE (t))
7439 {
7440 case FLOAT_EXPR:
7441 return true;
7442
7443 case ABS_EXPR:
7444 case SAVE_EXPR:
7445 return integer_valued_real_p (TREE_OPERAND (t, 0));
7446
7447 case COMPOUND_EXPR:
7448 case MODIFY_EXPR:
7449 case BIND_EXPR:
7450 return integer_valued_real_p (TREE_OPERAND (t, 1));
7451
7452 case PLUS_EXPR:
7453 case MINUS_EXPR:
7454 case MULT_EXPR:
7455 case MIN_EXPR:
7456 case MAX_EXPR:
7457 return integer_valued_real_p (TREE_OPERAND (t, 0))
7458 && integer_valued_real_p (TREE_OPERAND (t, 1));
7459
7460 case COND_EXPR:
7461 return integer_valued_real_p (TREE_OPERAND (t, 1))
7462 && integer_valued_real_p (TREE_OPERAND (t, 2));
7463
7464 case REAL_CST:
7465 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7466
7467 CASE_CONVERT:
7468 {
7469 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7470 if (TREE_CODE (type) == INTEGER_TYPE)
7471 return true;
7472 if (TREE_CODE (type) == REAL_TYPE)
7473 return integer_valued_real_p (TREE_OPERAND (t, 0));
7474 break;
7475 }
7476
7477 case CALL_EXPR:
7478 switch (builtin_mathfn_code (t))
7479 {
7480 CASE_FLT_FN (BUILT_IN_CEIL):
7481 CASE_FLT_FN (BUILT_IN_FLOOR):
7482 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7483 CASE_FLT_FN (BUILT_IN_RINT):
7484 CASE_FLT_FN (BUILT_IN_ROUND):
7485 CASE_FLT_FN (BUILT_IN_TRUNC):
7486 return true;
7487
7488 CASE_FLT_FN (BUILT_IN_FMIN):
7489 CASE_FLT_FN (BUILT_IN_FMAX):
7490 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7491 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7492
7493 default:
7494 break;
7495 }
7496 break;
7497
7498 default:
7499 break;
7500 }
7501 return false;
7502 }
7503
7504 /* FNDECL is assumed to be a builtin across which truncation can be
7505 propagated (for instance floor((double)f) == (double)floorf (f)).
7506 Do the transformation for a call with argument ARG. */
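/* Illustrative note on why the example above is safe: the floor of any
   value representable as a float is itself exactly representable as a
   float, so computing in the narrower type and widening afterwards gives
   the same result.  */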
7507
7508 static tree
7509 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7510 {
7511 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7512
7513 if (!validate_arg (arg, REAL_TYPE))
7514 return NULL_TREE;
7515
7516 /* Integer rounding functions are idempotent. */
7517 if (fcode == builtin_mathfn_code (arg))
7518 return arg;
7519
7520 /* If argument is already integer valued, and we don't need to worry
7521 about setting errno, there's no need to perform rounding. */
7522 if (! flag_errno_math && integer_valued_real_p (arg))
7523 return arg;
7524
7525 if (optimize)
7526 {
7527 tree arg0 = strip_float_extensions (arg);
7528 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7529 tree newtype = TREE_TYPE (arg0);
7530 tree decl;
7531
7532 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7533 && (decl = mathfn_built_in (newtype, fcode)))
7534 return fold_convert_loc (loc, ftype,
7535 build_call_expr_loc (loc, decl, 1,
7536 fold_convert_loc (loc,
7537 newtype,
7538 arg0)));
7539 }
7540 return NULL_TREE;
7541 }
7542
7543 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7544 the argument, for instance lround((double)f) -> lroundf (f).
7545 Do the transformation for a call with argument ARG. */
7546
7547 static tree
7548 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7549 {
7550 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7551
7552 if (!validate_arg (arg, REAL_TYPE))
7553 return NULL_TREE;
7554
7555 /* If argument is already integer valued, and we don't need to worry
7556 about setting errno, there's no need to perform rounding. */
7557 if (! flag_errno_math && integer_valued_real_p (arg))
7558 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7559 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7560
7561 if (optimize)
7562 {
7563 tree ftype = TREE_TYPE (arg);
7564 tree arg0 = strip_float_extensions (arg);
7565 tree newtype = TREE_TYPE (arg0);
7566 tree decl;
7567
7568 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7569 && (decl = mathfn_built_in (newtype, fcode)))
7570 return build_call_expr_loc (loc, decl, 1,
7571 fold_convert_loc (loc, newtype, arg0));
7572 }
7573
7574 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7575 sizeof (int) == sizeof (long). */
7576 if (TYPE_PRECISION (integer_type_node)
7577 == TYPE_PRECISION (long_integer_type_node))
7578 {
7579 tree newfn = NULL_TREE;
7580 switch (fcode)
7581 {
7582 CASE_FLT_FN (BUILT_IN_ICEIL):
7583 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7584 break;
7585
7586 CASE_FLT_FN (BUILT_IN_IFLOOR):
7587 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7588 break;
7589
7590 CASE_FLT_FN (BUILT_IN_IROUND):
7591 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7592 break;
7593
7594 CASE_FLT_FN (BUILT_IN_IRINT):
7595 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7596 break;
7597
7598 default:
7599 break;
7600 }
7601
7602 if (newfn)
7603 {
7604 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7605 return fold_convert_loc (loc,
7606 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7607 }
7608 }
7609
7610 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7611 sizeof (long long) == sizeof (long). */
7612 if (TYPE_PRECISION (long_long_integer_type_node)
7613 == TYPE_PRECISION (long_integer_type_node))
7614 {
7615 tree newfn = NULL_TREE;
7616 switch (fcode)
7617 {
7618 CASE_FLT_FN (BUILT_IN_LLCEIL):
7619 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7620 break;
7621
7622 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7623 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7624 break;
7625
7626 CASE_FLT_FN (BUILT_IN_LLROUND):
7627 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7628 break;
7629
7630 CASE_FLT_FN (BUILT_IN_LLRINT):
7631 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7632 break;
7633
7634 default:
7635 break;
7636 }
7637
7638 if (newfn)
7639 {
7640 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7641 return fold_convert_loc (loc,
7642 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7643 }
7644 }
7645
7646 return NULL_TREE;
7647 }
7648
7649 /* Fold a call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7650 return type. Return NULL_TREE if no simplification can be made. */
7651
7652 static tree
7653 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7654 {
7655 tree res;
7656
7657 if (!validate_arg (arg, COMPLEX_TYPE)
7658 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7659 return NULL_TREE;
7660
7661 /* Calculate the result when the argument is a constant. */
7662 if (TREE_CODE (arg) == COMPLEX_CST
7663 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7664 type, mpfr_hypot)))
7665 return res;
7666
7667 if (TREE_CODE (arg) == COMPLEX_EXPR)
7668 {
7669 tree real = TREE_OPERAND (arg, 0);
7670 tree imag = TREE_OPERAND (arg, 1);
7671
7672 /* If either part is zero, cabs is fabs of the other. */
7673 if (real_zerop (real))
7674 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7675 if (real_zerop (imag))
7676 return fold_build1_loc (loc, ABS_EXPR, type, real);
7677
7678 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7679 if (flag_unsafe_math_optimizations
7680 && operand_equal_p (real, imag, OEP_PURE_SAME))
7681 {
7682 const REAL_VALUE_TYPE sqrt2_trunc
7683 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7684 STRIP_NOPS (real);
7685 return fold_build2_loc (loc, MULT_EXPR, type,
7686 fold_build1_loc (loc, ABS_EXPR, type, real),
7687 build_real (type, sqrt2_trunc));
7688 }
7689 }
7690
7691 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7692 if (TREE_CODE (arg) == NEGATE_EXPR
7693 || TREE_CODE (arg) == CONJ_EXPR)
7694 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7695
7696 /* Don't do this when optimizing for size. */
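  /* Illustrative note: with -funsafe-math-optimizations the block below
     open-codes cabs (z) as
	 sqrt (__real z * __real z + __imag z * __imag z),
     trading the library call for the usual overflow/underflow caveats of
     the naive formula, which is why it is guarded by the unsafe flag.  */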
7697 if (flag_unsafe_math_optimizations
7698 && optimize && optimize_function_for_speed_p (cfun))
7699 {
7700 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7701
7702 if (sqrtfn != NULL_TREE)
7703 {
7704 tree rpart, ipart, result;
7705
7706 arg = builtin_save_expr (arg);
7707
7708 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7709 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7710
7711 rpart = builtin_save_expr (rpart);
7712 ipart = builtin_save_expr (ipart);
7713
7714 result = fold_build2_loc (loc, PLUS_EXPR, type,
7715 fold_build2_loc (loc, MULT_EXPR, type,
7716 rpart, rpart),
7717 fold_build2_loc (loc, MULT_EXPR, type,
7718 ipart, ipart));
7719
7720 return build_call_expr_loc (loc, sqrtfn, 1, result);
7721 }
7722 }
7723
7724 return NULL_TREE;
7725 }
7726
7727 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7728 complex tree type of the result. If NEG is true, the imaginary
7729 zero is negative. */
7730
7731 static tree
7732 build_complex_cproj (tree type, bool neg)
7733 {
7734 REAL_VALUE_TYPE rinf, rzero = dconst0;
7735
7736 real_inf (&rinf);
7737 rzero.sign = neg;
7738 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7739 build_real (TREE_TYPE (type), rzero));
7740 }
7741
7742 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7743 return type. Return NULL_TREE if no simplification can be made. */
7744
7745 static tree
7746 fold_builtin_cproj (location_t loc, tree arg, tree type)
7747 {
7748 if (!validate_arg (arg, COMPLEX_TYPE)
7749 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7750 return NULL_TREE;
7751
7752 /* If there are no infinities, return arg. */
7753 if (! HONOR_INFINITIES (type))
7754 return non_lvalue_loc (loc, arg);
7755
7756 /* Calculate the result when the argument is a constant. */
7757 if (TREE_CODE (arg) == COMPLEX_CST)
7758 {
7759 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7760 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7761
7762 if (real_isinf (real) || real_isinf (imag))
7763 return build_complex_cproj (type, imag->sign);
7764 else
7765 return arg;
7766 }
7767 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7768 {
7769 tree real = TREE_OPERAND (arg, 0);
7770 tree imag = TREE_OPERAND (arg, 1);
7771
7772 STRIP_NOPS (real);
7773 STRIP_NOPS (imag);
7774
7775 /* If the real part is inf and the imag part is known to be
7776 nonnegative, return (inf + 0i). Remember side-effects are
7777 possible in the imag part. */
7778 if (TREE_CODE (real) == REAL_CST
7779 && real_isinf (TREE_REAL_CST_PTR (real))
7780 && tree_expr_nonnegative_p (imag))
7781 return omit_one_operand_loc (loc, type,
7782 build_complex_cproj (type, false),
7783 arg);
7784
7785 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7786 Remember side-effects are possible in the real part. */
7787 if (TREE_CODE (imag) == REAL_CST
7788 && real_isinf (TREE_REAL_CST_PTR (imag)))
7789 return
7790 omit_one_operand_loc (loc, type,
7791 build_complex_cproj (type, TREE_REAL_CST_PTR
7792 (imag)->sign), arg);
7793 }
7794
7795 return NULL_TREE;
7796 }
7797
7798 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7799 Return NULL_TREE if no simplification can be made. */
7800
7801 static tree
7802 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7803 {
7804
7805 enum built_in_function fcode;
7806 tree res;
7807
7808 if (!validate_arg (arg, REAL_TYPE))
7809 return NULL_TREE;
7810
7811 /* Calculate the result when the argument is a constant. */
7812 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7813 return res;
7814
7815 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7816 fcode = builtin_mathfn_code (arg);
7817 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7818 {
7819 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7820 arg = fold_build2_loc (loc, MULT_EXPR, type,
7821 CALL_EXPR_ARG (arg, 0),
7822 build_real (type, dconsthalf));
7823 return build_call_expr_loc (loc, expfn, 1, arg);
7824 }
7825
7826 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7827 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7828 {
7829 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7830
7831 if (powfn)
7832 {
7833 tree arg0 = CALL_EXPR_ARG (arg, 0);
7834 tree tree_root;
7835 /* The inner root was either sqrt or cbrt. */
7836 /* This was a conditional expression but it triggered a bug
7837 in Sun C 5.5. */
7838 REAL_VALUE_TYPE dconstroot;
7839 if (BUILTIN_SQRT_P (fcode))
7840 dconstroot = dconsthalf;
7841 else
7842 dconstroot = dconst_third ();
7843
7844 /* Adjust for the outer root. */
7845 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7846 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7847 tree_root = build_real (type, dconstroot);
7848 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7849 }
7850 }
7851
7852 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7853 if (flag_unsafe_math_optimizations
7854 && (fcode == BUILT_IN_POW
7855 || fcode == BUILT_IN_POWF
7856 || fcode == BUILT_IN_POWL))
7857 {
7858 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7859 tree arg0 = CALL_EXPR_ARG (arg, 0);
7860 tree arg1 = CALL_EXPR_ARG (arg, 1);
7861 tree narg1;
7862 if (!tree_expr_nonnegative_p (arg0))
7863 arg0 = build1 (ABS_EXPR, type, arg0);
7864 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7865 build_real (type, dconsthalf));
7866 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7867 }
7868
7869 return NULL_TREE;
7870 }
7871
7872 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7873 Return NULL_TREE if no simplification can be made. */
7874
7875 static tree
7876 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7877 {
7878 const enum built_in_function fcode = builtin_mathfn_code (arg);
7879 tree res;
7880
7881 if (!validate_arg (arg, REAL_TYPE))
7882 return NULL_TREE;
7883
7884 /* Calculate the result when the argument is a constant. */
7885 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7886 return res;
7887
7888 if (flag_unsafe_math_optimizations)
7889 {
7890 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7891 if (BUILTIN_EXPONENT_P (fcode))
7892 {
7893 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7894 const REAL_VALUE_TYPE third_trunc =
7895 real_value_truncate (TYPE_MODE (type), dconst_third ());
7896 arg = fold_build2_loc (loc, MULT_EXPR, type,
7897 CALL_EXPR_ARG (arg, 0),
7898 build_real (type, third_trunc));
7899 return build_call_expr_loc (loc, expfn, 1, arg);
7900 }
7901
7902 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7903 if (BUILTIN_SQRT_P (fcode))
7904 {
7905 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7906
7907 if (powfn)
7908 {
7909 tree arg0 = CALL_EXPR_ARG (arg, 0);
7910 tree tree_root;
7911 REAL_VALUE_TYPE dconstroot = dconst_third ();
7912
7913 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7914 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7915 tree_root = build_real (type, dconstroot);
7916 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7917 }
7918 }
7919
7920 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7921 if (BUILTIN_CBRT_P (fcode))
7922 {
7923 tree arg0 = CALL_EXPR_ARG (arg, 0);
7924 if (tree_expr_nonnegative_p (arg0))
7925 {
7926 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7927
7928 if (powfn)
7929 {
7930 tree tree_root;
7931 REAL_VALUE_TYPE dconstroot;
7932
7933 real_arithmetic (&dconstroot, MULT_EXPR,
7934 dconst_third_ptr (), dconst_third_ptr ());
7935 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7936 tree_root = build_real (type, dconstroot);
7937 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7938 }
7939 }
7940 }
7941
7942 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7943 if (fcode == BUILT_IN_POW
7944 || fcode == BUILT_IN_POWF
7945 || fcode == BUILT_IN_POWL)
7946 {
7947 tree arg00 = CALL_EXPR_ARG (arg, 0);
7948 tree arg01 = CALL_EXPR_ARG (arg, 1);
7949 if (tree_expr_nonnegative_p (arg00))
7950 {
7951 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7952 const REAL_VALUE_TYPE dconstroot
7953 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7954 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7955 build_real (type, dconstroot));
7956 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7957 }
7958 }
7959 }
7960 return NULL_TREE;
7961 }
7962
7963 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7964 TYPE is the type of the return value. Return NULL_TREE if no
7965 simplification can be made. */
7966
7967 static tree
7968 fold_builtin_cos (location_t loc,
7969 tree arg, tree type, tree fndecl)
7970 {
7971 tree res, narg;
7972
7973 if (!validate_arg (arg, REAL_TYPE))
7974 return NULL_TREE;
7975
7976 /* Calculate the result when the argument is a constant. */
7977 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7978 return res;
7979
7980 /* Optimize cos(-x) into cos (x). */
7981 if ((narg = fold_strip_sign_ops (arg)))
7982 return build_call_expr_loc (loc, fndecl, 1, narg);
7983
7984 return NULL_TREE;
7985 }
7986
7987 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7988 Return NULL_TREE if no simplification can be made. */
7989
7990 static tree
7991 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7992 {
7993 if (validate_arg (arg, REAL_TYPE))
7994 {
7995 tree res, narg;
7996
7997 /* Calculate the result when the argument is a constant. */
7998 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7999 return res;
8000
8001 /* Optimize cosh(-x) into cosh (x). */
8002 if ((narg = fold_strip_sign_ops (arg)))
8003 return build_call_expr_loc (loc, fndecl, 1, narg);
8004 }
8005
8006 return NULL_TREE;
8007 }
8008
8009 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
8010 argument ARG. TYPE is the type of the return value. Return
8011 NULL_TREE if no simplification can be made. */
8012
8013 static tree
8014 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
8015 bool hyper)
8016 {
8017 if (validate_arg (arg, COMPLEX_TYPE)
8018 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8019 {
8020 tree tmp;
8021
8022 /* Calculate the result when the argument is a constant. */
8023 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
8024 return tmp;
8025
8026 /* Optimize fn(-x) into fn(x). */
8027 if ((tmp = fold_strip_sign_ops (arg)))
8028 return build_call_expr_loc (loc, fndecl, 1, tmp);
8029 }
8030
8031 return NULL_TREE;
8032 }
8033
8034 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
8035 Return NULL_TREE if no simplification can be made. */
8036
8037 static tree
8038 fold_builtin_tan (tree arg, tree type)
8039 {
8040 enum built_in_function fcode;
8041 tree res;
8042
8043 if (!validate_arg (arg, REAL_TYPE))
8044 return NULL_TREE;
8045
8046 /* Calculate the result when the argument is a constant. */
8047 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8048 return res;
8049
8050 /* Optimize tan(atan(x)) = x. */
8051 fcode = builtin_mathfn_code (arg);
8052 if (flag_unsafe_math_optimizations
8053 && (fcode == BUILT_IN_ATAN
8054 || fcode == BUILT_IN_ATANF
8055 || fcode == BUILT_IN_ATANL))
8056 return CALL_EXPR_ARG (arg, 0);
8057
8058 return NULL_TREE;
8059 }
8060
8061 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8062 NULL_TREE if no simplification can be made. */
8063
8064 static tree
8065 fold_builtin_sincos (location_t loc,
8066 tree arg0, tree arg1, tree arg2)
8067 {
8068 tree type;
8069 tree res, fn, call;
8070
8071 if (!validate_arg (arg0, REAL_TYPE)
8072 || !validate_arg (arg1, POINTER_TYPE)
8073 || !validate_arg (arg2, POINTER_TYPE))
8074 return NULL_TREE;
8075
8076 type = TREE_TYPE (arg0);
8077
8078 /* Calculate the result when the argument is a constant. */
8079 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8080 return res;
8081
8082 /* Canonicalize sincos to cexpi. */
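  /* Note: cexpi (x) computes cos (x) + i*sin (x), so below the imaginary
     part of the call is stored through ARG1 (the sin result pointer) and
     the real part through ARG2 (the cos result pointer).  */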
8083 if (!targetm.libc_has_function (function_c99_math_complex))
8084 return NULL_TREE;
8085 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8086 if (!fn)
8087 return NULL_TREE;
8088
8089 call = build_call_expr_loc (loc, fn, 1, arg0);
8090 call = builtin_save_expr (call);
8091
8092 return build2 (COMPOUND_EXPR, void_type_node,
8093 build2 (MODIFY_EXPR, void_type_node,
8094 build_fold_indirect_ref_loc (loc, arg1),
8095 build1 (IMAGPART_EXPR, type, call)),
8096 build2 (MODIFY_EXPR, void_type_node,
8097 build_fold_indirect_ref_loc (loc, arg2),
8098 build1 (REALPART_EXPR, type, call)));
8099 }
8100
8101 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8102 NULL_TREE if no simplification can be made. */
8103
8104 static tree
8105 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8106 {
8107 tree rtype;
8108 tree realp, imagp, ifn;
8109 tree res;
8110
8111 if (!validate_arg (arg0, COMPLEX_TYPE)
8112 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8113 return NULL_TREE;
8114
8115 /* Calculate the result when the argument is a constant. */
8116 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8117 return res;
8118
8119 rtype = TREE_TYPE (TREE_TYPE (arg0));
8120
8121 /* If we can figure out the real part of arg0 and it is a constant zero,
8122 fold to cexpi. */
8123 if (!targetm.libc_has_function (function_c99_math_complex))
8124 return NULL_TREE;
8125 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8126 if (!ifn)
8127 return NULL_TREE;
8128
8129 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8130 && real_zerop (realp))
8131 {
8132 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8133 return build_call_expr_loc (loc, ifn, 1, narg);
8134 }
8135
8136 /* If we can easily decompose the real and imaginary parts, split cexp
8137 into exp (r) * cexpi (i). */
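  /* Worked identity behind the split (illustrative): for z = r + i*y,
	 cexp (z) = exp (r) * (cos (y) + i*sin (y)) = exp (r) * cexpi (y),
     so one real exp call and one cexpi call are combined below with two
     multiplications.  */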
8138 if (flag_unsafe_math_optimizations
8139 && realp)
8140 {
8141 tree rfn, rcall, icall;
8142
8143 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8144 if (!rfn)
8145 return NULL_TREE;
8146
8147 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8148 if (!imagp)
8149 return NULL_TREE;
8150
8151 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8152 icall = builtin_save_expr (icall);
8153 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8154 rcall = builtin_save_expr (rcall);
8155 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8156 fold_build2_loc (loc, MULT_EXPR, rtype,
8157 rcall,
8158 fold_build1_loc (loc, REALPART_EXPR,
8159 rtype, icall)),
8160 fold_build2_loc (loc, MULT_EXPR, rtype,
8161 rcall,
8162 fold_build1_loc (loc, IMAGPART_EXPR,
8163 rtype, icall)));
8164 }
8165
8166 return NULL_TREE;
8167 }
8168
8169 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8170 Return NULL_TREE if no simplification can be made. */
8171
8172 static tree
8173 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8174 {
8175 if (!validate_arg (arg, REAL_TYPE))
8176 return NULL_TREE;
8177
8178 /* Optimize trunc of constant value. */
8179 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8180 {
8181 REAL_VALUE_TYPE r, x;
8182 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8183
8184 x = TREE_REAL_CST (arg);
8185 real_trunc (&r, TYPE_MODE (type), &x);
8186 return build_real (type, r);
8187 }
8188
8189 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8190 }
8191
8192 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8193 Return NULL_TREE if no simplification can be made. */
8194
8195 static tree
8196 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8197 {
8198 if (!validate_arg (arg, REAL_TYPE))
8199 return NULL_TREE;
8200
8201 /* Optimize floor of constant value. */
8202 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8203 {
8204 REAL_VALUE_TYPE x;
8205
8206 x = TREE_REAL_CST (arg);
8207 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8208 {
8209 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8210 REAL_VALUE_TYPE r;
8211
8212 real_floor (&r, TYPE_MODE (type), &x);
8213 return build_real (type, r);
8214 }
8215 }
8216
8217 /* Fold floor (x) where x is nonnegative to trunc (x). */
8218 if (tree_expr_nonnegative_p (arg))
8219 {
8220 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8221 if (truncfn)
8222 return build_call_expr_loc (loc, truncfn, 1, arg);
8223 }
8224
8225 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8226 }
8227
8228 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8229 Return NULL_TREE if no simplification can be made. */
8230
8231 static tree
8232 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8233 {
8234 if (!validate_arg (arg, REAL_TYPE))
8235 return NULL_TREE;
8236
8237 /* Optimize ceil of constant value. */
8238 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8239 {
8240 REAL_VALUE_TYPE x;
8241
8242 x = TREE_REAL_CST (arg);
8243 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8244 {
8245 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8246 REAL_VALUE_TYPE r;
8247
8248 real_ceil (&r, TYPE_MODE (type), &x);
8249 return build_real (type, r);
8250 }
8251 }
8252
8253 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8254 }
8255
8256 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8257 Return NULL_TREE if no simplification can be made. */
8258
8259 static tree
8260 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8261 {
8262 if (!validate_arg (arg, REAL_TYPE))
8263 return NULL_TREE;
8264
8265 /* Optimize round of constant value. */
8266 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8267 {
8268 REAL_VALUE_TYPE x;
8269
8270 x = TREE_REAL_CST (arg);
8271 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8272 {
8273 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8274 REAL_VALUE_TYPE r;
8275
8276 real_round (&r, TYPE_MODE (type), &x);
8277 return build_real (type, r);
8278 }
8279 }
8280
8281 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8282 }
8283
8284 /* Fold function call to builtin lround, lroundf or lroundl (or the
8285 corresponding long long versions) and other rounding functions. ARG
8286 is the argument to the call. Return NULL_TREE if no simplification
8287 can be made. */
8288
8289 static tree
8290 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8291 {
8292 if (!validate_arg (arg, REAL_TYPE))
8293 return NULL_TREE;
8294
8295 /* Optimize lround of constant value. */
8296 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8297 {
8298 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8299
8300 if (real_isfinite (&x))
8301 {
8302 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8303 tree ftype = TREE_TYPE (arg);
8304 REAL_VALUE_TYPE r;
8305 bool fail = false;
8306
8307 switch (DECL_FUNCTION_CODE (fndecl))
8308 {
8309 CASE_FLT_FN (BUILT_IN_IFLOOR):
8310 CASE_FLT_FN (BUILT_IN_LFLOOR):
8311 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8312 real_floor (&r, TYPE_MODE (ftype), &x);
8313 break;
8314
8315 CASE_FLT_FN (BUILT_IN_ICEIL):
8316 CASE_FLT_FN (BUILT_IN_LCEIL):
8317 CASE_FLT_FN (BUILT_IN_LLCEIL):
8318 real_ceil (&r, TYPE_MODE (ftype), &x);
8319 break;
8320
8321 CASE_FLT_FN (BUILT_IN_IROUND):
8322 CASE_FLT_FN (BUILT_IN_LROUND):
8323 CASE_FLT_FN (BUILT_IN_LLROUND):
8324 real_round (&r, TYPE_MODE (ftype), &x);
8325 break;
8326
8327 default:
8328 gcc_unreachable ();
8329 }
8330
8331 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8332 if (!fail)
8333 return wide_int_to_tree (itype, val);
8334 }
8335 }
8336
8337 switch (DECL_FUNCTION_CODE (fndecl))
8338 {
8339 CASE_FLT_FN (BUILT_IN_LFLOOR):
8340 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8341 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8342 if (tree_expr_nonnegative_p (arg))
8343 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8344 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8345 break;
8346 default:;
8347 }
8348
8349 return fold_fixed_mathfn (loc, fndecl, arg);
8350 }
8351
8352 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8353 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8354 the argument to the call. Return NULL_TREE if no simplification can
8355 be made. */
8356
8357 static tree
8358 fold_builtin_bitop (tree fndecl, tree arg)
8359 {
8360 if (!validate_arg (arg, INTEGER_TYPE))
8361 return NULL_TREE;
8362
8363 /* Optimize for constant argument. */
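  /* Illustrative folds (hypothetical constants): __builtin_ffs (0x10)
     becomes 5, __builtin_popcount (0xff) becomes 8, and __builtin_clz (1)
     becomes TYPE_PRECISION (type) - 1, all computed here via wide-int.  */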
8364 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8365 {
8366 tree type = TREE_TYPE (arg);
8367 int result;
8368
8369 switch (DECL_FUNCTION_CODE (fndecl))
8370 {
8371 CASE_INT_FN (BUILT_IN_FFS):
8372 result = wi::ffs (arg);
8373 break;
8374
8375 CASE_INT_FN (BUILT_IN_CLZ):
8376 if (wi::ne_p (arg, 0))
8377 result = wi::clz (arg);
8378 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8379 result = TYPE_PRECISION (type);
8380 break;
8381
8382 CASE_INT_FN (BUILT_IN_CTZ):
8383 if (wi::ne_p (arg, 0))
8384 result = wi::ctz (arg);
8385 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8386 result = TYPE_PRECISION (type);
8387 break;
8388
8389 CASE_INT_FN (BUILT_IN_CLRSB):
8390 result = wi::clrsb (arg);
8391 break;
8392
8393 CASE_INT_FN (BUILT_IN_POPCOUNT):
8394 result = wi::popcount (arg);
8395 break;
8396
8397 CASE_INT_FN (BUILT_IN_PARITY):
8398 result = wi::parity (arg);
8399 break;
8400
8401 default:
8402 gcc_unreachable ();
8403 }
8404
8405 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8406 }
8407
8408 return NULL_TREE;
8409 }
8410
8411 /* Fold a call to __builtin_bswap16, __builtin_bswap32 or __builtin_bswap64.
8412 Return NULL_TREE if no simplification can be made. */
8413 static tree
8414 fold_builtin_bswap (tree fndecl, tree arg)
8415 {
8416 if (! validate_arg (arg, INTEGER_TYPE))
8417 return NULL_TREE;
8418
8419 /* Optimize constant value. */
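  /* For example (illustrative): __builtin_bswap32 (0x12345678) folds to
     0x78563412 here by byte-reversing the wide-int representation.  */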
8420 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8421 {
8422 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8423
8424 switch (DECL_FUNCTION_CODE (fndecl))
8425 {
8426 case BUILT_IN_BSWAP16:
8427 case BUILT_IN_BSWAP32:
8428 case BUILT_IN_BSWAP64:
8429 {
8430 signop sgn = TYPE_SIGN (type);
8431 tree result =
8432 wide_int_to_tree (type,
8433 wide_int::from (arg, TYPE_PRECISION (type),
8434 sgn).bswap ());
8435 return result;
8436 }
8437 default:
8438 gcc_unreachable ();
8439 }
8440 }
8441
8442 return NULL_TREE;
8443 }
8444
8445 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8446 NULL_TREE if no simplification can be made. */
8447
8448 static tree
8449 fold_builtin_hypot (location_t loc, tree fndecl,
8450 tree arg0, tree arg1, tree type)
8451 {
8452 tree res, narg0, narg1;
8453
8454 if (!validate_arg (arg0, REAL_TYPE)
8455 || !validate_arg (arg1, REAL_TYPE))
8456 return NULL_TREE;
8457
8458 /* Calculate the result when the argument is a constant. */
8459 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8460 return res;
8461
8462 /* If either argument to hypot has a negate or abs, strip that off.
8463 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8464 narg0 = fold_strip_sign_ops (arg0);
8465 narg1 = fold_strip_sign_ops (arg1);
8466 if (narg0 || narg1)
8467 {
8468 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8469 narg1 ? narg1 : arg1);
8470 }
8471
8472 /* If either argument is zero, hypot is fabs of the other. */
8473 if (real_zerop (arg0))
8474 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8475 else if (real_zerop (arg1))
8476 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8477
8478 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8479 if (flag_unsafe_math_optimizations
8480 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8481 {
8482 const REAL_VALUE_TYPE sqrt2_trunc
8483 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8484 return fold_build2_loc (loc, MULT_EXPR, type,
8485 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8486 build_real (type, sqrt2_trunc));
8487 }
8488
8489 return NULL_TREE;
8490 }
8491
8492
8493 /* Fold a builtin function call to pow, powf, or powl. Return
8494 NULL_TREE if no simplification can be made. */
8495 static tree
8496 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8497 {
8498 tree res;
8499
8500 if (!validate_arg (arg0, REAL_TYPE)
8501 || !validate_arg (arg1, REAL_TYPE))
8502 return NULL_TREE;
8503
8504 /* Calculate the result when the argument is a constant. */
8505 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8506 return res;
8507
8508 /* Optimize pow(1.0,y) = 1.0. */
8509 if (real_onep (arg0))
8510 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8511
8512 if (TREE_CODE (arg1) == REAL_CST
8513 && !TREE_OVERFLOW (arg1))
8514 {
8515 REAL_VALUE_TYPE cint;
8516 REAL_VALUE_TYPE c;
8517 HOST_WIDE_INT n;
8518
8519 c = TREE_REAL_CST (arg1);
8520
8521 /* Optimize pow(x,0.0) = 1.0. */
8522 if (REAL_VALUES_EQUAL (c, dconst0))
8523 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8524 arg0);
8525
8526 /* Optimize pow(x,1.0) = x. */
8527 if (REAL_VALUES_EQUAL (c, dconst1))
8528 return arg0;
8529
8530 /* Optimize pow(x,-1.0) = 1.0/x. */
8531 if (REAL_VALUES_EQUAL (c, dconstm1))
8532 return fold_build2_loc (loc, RDIV_EXPR, type,
8533 build_real (type, dconst1), arg0);
8534
8535 /* Optimize pow(x,0.5) = sqrt(x). */
8536 if (flag_unsafe_math_optimizations
8537 && REAL_VALUES_EQUAL (c, dconsthalf))
8538 {
8539 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8540
8541 if (sqrtfn != NULL_TREE)
8542 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8543 }
8544
8545 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8546 if (flag_unsafe_math_optimizations)
8547 {
8548 const REAL_VALUE_TYPE dconstroot
8549 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8550
8551 if (REAL_VALUES_EQUAL (c, dconstroot))
8552 {
8553 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8554 if (cbrtfn != NULL_TREE)
8555 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8556 }
8557 }
8558
8559 /* Check for an integer exponent. */
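	  /* real_to_integer converts C to a HOST_WIDE_INT; converting N back
	     to a REAL_VALUE_TYPE and comparing with real_identical detects
	     whether the exponent was exactly an integer in that range.  */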
8560 n = real_to_integer (&c);
8561 real_from_integer (&cint, VOIDmode, n, SIGNED);
8562 if (real_identical (&c, &cint))
8563 {
8564 /* Attempt to evaluate pow at compile-time, unless this should
8565 raise an exception. */
8566 if (TREE_CODE (arg0) == REAL_CST
8567 && !TREE_OVERFLOW (arg0)
8568 && (n > 0
8569 || (!flag_trapping_math && !flag_errno_math)
8570 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8571 {
8572 REAL_VALUE_TYPE x;
8573 bool inexact;
8574
8575 x = TREE_REAL_CST (arg0);
8576 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8577 if (flag_unsafe_math_optimizations || !inexact)
8578 return build_real (type, x);
8579 }
8580
8581 /* Strip sign ops from even integer powers. */
8582 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8583 {
8584 tree narg0 = fold_strip_sign_ops (arg0);
8585 if (narg0)
8586 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8587 }
8588 }
8589 }
8590
8591 if (flag_unsafe_math_optimizations)
8592 {
8593 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8594
8595 /* Optimize pow(expN(x),y) = expN(x*y). */
8596 if (BUILTIN_EXPONENT_P (fcode))
8597 {
8598 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8599 tree arg = CALL_EXPR_ARG (arg0, 0);
8600 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8601 return build_call_expr_loc (loc, expfn, 1, arg);
8602 }
8603
8604 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8605 if (BUILTIN_SQRT_P (fcode))
8606 {
8607 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8608 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8609 build_real (type, dconsthalf));
8610 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8611 }
8612
8613 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8614 if (BUILTIN_CBRT_P (fcode))
8615 {
8616 tree arg = CALL_EXPR_ARG (arg0, 0);
8617 if (tree_expr_nonnegative_p (arg))
8618 {
8619 const REAL_VALUE_TYPE dconstroot
8620 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8621 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8622 build_real (type, dconstroot));
8623 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8624 }
8625 }
8626
8627 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8628 if (fcode == BUILT_IN_POW
8629 || fcode == BUILT_IN_POWF
8630 || fcode == BUILT_IN_POWL)
8631 {
8632 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8633 if (tree_expr_nonnegative_p (arg00))
8634 {
8635 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8636 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8637 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8638 }
8639 }
8640 }
8641
8642 return NULL_TREE;
8643 }
8644
8645 /* Fold a builtin function call to powi, powif, or powil with arguments
8646 ARG0 and ARG1. Return NULL_TREE if no simplification can be made. */
8647 static tree
8648 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8649 tree arg0, tree arg1, tree type)
8650 {
8651 if (!validate_arg (arg0, REAL_TYPE)
8652 || !validate_arg (arg1, INTEGER_TYPE))
8653 return NULL_TREE;
8654
8655 /* Optimize pow(1.0,y) = 1.0. */
8656 if (real_onep (arg0))
8657 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8658
8659 if (tree_fits_shwi_p (arg1))
8660 {
8661 HOST_WIDE_INT c = tree_to_shwi (arg1);
8662
8663 /* Evaluate powi at compile-time. */
8664 if (TREE_CODE (arg0) == REAL_CST
8665 && !TREE_OVERFLOW (arg0))
8666 {
8667 REAL_VALUE_TYPE x;
8668 x = TREE_REAL_CST (arg0);
8669 real_powi (&x, TYPE_MODE (type), &x, c);
8670 return build_real (type, x);
8671 }
8672
8673 /* Optimize pow(x,0) = 1.0. */
8674 if (c == 0)
8675 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8676 arg0);
8677
8678 /* Optimize pow(x,1) = x. */
8679 if (c == 1)
8680 return arg0;
8681
8682 /* Optimize pow(x,-1) = 1.0/x. */
8683 if (c == -1)
8684 return fold_build2_loc (loc, RDIV_EXPR, type,
8685 build_real (type, dconst1), arg0);
8686 }
8687
8688 return NULL_TREE;
8689 }
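
/* Illustration only (not part of GCC's sources): the constant-exponent
   special cases of __builtin_powi handled above, shown at the source level.
   Function names are invented; the block is guarded out.  */
#if 0
double powi_zero (double x) { return __builtin_powi (x, 0); }   /* -> 1.0      */
double powi_one  (double x) { return __builtin_powi (x, 1); }   /* -> x        */
double powi_inv  (double x) { return __builtin_powi (x, -1); }  /* -> 1.0 / x  */
double powi_cst  (void)     { return __builtin_powi (2.0, 8); } /* -> 256.0    */
#endif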
8690
8691 /* A subroutine of fold_builtin to fold the various exponent
8692 functions. Return NULL_TREE if no simplification can be made.
8693 FUNC is the corresponding MPFR exponent function. */
8694
8695 static tree
8696 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8697 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8698 {
8699 if (validate_arg (arg, REAL_TYPE))
8700 {
8701 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8702 tree res;
8703
8704 /* Calculate the result when the argument is a constant. */
8705 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8706 return res;
8707
8708 /* Optimize expN(logN(x)) = x. */
8709 if (flag_unsafe_math_optimizations)
8710 {
8711 const enum built_in_function fcode = builtin_mathfn_code (arg);
8712
8713 if ((func == mpfr_exp
8714 && (fcode == BUILT_IN_LOG
8715 || fcode == BUILT_IN_LOGF
8716 || fcode == BUILT_IN_LOGL))
8717 || (func == mpfr_exp2
8718 && (fcode == BUILT_IN_LOG2
8719 || fcode == BUILT_IN_LOG2F
8720 || fcode == BUILT_IN_LOG2L))
8721 || (func == mpfr_exp10
8722 && (fcode == BUILT_IN_LOG10
8723 || fcode == BUILT_IN_LOG10F
8724 || fcode == BUILT_IN_LOG10L)))
8725 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8726 }
8727 }
8728
8729 return NULL_TREE;
8730 }
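
/* Illustration only (not part of GCC's sources): a sketch of the
   expN(logN(x)) -> x rewrite described above, assuming a C99 <math.h>.
   Function names are invented; the block is guarded out.  */
#if 0
#include <math.h>

double
exp_log (double x)
{
  /* With -funsafe-math-optimizations this collapses to x.  */
  return exp (log (x));
}

double
exp2_log2 (double x)
{
  return exp2 (log2 (x));       /* Likewise folds to x.  */
}
#endif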
8731
8732 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8733 arguments to the call, and TYPE is its return type.
8734 Return NULL_TREE if no simplification can be made. */
8735
8736 static tree
8737 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8738 {
8739 if (!validate_arg (arg1, POINTER_TYPE)
8740 || !validate_arg (arg2, INTEGER_TYPE)
8741 || !validate_arg (len, INTEGER_TYPE))
8742 return NULL_TREE;
8743 else
8744 {
8745 const char *p1;
8746
8747 if (TREE_CODE (arg2) != INTEGER_CST
8748 || !tree_fits_uhwi_p (len))
8749 return NULL_TREE;
8750
8751 p1 = c_getstr (arg1);
8752 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8753 {
8754 char c;
8755 const char *r;
8756 tree tem;
8757
8758 if (target_char_cast (arg2, &c))
8759 return NULL_TREE;
8760
8761 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8762
8763 if (r == NULL)
8764 return build_int_cst (TREE_TYPE (arg1), 0);
8765
8766 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8767 return fold_convert_loc (loc, type, tem);
8768 }
8769 return NULL_TREE;
8770 }
8771 }
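
/* Illustration only (not part of GCC's sources): when the haystack is a
   string literal and the length is a constant that does not exceed the
   literal's size, the memchr call above is evaluated at compile time.
   Assumes <string.h>; function names are invented; guarded out.  */
#if 0
#include <string.h>

const void *hit  (void) { return memchr ("abcdef", 'd', 7); } /* -> "abcdef" + 3  */
const void *miss (void) { return memchr ("abcdef", 'z', 7); } /* -> null pointer  */
#endif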
8772
8773 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8774 Return NULL_TREE if no simplification can be made. */
8775
8776 static tree
8777 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8778 {
8779 const char *p1, *p2;
8780
8781 if (!validate_arg (arg1, POINTER_TYPE)
8782 || !validate_arg (arg2, POINTER_TYPE)
8783 || !validate_arg (len, INTEGER_TYPE))
8784 return NULL_TREE;
8785
8786 /* If the LEN parameter is zero, return zero. */
8787 if (integer_zerop (len))
8788 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8789 arg1, arg2);
8790
8791 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8792 if (operand_equal_p (arg1, arg2, 0))
8793 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8794
8795 p1 = c_getstr (arg1);
8796 p2 = c_getstr (arg2);
8797
8798 /* If all arguments are constant, and the value of len is not greater
8799 than the lengths of arg1 and arg2, evaluate at compile-time. */
8800 if (tree_fits_uhwi_p (len) && p1 && p2
8801 && compare_tree_int (len, strlen (p1) + 1) <= 0
8802 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8803 {
8804 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8805
8806 if (r > 0)
8807 return integer_one_node;
8808 else if (r < 0)
8809 return integer_minus_one_node;
8810 else
8811 return integer_zero_node;
8812 }
8813
8814 /* If the LEN parameter is one, return an expression corresponding to
8815 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8816 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8817 {
8818 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8819 tree cst_uchar_ptr_node
8820 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8821
8822 tree ind1
8823 = fold_convert_loc (loc, integer_type_node,
8824 build1 (INDIRECT_REF, cst_uchar_node,
8825 fold_convert_loc (loc,
8826 cst_uchar_ptr_node,
8827 arg1)));
8828 tree ind2
8829 = fold_convert_loc (loc, integer_type_node,
8830 build1 (INDIRECT_REF, cst_uchar_node,
8831 fold_convert_loc (loc,
8832 cst_uchar_ptr_node,
8833 arg2)));
8834 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8835 }
8836
8837 return NULL_TREE;
8838 }
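
/* Illustration only (not part of GCC's sources): the memcmp folds described
   above at the source level.  Assumes <string.h>; function names are
   invented; guarded out.  */
#if 0
#include <string.h>

int zero_len (const char *p, const char *q) { return memcmp (p, q, 0); }  /* -> 0 */
int same_ptr (const char *p)                { return memcmp (p, p, 16); } /* -> 0 */

int
single_byte (const char *p, const char *q)
{
  /* Folded to *(const unsigned char *) p - *(const unsigned char *) q.  */
  return memcmp (p, q, 1);
}

int constants (void) { return memcmp ("abc", "abd", 3); } /* -> -1 at compile time */
#endif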
8839
8840 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8841 Return NULL_TREE if no simplification can be made. */
8842
8843 static tree
8844 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8845 {
8846 const char *p1, *p2;
8847
8848 if (!validate_arg (arg1, POINTER_TYPE)
8849 || !validate_arg (arg2, POINTER_TYPE))
8850 return NULL_TREE;
8851
8852 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8853 if (operand_equal_p (arg1, arg2, 0))
8854 return integer_zero_node;
8855
8856 p1 = c_getstr (arg1);
8857 p2 = c_getstr (arg2);
8858
8859 if (p1 && p2)
8860 {
8861 const int i = strcmp (p1, p2);
8862 if (i < 0)
8863 return integer_minus_one_node;
8864 else if (i > 0)
8865 return integer_one_node;
8866 else
8867 return integer_zero_node;
8868 }
8869
8870 /* If the second arg is "", return *(const unsigned char*)arg1. */
8871 if (p2 && *p2 == '\0')
8872 {
8873 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8874 tree cst_uchar_ptr_node
8875 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8876
8877 return fold_convert_loc (loc, integer_type_node,
8878 build1 (INDIRECT_REF, cst_uchar_node,
8879 fold_convert_loc (loc,
8880 cst_uchar_ptr_node,
8881 arg1)));
8882 }
8883
8884 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8885 if (p1 && *p1 == '\0')
8886 {
8887 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8888 tree cst_uchar_ptr_node
8889 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8890
8891 tree temp
8892 = fold_convert_loc (loc, integer_type_node,
8893 build1 (INDIRECT_REF, cst_uchar_node,
8894 fold_convert_loc (loc,
8895 cst_uchar_ptr_node,
8896 arg2)));
8897 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8898 }
8899
8900 return NULL_TREE;
8901 }
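
/* Illustration only (not part of GCC's sources): the strcmp folds described
   above.  Assumes <string.h>; function names are invented; guarded out.  */
#if 0
#include <string.h>

int both_const (void)          { return strcmp ("abc", "abd"); } /* -> -1 */
int vs_empty   (const char *s) { return strcmp (s, ""); }  /* -> *(const unsigned char *) s  */
int empty_vs   (const char *s) { return strcmp ("", s); }  /* -> -*(const unsigned char *) s */
#endif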
8902
8903 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8904 Return NULL_TREE if no simplification can be made. */
8905
8906 static tree
8907 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8908 {
8909 const char *p1, *p2;
8910
8911 if (!validate_arg (arg1, POINTER_TYPE)
8912 || !validate_arg (arg2, POINTER_TYPE)
8913 || !validate_arg (len, INTEGER_TYPE))
8914 return NULL_TREE;
8915
8916 /* If the LEN parameter is zero, return zero. */
8917 if (integer_zerop (len))
8918 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8919 arg1, arg2);
8920
8921 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8922 if (operand_equal_p (arg1, arg2, 0))
8923 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8924
8925 p1 = c_getstr (arg1);
8926 p2 = c_getstr (arg2);
8927
8928 if (tree_fits_uhwi_p (len) && p1 && p2)
8929 {
8930 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8931 if (i > 0)
8932 return integer_one_node;
8933 else if (i < 0)
8934 return integer_minus_one_node;
8935 else
8936 return integer_zero_node;
8937 }
8938
8939 /* If the second arg is "", and the length is greater than zero,
8940 return *(const unsigned char*)arg1. */
8941 if (p2 && *p2 == '\0'
8942 && TREE_CODE (len) == INTEGER_CST
8943 && tree_int_cst_sgn (len) == 1)
8944 {
8945 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8946 tree cst_uchar_ptr_node
8947 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8948
8949 return fold_convert_loc (loc, integer_type_node,
8950 build1 (INDIRECT_REF, cst_uchar_node,
8951 fold_convert_loc (loc,
8952 cst_uchar_ptr_node,
8953 arg1)));
8954 }
8955
8956 /* If the first arg is "", and the length is greater than zero,
8957 return -*(const unsigned char*)arg2. */
8958 if (p1 && *p1 == '\0'
8959 && TREE_CODE (len) == INTEGER_CST
8960 && tree_int_cst_sgn (len) == 1)
8961 {
8962 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8963 tree cst_uchar_ptr_node
8964 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8965
8966 tree temp = fold_convert_loc (loc, integer_type_node,
8967 build1 (INDIRECT_REF, cst_uchar_node,
8968 fold_convert_loc (loc,
8969 cst_uchar_ptr_node,
8970 arg2)));
8971 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8972 }
8973
8974 /* If the LEN parameter is one, return an expression corresponding to
8975 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8976 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8977 {
8978 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8979 tree cst_uchar_ptr_node
8980 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8981
8982 tree ind1 = fold_convert_loc (loc, integer_type_node,
8983 build1 (INDIRECT_REF, cst_uchar_node,
8984 fold_convert_loc (loc,
8985 cst_uchar_ptr_node,
8986 arg1)));
8987 tree ind2 = fold_convert_loc (loc, integer_type_node,
8988 build1 (INDIRECT_REF, cst_uchar_node,
8989 fold_convert_loc (loc,
8990 cst_uchar_ptr_node,
8991 arg2)));
8992 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8993 }
8994
8995 return NULL_TREE;
8996 }
8997
8998 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8999 ARG. Return NULL_TREE if no simplification can be made. */
9000
9001 static tree
9002 fold_builtin_signbit (location_t loc, tree arg, tree type)
9003 {
9004 if (!validate_arg (arg, REAL_TYPE))
9005 return NULL_TREE;
9006
9007 /* If ARG is a compile-time constant, determine the result. */
9008 if (TREE_CODE (arg) == REAL_CST
9009 && !TREE_OVERFLOW (arg))
9010 {
9011 REAL_VALUE_TYPE c;
9012
9013 c = TREE_REAL_CST (arg);
9014 return (REAL_VALUE_NEGATIVE (c)
9015 ? build_one_cst (type)
9016 : build_zero_cst (type));
9017 }
9018
9019 /* If ARG is non-negative, the result is always zero. */
9020 if (tree_expr_nonnegative_p (arg))
9021 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9022
9023 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9024 if (!HONOR_SIGNED_ZEROS (arg))
9025 return fold_convert (type,
9026 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9027 build_real (TREE_TYPE (arg), dconst0)));
9028
9029 return NULL_TREE;
9030 }
9031
9032 /* Fold function call to builtin copysign, copysignf or copysignl with
9033 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9034 be made. */
9035
9036 static tree
9037 fold_builtin_copysign (location_t loc, tree fndecl,
9038 tree arg1, tree arg2, tree type)
9039 {
9040 tree tem;
9041
9042 if (!validate_arg (arg1, REAL_TYPE)
9043 || !validate_arg (arg2, REAL_TYPE))
9044 return NULL_TREE;
9045
9046 /* copysign(X,X) is X. */
9047 if (operand_equal_p (arg1, arg2, 0))
9048 return fold_convert_loc (loc, type, arg1);
9049
9050 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9051 if (TREE_CODE (arg1) == REAL_CST
9052 && TREE_CODE (arg2) == REAL_CST
9053 && !TREE_OVERFLOW (arg1)
9054 && !TREE_OVERFLOW (arg2))
9055 {
9056 REAL_VALUE_TYPE c1, c2;
9057
9058 c1 = TREE_REAL_CST (arg1);
9059 c2 = TREE_REAL_CST (arg2);
9060 /* c1.sign := c2.sign. */
9061 real_copysign (&c1, &c2);
9062 return build_real (type, c1);
9063 }
9064
9065 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9066 Remember to evaluate Y for side-effects. */
9067 if (tree_expr_nonnegative_p (arg2))
9068 return omit_one_operand_loc (loc, type,
9069 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9070 arg2);
9071
9072 /* Strip sign changing operations for the first argument. */
9073 tem = fold_strip_sign_ops (arg1);
9074 if (tem)
9075 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9076
9077 return NULL_TREE;
9078 }
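
/* Illustration only (not part of GCC's sources): the copysign folds
   described above.  Assumes <math.h>; function names are invented;
   guarded out.  */
#if 0
#include <math.h>

double same_args (double x) { return copysign (x, x); }      /* -> x        */
double pos_sign  (double x) { return copysign (x, 2.0); }    /* -> fabs (x) */
double constants (void)     { return copysign (3.0, -1.0); } /* -> -3.0     */
#endif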
9079
9080 /* Fold a call to builtin isascii with argument ARG. */
9081
9082 static tree
9083 fold_builtin_isascii (location_t loc, tree arg)
9084 {
9085 if (!validate_arg (arg, INTEGER_TYPE))
9086 return NULL_TREE;
9087 else
9088 {
9089 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9090 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9091 build_int_cst (integer_type_node,
9092 ~ (unsigned HOST_WIDE_INT) 0x7f));
9093 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9094 arg, integer_zero_node);
9095 }
9096 }
9097
9098 /* Fold a call to builtin toascii with argument ARG. */
9099
9100 static tree
9101 fold_builtin_toascii (location_t loc, tree arg)
9102 {
9103 if (!validate_arg (arg, INTEGER_TYPE))
9104 return NULL_TREE;
9105
9106 /* Transform toascii(c) -> (c & 0x7f). */
9107 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9108 build_int_cst (integer_type_node, 0x7f));
9109 }
9110
9111 /* Fold a call to builtin isdigit with argument ARG. */
9112
9113 static tree
9114 fold_builtin_isdigit (location_t loc, tree arg)
9115 {
9116 if (!validate_arg (arg, INTEGER_TYPE))
9117 return NULL_TREE;
9118 else
9119 {
9120 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9121 /* According to the C standard, isdigit is unaffected by locale.
9122 However, it definitely is affected by the target character set. */
9123 unsigned HOST_WIDE_INT target_digit0
9124 = lang_hooks.to_target_charset ('0');
9125
9126 if (target_digit0 == 0)
9127 return NULL_TREE;
9128
9129 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9130 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9131 build_int_cst (unsigned_type_node, target_digit0));
9132 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9133 build_int_cst (unsigned_type_node, 9));
9134 }
9135 }
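
/* Illustration only (not part of GCC's sources): the character-class
   rewrites above, written against the GCC builtins directly.  The single
   unsigned comparison in the isdigit case covers both bounds because any
   value below '0' wraps around to a huge unsigned number.  Guarded out.  */
#if 0
int ascii_p  (int c) { return __builtin_isascii (c); } /* -> (c & ~0x7f) == 0        */
int to_ascii (int c) { return __builtin_toascii (c); } /* -> c & 0x7f                */
int digit_p  (int c) { return __builtin_isdigit (c); } /* -> (unsigned) c - '0' <= 9 */
#endif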
9136
9137 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9138
9139 static tree
9140 fold_builtin_fabs (location_t loc, tree arg, tree type)
9141 {
9142 if (!validate_arg (arg, REAL_TYPE))
9143 return NULL_TREE;
9144
9145 arg = fold_convert_loc (loc, type, arg);
9146 if (TREE_CODE (arg) == REAL_CST)
9147 return fold_abs_const (arg, type);
9148 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9149 }
9150
9151 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9152
9153 static tree
9154 fold_builtin_abs (location_t loc, tree arg, tree type)
9155 {
9156 if (!validate_arg (arg, INTEGER_TYPE))
9157 return NULL_TREE;
9158
9159 arg = fold_convert_loc (loc, type, arg);
9160 if (TREE_CODE (arg) == INTEGER_CST)
9161 return fold_abs_const (arg, type);
9162 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9163 }
9164
9165 /* Fold a fma operation with arguments ARG[012]. */
9166
9167 tree
9168 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9169 tree type, tree arg0, tree arg1, tree arg2)
9170 {
9171 if (TREE_CODE (arg0) == REAL_CST
9172 && TREE_CODE (arg1) == REAL_CST
9173 && TREE_CODE (arg2) == REAL_CST)
9174 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9175
9176 return NULL_TREE;
9177 }
9178
9179 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9180
9181 static tree
9182 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9183 {
9184 if (validate_arg (arg0, REAL_TYPE)
9185 && validate_arg (arg1, REAL_TYPE)
9186 && validate_arg (arg2, REAL_TYPE))
9187 {
9188 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9189 if (tem)
9190 return tem;
9191
9192 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9193 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9194 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9195 }
9196 return NULL_TREE;
9197 }
9198
9199 /* Fold a call to builtin fmin or fmax. */
9200
9201 static tree
9202 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9203 tree type, bool max)
9204 {
9205 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9206 {
9207 /* Calculate the result when the argument is a constant. */
9208 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9209
9210 if (res)
9211 return res;
9212
9213 /* If either argument is NaN, return the other one. Avoid the
9214 transformation if we get (and honor) a signalling NaN. Using
9215 omit_one_operand() ensures we create a non-lvalue. */
9216 if (TREE_CODE (arg0) == REAL_CST
9217 && real_isnan (&TREE_REAL_CST (arg0))
9218 && (! HONOR_SNANS (arg0)
9219 || ! TREE_REAL_CST (arg0).signalling))
9220 return omit_one_operand_loc (loc, type, arg1, arg0);
9221 if (TREE_CODE (arg1) == REAL_CST
9222 && real_isnan (&TREE_REAL_CST (arg1))
9223 && (! HONOR_SNANS (arg1)
9224 || ! TREE_REAL_CST (arg1).signalling))
9225 return omit_one_operand_loc (loc, type, arg0, arg1);
9226
9227 /* Transform fmin/fmax(x,x) -> x. */
9228 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9229 return omit_one_operand_loc (loc, type, arg0, arg1);
9230
9231 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9232 functions to return the numeric arg if the other one is NaN.
9233 These tree codes don't honor that, so only transform if
9234 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9235 handled, so we don't have to worry about it either. */
9236 if (flag_finite_math_only)
9237 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9238 fold_convert_loc (loc, type, arg0),
9239 fold_convert_loc (loc, type, arg1));
9240 }
9241 return NULL_TREE;
9242 }
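
/* Illustration only (not part of GCC's sources): the fmin/fmax behaviour
   the folds above rely on.  Assumes <math.h>; function names are invented;
   guarded out.  */
#if 0
#include <math.h>

double drop_nan (double x) { return fmax (x, __builtin_nan ("")); } /* -> x (quiet NaN dropped) */
double same     (double x) { return fmin (x, x); }                  /* -> x                     */

double
plain_max (double x, double y)
{
  /* Becomes a bare MAX_EXPR only under -ffinite-math-only, because
     MAX_EXPR does not implement the C99 NaN rule.  */
  return fmax (x, y);
}
#endif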
9243
9244 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9245
9246 static tree
9247 fold_builtin_carg (location_t loc, tree arg, tree type)
9248 {
9249 if (validate_arg (arg, COMPLEX_TYPE)
9250 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9251 {
9252 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9253
9254 if (atan2_fn)
9255 {
9256 tree new_arg = builtin_save_expr (arg);
9257 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9258 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9259 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9260 }
9261 }
9262
9263 return NULL_TREE;
9264 }
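
/* Illustration only (not part of GCC's sources): carg(a+bi) is rewritten to
   atan2(b,a) as described above.  Assumes C99 <complex.h> and <math.h>;
   the function name is invented; guarded out.  */
#if 0
#include <complex.h>
#include <math.h>

double
phase (double _Complex z)
{
  return carg (z);              /* -> atan2 (cimag (z), creal (z))  */
}
#endif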
9265
9266 /* Fold a call to builtin logb/ilogb. */
9267
9268 static tree
9269 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9270 {
9271 if (! validate_arg (arg, REAL_TYPE))
9272 return NULL_TREE;
9273
9274 STRIP_NOPS (arg);
9275
9276 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9277 {
9278 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9279
9280 switch (value->cl)
9281 {
9282 case rvc_nan:
9283 case rvc_inf:
9284 /* If arg is Inf or NaN and we're logb, return it. */
9285 if (TREE_CODE (rettype) == REAL_TYPE)
9286 {
9287 /* For logb(-Inf) we have to return +Inf. */
9288 if (real_isinf (value) && real_isneg (value))
9289 {
9290 REAL_VALUE_TYPE tem;
9291 real_inf (&tem);
9292 return build_real (rettype, tem);
9293 }
9294 return fold_convert_loc (loc, rettype, arg);
9295 }
9296 /* Fall through... */
9297 case rvc_zero:
9298 /* Zero may set errno and/or raise an exception for logb; for
9299 ilogb we don't know FP_ILOGB0. */
9300 return NULL_TREE;
9301 case rvc_normal:
9302 /* For normal numbers, proceed iff radix == 2. In GCC,
9303 normalized significands are in the range [0.5, 1.0). We
9304 want the exponent as if they were [1.0, 2.0) so get the
9305 exponent and subtract 1. */
9306 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9307 return fold_convert_loc (loc, rettype,
9308 build_int_cst (integer_type_node,
9309 REAL_EXP (value)-1));
9310 break;
9311 }
9312 }
9313
9314 return NULL_TREE;
9315 }
9316
9317 /* Fold a call to builtin significand, if radix == 2. */
9318
9319 static tree
9320 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9321 {
9322 if (! validate_arg (arg, REAL_TYPE))
9323 return NULL_TREE;
9324
9325 STRIP_NOPS (arg);
9326
9327 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9328 {
9329 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9330
9331 switch (value->cl)
9332 {
9333 case rvc_zero:
9334 case rvc_nan:
9335 case rvc_inf:
9336 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9337 return fold_convert_loc (loc, rettype, arg);
9338 case rvc_normal:
9339 /* For normal numbers, proceed iff radix == 2. */
9340 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9341 {
9342 REAL_VALUE_TYPE result = *value;
9343 /* In GCC, normalized significands are in the range [0.5,
9344 1.0). We want them to be [1.0, 2.0) so set the
9345 exponent to 1. */
9346 SET_REAL_EXP (&result, 1);
9347 return build_real (rettype, result);
9348 }
9349 break;
9350 }
9351 }
9352
9353 return NULL_TREE;
9354 }
9355
9356 /* Fold a call to builtin frexp, we can assume the base is 2. */
9357
9358 static tree
9359 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9360 {
9361 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9362 return NULL_TREE;
9363
9364 STRIP_NOPS (arg0);
9365
9366 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9367 return NULL_TREE;
9368
9369 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9370
9371 /* Proceed if a valid pointer type was passed in. */
9372 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9373 {
9374 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9375 tree frac, exp;
9376
9377 switch (value->cl)
9378 {
9379 case rvc_zero:
9380 /* For +-0, return (*exp = 0, +-0). */
9381 exp = integer_zero_node;
9382 frac = arg0;
9383 break;
9384 case rvc_nan:
9385 case rvc_inf:
9386 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9387 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9388 case rvc_normal:
9389 {
9390 /* Since the frexp function always expects base 2, and in
9391 GCC normalized significands are already in the range
9392 [0.5, 1.0), we have exactly what frexp wants. */
9393 REAL_VALUE_TYPE frac_rvt = *value;
9394 SET_REAL_EXP (&frac_rvt, 0);
9395 frac = build_real (rettype, frac_rvt);
9396 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9397 }
9398 break;
9399 default:
9400 gcc_unreachable ();
9401 }
9402
9403 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9404 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9405 TREE_SIDE_EFFECTS (arg1) = 1;
9406 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9407 }
9408
9409 return NULL_TREE;
9410 }
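
/* Illustration only (not part of GCC's sources): with a constant argument
   the decomposition x == frac * 2**exp, frac in [0.5, 1), is done at
   compile time, e.g. 6.0 == 0.75 * 2**3.  Assumes <math.h>; the function
   name is invented; guarded out.  */
#if 0
#include <math.h>

double
split_six (int *e)
{
  return frexp (6.0, e);        /* *e = 3, result 0.75  */
}
#endif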
9411
9412 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9413 then we can assume the base is two. If it's false, then we have to
9414 check the mode of the TYPE parameter in certain cases. */
9415
9416 static tree
9417 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9418 tree type, bool ldexp)
9419 {
9420 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9421 {
9422 STRIP_NOPS (arg0);
9423 STRIP_NOPS (arg1);
9424
9425 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9426 if (real_zerop (arg0) || integer_zerop (arg1)
9427 || (TREE_CODE (arg0) == REAL_CST
9428 && !real_isfinite (&TREE_REAL_CST (arg0))))
9429 return omit_one_operand_loc (loc, type, arg0, arg1);
9430
9431 /* If both arguments are constant, then try to evaluate it. */
9432 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9433 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9434 && tree_fits_shwi_p (arg1))
9435 {
9436 /* Bound the maximum adjustment to twice the range of the
9437 mode's valid exponents. Use abs to ensure the range is
9438 positive as a sanity check. */
9439 const long max_exp_adj = 2 *
9440 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9441 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9442
9443 /* Get the user-requested adjustment. */
9444 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9445
9446 /* The requested adjustment must be inside this range. This
9447 is a preliminary cap to avoid things like overflow; we
9448 may still fail to compute the result for other reasons. */
9449 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9450 {
9451 REAL_VALUE_TYPE initial_result;
9452
9453 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9454
9455 /* Ensure we didn't overflow. */
9456 if (! real_isinf (&initial_result))
9457 {
9458 const REAL_VALUE_TYPE trunc_result
9459 = real_value_truncate (TYPE_MODE (type), initial_result);
9460
9461 /* Only proceed if the target mode can hold the
9462 resulting value. */
9463 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9464 return build_real (type, trunc_result);
9465 }
9466 }
9467 }
9468 }
9469
9470 return NULL_TREE;
9471 }
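
/* Illustration only (not part of GCC's sources): the ldexp/scalbn folds
   described above.  Assumes <math.h>; function names are invented;
   guarded out.  */
#if 0
#include <math.h>

double no_shift   (double x) { return ldexp (x, 0); }    /* -> x     */
double shifted    (void)     { return ldexp (1.5, 4); }  /* -> 24.0  */
double via_scalbn (void)     { return scalbn (1.5, 4); } /* same fold once the radix is known to be 2 */
#endif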
9472
9473 /* Fold a call to builtin modf. */
9474
9475 static tree
9476 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9477 {
9478 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9479 return NULL_TREE;
9480
9481 STRIP_NOPS (arg0);
9482
9483 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9484 return NULL_TREE;
9485
9486 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9487
9488 /* Proceed if a valid pointer type was passed in. */
9489 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9490 {
9491 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9492 REAL_VALUE_TYPE trunc, frac;
9493
9494 switch (value->cl)
9495 {
9496 case rvc_nan:
9497 case rvc_zero:
9498 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9499 trunc = frac = *value;
9500 break;
9501 case rvc_inf:
9502 /* For +-Inf, return (*arg1 = arg0, +-0). */
9503 frac = dconst0;
9504 frac.sign = value->sign;
9505 trunc = *value;
9506 break;
9507 case rvc_normal:
9508 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9509 real_trunc (&trunc, VOIDmode, value);
9510 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9511 /* If the original number was negative and already
9512 integral, then the fractional part is -0.0. */
9513 if (value->sign && frac.cl == rvc_zero)
9514 frac.sign = value->sign;
9515 break;
9516 }
9517
9518 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9519 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9520 build_real (rettype, trunc));
9521 TREE_SIDE_EFFECTS (arg1) = 1;
9522 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9523 build_real (rettype, frac));
9524 }
9525
9526 return NULL_TREE;
9527 }
9528
9529 /* Given a location LOC, an interclass builtin function decl FNDECL
9530 and its single argument ARG, return a folded expression computing
9531 the same, or NULL_TREE if we either couldn't or didn't want to fold
9532 (the latter happens if there's an RTL instruction available). */
9533
9534 static tree
9535 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9536 {
9537 machine_mode mode;
9538
9539 if (!validate_arg (arg, REAL_TYPE))
9540 return NULL_TREE;
9541
9542 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9543 return NULL_TREE;
9544
9545 mode = TYPE_MODE (TREE_TYPE (arg));
9546
9547 /* If there is no optab, try generic code. */
9548 switch (DECL_FUNCTION_CODE (fndecl))
9549 {
9550 tree result;
9551
9552 CASE_FLT_FN (BUILT_IN_ISINF):
9553 {
9554 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9555 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9556 tree const type = TREE_TYPE (arg);
9557 REAL_VALUE_TYPE r;
9558 char buf[128];
9559
9560 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9561 real_from_string (&r, buf);
9562 result = build_call_expr (isgr_fn, 2,
9563 fold_build1_loc (loc, ABS_EXPR, type, arg),
9564 build_real (type, r));
9565 return result;
9566 }
9567 CASE_FLT_FN (BUILT_IN_FINITE):
9568 case BUILT_IN_ISFINITE:
9569 {
9570 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9571 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9572 tree const type = TREE_TYPE (arg);
9573 REAL_VALUE_TYPE r;
9574 char buf[128];
9575
9576 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9577 real_from_string (&r, buf);
9578 result = build_call_expr (isle_fn, 2,
9579 fold_build1_loc (loc, ABS_EXPR, type, arg),
9580 build_real (type, r));
9581 /*result = fold_build2_loc (loc, UNGT_EXPR,
9582 TREE_TYPE (TREE_TYPE (fndecl)),
9583 fold_build1_loc (loc, ABS_EXPR, type, arg),
9584 build_real (type, r));
9585 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9586 TREE_TYPE (TREE_TYPE (fndecl)),
9587 result);*/
9588 return result;
9589 }
9590 case BUILT_IN_ISNORMAL:
9591 {
9592 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9593 islessequal(fabs(x),DBL_MAX). */
9594 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9595 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9596 tree const type = TREE_TYPE (arg);
9597 REAL_VALUE_TYPE rmax, rmin;
9598 char buf[128];
9599
9600 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9601 real_from_string (&rmax, buf);
9602 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9603 real_from_string (&rmin, buf);
9604 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9605 result = build_call_expr (isle_fn, 2, arg,
9606 build_real (type, rmax));
9607 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9608 build_call_expr (isge_fn, 2, arg,
9609 build_real (type, rmin)));
9610 return result;
9611 }
9612 default:
9613 break;
9614 }
9615
9616 return NULL_TREE;
9617 }
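
/* Illustration only (not part of GCC's sources): open-coded equivalents of
   the generic rewrites above, used when no direct instruction is available.
   Assumes C99 <math.h> and <float.h>; function names are invented;
   guarded out.  */
#if 0
#include <math.h>
#include <float.h>

int inf_p    (double x) { return isgreater (fabs (x), DBL_MAX); }   /* isinf    */
int finite_p (double x) { return islessequal (fabs (x), DBL_MAX); } /* isfinite */

int
normal_p (double x)                                                 /* isnormal */
{
  return isgreaterequal (fabs (x), DBL_MIN) & islessequal (fabs (x), DBL_MAX);
}
#endif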
9618
9619 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9620 ARG is the argument for the call. */
9621
9622 static tree
9623 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9624 {
9625 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9626 REAL_VALUE_TYPE r;
9627
9628 if (!validate_arg (arg, REAL_TYPE))
9629 return NULL_TREE;
9630
9631 switch (builtin_index)
9632 {
9633 case BUILT_IN_ISINF:
9634 if (!HONOR_INFINITIES (arg))
9635 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9636
9637 if (TREE_CODE (arg) == REAL_CST)
9638 {
9639 r = TREE_REAL_CST (arg);
9640 if (real_isinf (&r))
9641 return real_compare (GT_EXPR, &r, &dconst0)
9642 ? integer_one_node : integer_minus_one_node;
9643 else
9644 return integer_zero_node;
9645 }
9646
9647 return NULL_TREE;
9648
9649 case BUILT_IN_ISINF_SIGN:
9650 {
9651 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9652 /* In a boolean context, GCC will fold the inner COND_EXPR to
9653 1. So e.g. "if (isinf_sign(x))" would be folded to just
9654 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9655 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9656 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9657 tree tmp = NULL_TREE;
9658
9659 arg = builtin_save_expr (arg);
9660
9661 if (signbit_fn && isinf_fn)
9662 {
9663 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9664 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9665
9666 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9667 signbit_call, integer_zero_node);
9668 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9669 isinf_call, integer_zero_node);
9670
9671 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9672 integer_minus_one_node, integer_one_node);
9673 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9674 isinf_call, tmp,
9675 integer_zero_node);
9676 }
9677
9678 return tmp;
9679 }
9680
9681 case BUILT_IN_ISFINITE:
9682 if (!HONOR_NANS (arg)
9683 && !HONOR_INFINITIES (arg))
9684 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9685
9686 if (TREE_CODE (arg) == REAL_CST)
9687 {
9688 r = TREE_REAL_CST (arg);
9689 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9690 }
9691
9692 return NULL_TREE;
9693
9694 case BUILT_IN_ISNAN:
9695 if (!HONOR_NANS (arg))
9696 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9697
9698 if (TREE_CODE (arg) == REAL_CST)
9699 {
9700 r = TREE_REAL_CST (arg);
9701 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9702 }
9703
9704 arg = builtin_save_expr (arg);
9705 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9706
9707 default:
9708 gcc_unreachable ();
9709 }
9710 }
9711
9712 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9713 This builtin will generate code to return the appropriate floating
9714 point classification depending on the value of the floating point
9715 number passed in. The possible return values must be supplied as
9716 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9717 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9718 one floating point argument, which is "type generic". */
9719
9720 static tree
9721 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9722 {
9723 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9724 arg, type, res, tmp;
9725 machine_mode mode;
9726 REAL_VALUE_TYPE r;
9727 char buf[128];
9728
9729 /* Verify the required arguments in the original call. */
9730 if (nargs != 6
9731 || !validate_arg (args[0], INTEGER_TYPE)
9732 || !validate_arg (args[1], INTEGER_TYPE)
9733 || !validate_arg (args[2], INTEGER_TYPE)
9734 || !validate_arg (args[3], INTEGER_TYPE)
9735 || !validate_arg (args[4], INTEGER_TYPE)
9736 || !validate_arg (args[5], REAL_TYPE))
9737 return NULL_TREE;
9738
9739 fp_nan = args[0];
9740 fp_infinite = args[1];
9741 fp_normal = args[2];
9742 fp_subnormal = args[3];
9743 fp_zero = args[4];
9744 arg = args[5];
9745 type = TREE_TYPE (arg);
9746 mode = TYPE_MODE (type);
9747 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9748
9749 /* fpclassify(x) ->
9750 isnan(x) ? FP_NAN :
9751 (fabs(x) == Inf ? FP_INFINITE :
9752 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9753 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9754
9755 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9756 build_real (type, dconst0));
9757 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9758 tmp, fp_zero, fp_subnormal);
9759
9760 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9761 real_from_string (&r, buf);
9762 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9763 arg, build_real (type, r));
9764 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9765
9766 if (HONOR_INFINITIES (mode))
9767 {
9768 real_inf (&r);
9769 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9770 build_real (type, r));
9771 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9772 fp_infinite, res);
9773 }
9774
9775 if (HONOR_NANS (mode))
9776 {
9777 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9778 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9779 }
9780
9781 return res;
9782 }
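
/* Illustration only (not part of GCC's sources): a typical use of the
   builtin, which the fold above expands into the nested conditional chain
   shown in the comment.  Assumes <math.h> for the FP_* macros; the
   function name is invented; guarded out.  */
#if 0
#include <math.h>

int
classify (double x)
{
  return __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                               FP_SUBNORMAL, FP_ZERO, x);
}
#endif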
9783
9784 /* Fold a call to an unordered comparison function such as
9785 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9786 being called and ARG0 and ARG1 are the arguments for the call.
9787 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9788 the opposite of the desired result. UNORDERED_CODE is used
9789 for modes that can hold NaNs and ORDERED_CODE is used for
9790 the rest. */
9791
9792 static tree
9793 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9794 enum tree_code unordered_code,
9795 enum tree_code ordered_code)
9796 {
9797 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9798 enum tree_code code;
9799 tree type0, type1;
9800 enum tree_code code0, code1;
9801 tree cmp_type = NULL_TREE;
9802
9803 type0 = TREE_TYPE (arg0);
9804 type1 = TREE_TYPE (arg1);
9805
9806 code0 = TREE_CODE (type0);
9807 code1 = TREE_CODE (type1);
9808
9809 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9810 /* Choose the wider of two real types. */
9811 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9812 ? type0 : type1;
9813 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9814 cmp_type = type0;
9815 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9816 cmp_type = type1;
9817
9818 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9819 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9820
9821 if (unordered_code == UNORDERED_EXPR)
9822 {
9823 if (!HONOR_NANS (arg0))
9824 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9825 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9826 }
9827
9828 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9829 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9830 fold_build2_loc (loc, code, type, arg0, arg1));
9831 }
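
/* Illustration only (not part of GCC's sources): the quiet comparison
   macros handled above.  isgreater (x, y) folds to the negation of the
   unordered-or-less-or-equal comparison when NaNs are honored, and to a
   plain x > y otherwise.  Assumes C99 <math.h>; function names are
   invented; guarded out.  */
#if 0
#include <math.h>

int quiet_gt (double x, double y) { return isgreater (x, y); }    /* -> !(x UNLE y)     */
int unord_p  (double x, double y) { return isunordered (x, y); }  /* -> x UNORDERED y   */
#endif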
9832
9833 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9834 arithmetic if it can never overflow, or into internal functions that
9835 return both the result of the arithmetic and an overflow flag in
9836 a complex integer result, or some other check for overflow. */
9837
9838 static tree
9839 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9840 tree arg0, tree arg1, tree arg2)
9841 {
9842 enum internal_fn ifn = IFN_LAST;
9843 tree type = TREE_TYPE (TREE_TYPE (arg2));
9844 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9845 switch (fcode)
9846 {
9847 case BUILT_IN_ADD_OVERFLOW:
9848 case BUILT_IN_SADD_OVERFLOW:
9849 case BUILT_IN_SADDL_OVERFLOW:
9850 case BUILT_IN_SADDLL_OVERFLOW:
9851 case BUILT_IN_UADD_OVERFLOW:
9852 case BUILT_IN_UADDL_OVERFLOW:
9853 case BUILT_IN_UADDLL_OVERFLOW:
9854 ifn = IFN_ADD_OVERFLOW;
9855 break;
9856 case BUILT_IN_SUB_OVERFLOW:
9857 case BUILT_IN_SSUB_OVERFLOW:
9858 case BUILT_IN_SSUBL_OVERFLOW:
9859 case BUILT_IN_SSUBLL_OVERFLOW:
9860 case BUILT_IN_USUB_OVERFLOW:
9861 case BUILT_IN_USUBL_OVERFLOW:
9862 case BUILT_IN_USUBLL_OVERFLOW:
9863 ifn = IFN_SUB_OVERFLOW;
9864 break;
9865 case BUILT_IN_MUL_OVERFLOW:
9866 case BUILT_IN_SMUL_OVERFLOW:
9867 case BUILT_IN_SMULL_OVERFLOW:
9868 case BUILT_IN_SMULLL_OVERFLOW:
9869 case BUILT_IN_UMUL_OVERFLOW:
9870 case BUILT_IN_UMULL_OVERFLOW:
9871 case BUILT_IN_UMULLL_OVERFLOW:
9872 ifn = IFN_MUL_OVERFLOW;
9873 break;
9874 default:
9875 gcc_unreachable ();
9876 }
9877 tree ctype = build_complex_type (type);
9878 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9879 2, arg0, arg1);
9880 tree tgt = save_expr (call);
9881 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9882 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9883 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9884 tree store
9885 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9886 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9887 }
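
/* Illustration only (not part of GCC's sources): a typical use of the
   overflow builtins folded above.  The builtin stores the wrapped result
   through the pointer and returns true iff the infinite-precision result
   did not fit; internally this becomes a single IFN_ADD_OVERFLOW call whose
   complex result carries both values.  The function name is invented;
   guarded out.  */
#if 0
#include <limits.h>
#include <stdbool.h>

bool
saturating_add (int a, int b, int *res)
{
  if (__builtin_add_overflow (a, b, res))
    {
      /* On overflow clamp to the nearest representable value.  */
      *res = a < 0 ? INT_MIN : INT_MAX;
      return true;
    }
  return false;
}
#endif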
9888
9889 /* Fold a call to built-in function FNDECL with 0 arguments.
9890 This function returns NULL_TREE if no simplification was possible. */
9891
9892 static tree
9893 fold_builtin_0 (location_t loc, tree fndecl)
9894 {
9895 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9896 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9897 switch (fcode)
9898 {
9899 CASE_FLT_FN (BUILT_IN_INF):
9900 case BUILT_IN_INFD32:
9901 case BUILT_IN_INFD64:
9902 case BUILT_IN_INFD128:
9903 return fold_builtin_inf (loc, type, true);
9904
9905 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9906 return fold_builtin_inf (loc, type, false);
9907
9908 case BUILT_IN_CLASSIFY_TYPE:
9909 return fold_builtin_classify_type (NULL_TREE);
9910
9911 default:
9912 break;
9913 }
9914 return NULL_TREE;
9915 }
9916
9917 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9918 This function returns NULL_TREE if no simplification was possible. */
9919
9920 static tree
9921 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9922 {
9923 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9924 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9925 switch (fcode)
9926 {
9927 case BUILT_IN_CONSTANT_P:
9928 {
9929 tree val = fold_builtin_constant_p (arg0);
9930
9931 /* Gimplification will pull the CALL_EXPR for the builtin out of
9932 an if condition. When not optimizing, we'll not CSE it back.
9933 To avoid link-error type regressions, return false now. */
9934 if (!val && !optimize)
9935 val = integer_zero_node;
9936
9937 return val;
9938 }
9939
9940 case BUILT_IN_CLASSIFY_TYPE:
9941 return fold_builtin_classify_type (arg0);
9942
9943 case BUILT_IN_STRLEN:
9944 return fold_builtin_strlen (loc, type, arg0);
9945
9946 CASE_FLT_FN (BUILT_IN_FABS):
9947 case BUILT_IN_FABSD32:
9948 case BUILT_IN_FABSD64:
9949 case BUILT_IN_FABSD128:
9950 return fold_builtin_fabs (loc, arg0, type);
9951
9952 case BUILT_IN_ABS:
9953 case BUILT_IN_LABS:
9954 case BUILT_IN_LLABS:
9955 case BUILT_IN_IMAXABS:
9956 return fold_builtin_abs (loc, arg0, type);
9957
9958 CASE_FLT_FN (BUILT_IN_CONJ):
9959 if (validate_arg (arg0, COMPLEX_TYPE)
9960 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9961 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9962 break;
9963
9964 CASE_FLT_FN (BUILT_IN_CREAL):
9965 if (validate_arg (arg0, COMPLEX_TYPE)
9966 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9967 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9968 break;
9969
9970 CASE_FLT_FN (BUILT_IN_CIMAG):
9971 if (validate_arg (arg0, COMPLEX_TYPE)
9972 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9973 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9974 break;
9975
9976 CASE_FLT_FN (BUILT_IN_CCOS):
9977 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9978
9979 CASE_FLT_FN (BUILT_IN_CCOSH):
9980 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9981
9982 CASE_FLT_FN (BUILT_IN_CPROJ):
9983 return fold_builtin_cproj (loc, arg0, type);
9984
9985 CASE_FLT_FN (BUILT_IN_CSIN):
9986 if (validate_arg (arg0, COMPLEX_TYPE)
9987 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9988 return do_mpc_arg1 (arg0, type, mpc_sin);
9989 break;
9990
9991 CASE_FLT_FN (BUILT_IN_CSINH):
9992 if (validate_arg (arg0, COMPLEX_TYPE)
9993 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9994 return do_mpc_arg1 (arg0, type, mpc_sinh);
9995 break;
9996
9997 CASE_FLT_FN (BUILT_IN_CTAN):
9998 if (validate_arg (arg0, COMPLEX_TYPE)
9999 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10000 return do_mpc_arg1 (arg0, type, mpc_tan);
10001 break;
10002
10003 CASE_FLT_FN (BUILT_IN_CTANH):
10004 if (validate_arg (arg0, COMPLEX_TYPE)
10005 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10006 return do_mpc_arg1 (arg0, type, mpc_tanh);
10007 break;
10008
10009 CASE_FLT_FN (BUILT_IN_CLOG):
10010 if (validate_arg (arg0, COMPLEX_TYPE)
10011 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10012 return do_mpc_arg1 (arg0, type, mpc_log);
10013 break;
10014
10015 CASE_FLT_FN (BUILT_IN_CSQRT):
10016 if (validate_arg (arg0, COMPLEX_TYPE)
10017 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10018 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10019 break;
10020
10021 CASE_FLT_FN (BUILT_IN_CASIN):
10022 if (validate_arg (arg0, COMPLEX_TYPE)
10023 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10024 return do_mpc_arg1 (arg0, type, mpc_asin);
10025 break;
10026
10027 CASE_FLT_FN (BUILT_IN_CACOS):
10028 if (validate_arg (arg0, COMPLEX_TYPE)
10029 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10030 return do_mpc_arg1 (arg0, type, mpc_acos);
10031 break;
10032
10033 CASE_FLT_FN (BUILT_IN_CATAN):
10034 if (validate_arg (arg0, COMPLEX_TYPE)
10035 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10036 return do_mpc_arg1 (arg0, type, mpc_atan);
10037 break;
10038
10039 CASE_FLT_FN (BUILT_IN_CASINH):
10040 if (validate_arg (arg0, COMPLEX_TYPE)
10041 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10042 return do_mpc_arg1 (arg0, type, mpc_asinh);
10043 break;
10044
10045 CASE_FLT_FN (BUILT_IN_CACOSH):
10046 if (validate_arg (arg0, COMPLEX_TYPE)
10047 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10048 return do_mpc_arg1 (arg0, type, mpc_acosh);
10049 break;
10050
10051 CASE_FLT_FN (BUILT_IN_CATANH):
10052 if (validate_arg (arg0, COMPLEX_TYPE)
10053 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10054 return do_mpc_arg1 (arg0, type, mpc_atanh);
10055 break;
10056
10057 CASE_FLT_FN (BUILT_IN_CABS):
10058 return fold_builtin_cabs (loc, arg0, type, fndecl);
10059
10060 CASE_FLT_FN (BUILT_IN_CARG):
10061 return fold_builtin_carg (loc, arg0, type);
10062
10063 CASE_FLT_FN (BUILT_IN_SQRT):
10064 return fold_builtin_sqrt (loc, arg0, type);
10065
10066 CASE_FLT_FN (BUILT_IN_CBRT):
10067 return fold_builtin_cbrt (loc, arg0, type);
10068
10069 CASE_FLT_FN (BUILT_IN_ASIN):
10070 if (validate_arg (arg0, REAL_TYPE))
10071 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10072 &dconstm1, &dconst1, true);
10073 break;
10074
10075 CASE_FLT_FN (BUILT_IN_ACOS):
10076 if (validate_arg (arg0, REAL_TYPE))
10077 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10078 &dconstm1, &dconst1, true);
10079 break;
10080
10081 CASE_FLT_FN (BUILT_IN_ATAN):
10082 if (validate_arg (arg0, REAL_TYPE))
10083 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10084 break;
10085
10086 CASE_FLT_FN (BUILT_IN_ASINH):
10087 if (validate_arg (arg0, REAL_TYPE))
10088 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10089 break;
10090
10091 CASE_FLT_FN (BUILT_IN_ACOSH):
10092 if (validate_arg (arg0, REAL_TYPE))
10093 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10094 &dconst1, NULL, true);
10095 break;
10096
10097 CASE_FLT_FN (BUILT_IN_ATANH):
10098 if (validate_arg (arg0, REAL_TYPE))
10099 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10100 &dconstm1, &dconst1, false);
10101 break;
10102
10103 CASE_FLT_FN (BUILT_IN_SIN):
10104 if (validate_arg (arg0, REAL_TYPE))
10105 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10106 break;
10107
10108 CASE_FLT_FN (BUILT_IN_COS):
10109 return fold_builtin_cos (loc, arg0, type, fndecl);
10110
10111 CASE_FLT_FN (BUILT_IN_TAN):
10112 return fold_builtin_tan (arg0, type);
10113
10114 CASE_FLT_FN (BUILT_IN_CEXP):
10115 return fold_builtin_cexp (loc, arg0, type);
10116
10117 CASE_FLT_FN (BUILT_IN_CEXPI):
10118 if (validate_arg (arg0, REAL_TYPE))
10119 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10120 break;
10121
10122 CASE_FLT_FN (BUILT_IN_SINH):
10123 if (validate_arg (arg0, REAL_TYPE))
10124 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10125 break;
10126
10127 CASE_FLT_FN (BUILT_IN_COSH):
10128 return fold_builtin_cosh (loc, arg0, type, fndecl);
10129
10130 CASE_FLT_FN (BUILT_IN_TANH):
10131 if (validate_arg (arg0, REAL_TYPE))
10132 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10133 break;
10134
10135 CASE_FLT_FN (BUILT_IN_ERF):
10136 if (validate_arg (arg0, REAL_TYPE))
10137 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10138 break;
10139
10140 CASE_FLT_FN (BUILT_IN_ERFC):
10141 if (validate_arg (arg0, REAL_TYPE))
10142 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10143 break;
10144
10145 CASE_FLT_FN (BUILT_IN_TGAMMA):
10146 if (validate_arg (arg0, REAL_TYPE))
10147 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10148 break;
10149
10150 CASE_FLT_FN (BUILT_IN_EXP):
10151 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10152
10153 CASE_FLT_FN (BUILT_IN_EXP2):
10154 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10155
10156 CASE_FLT_FN (BUILT_IN_EXP10):
10157 CASE_FLT_FN (BUILT_IN_POW10):
10158 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10159
10160 CASE_FLT_FN (BUILT_IN_EXPM1):
10161 if (validate_arg (arg0, REAL_TYPE))
10162 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10163 break;
10164
10165 CASE_FLT_FN (BUILT_IN_LOG):
10166 if (validate_arg (arg0, REAL_TYPE))
10167 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10168 break;
10169
10170 CASE_FLT_FN (BUILT_IN_LOG2):
10171 if (validate_arg (arg0, REAL_TYPE))
10172 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10173 break;
10174
10175 CASE_FLT_FN (BUILT_IN_LOG10):
10176 if (validate_arg (arg0, REAL_TYPE))
10177 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10178 break;
10179
10180 CASE_FLT_FN (BUILT_IN_LOG1P):
10181 if (validate_arg (arg0, REAL_TYPE))
10182 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10183 &dconstm1, NULL, false);
10184 break;
10185
10186 CASE_FLT_FN (BUILT_IN_J0):
10187 if (validate_arg (arg0, REAL_TYPE))
10188 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10189 NULL, NULL, 0);
10190 break;
10191
10192 CASE_FLT_FN (BUILT_IN_J1):
10193 if (validate_arg (arg0, REAL_TYPE))
10194 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10195 NULL, NULL, 0);
10196 break;
10197
10198 CASE_FLT_FN (BUILT_IN_Y0):
10199 if (validate_arg (arg0, REAL_TYPE))
10200 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10201 &dconst0, NULL, false);
10202 break;
10203
10204 CASE_FLT_FN (BUILT_IN_Y1):
10205 if (validate_arg (arg0, REAL_TYPE))
10206 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10207 &dconst0, NULL, false);
10208 break;
10209
10210 CASE_FLT_FN (BUILT_IN_NAN):
10211 case BUILT_IN_NAND32:
10212 case BUILT_IN_NAND64:
10213 case BUILT_IN_NAND128:
10214 return fold_builtin_nan (arg0, type, true);
10215
10216 CASE_FLT_FN (BUILT_IN_NANS):
10217 return fold_builtin_nan (arg0, type, false);
10218
10219 CASE_FLT_FN (BUILT_IN_FLOOR):
10220 return fold_builtin_floor (loc, fndecl, arg0);
10221
10222 CASE_FLT_FN (BUILT_IN_CEIL):
10223 return fold_builtin_ceil (loc, fndecl, arg0);
10224
10225 CASE_FLT_FN (BUILT_IN_TRUNC):
10226 return fold_builtin_trunc (loc, fndecl, arg0);
10227
10228 CASE_FLT_FN (BUILT_IN_ROUND):
10229 return fold_builtin_round (loc, fndecl, arg0);
10230
10231 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10232 CASE_FLT_FN (BUILT_IN_RINT):
10233 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10234
10235 CASE_FLT_FN (BUILT_IN_ICEIL):
10236 CASE_FLT_FN (BUILT_IN_LCEIL):
10237 CASE_FLT_FN (BUILT_IN_LLCEIL):
10238 CASE_FLT_FN (BUILT_IN_LFLOOR):
10239 CASE_FLT_FN (BUILT_IN_IFLOOR):
10240 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10241 CASE_FLT_FN (BUILT_IN_IROUND):
10242 CASE_FLT_FN (BUILT_IN_LROUND):
10243 CASE_FLT_FN (BUILT_IN_LLROUND):
10244 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10245
10246 CASE_FLT_FN (BUILT_IN_IRINT):
10247 CASE_FLT_FN (BUILT_IN_LRINT):
10248 CASE_FLT_FN (BUILT_IN_LLRINT):
10249 return fold_fixed_mathfn (loc, fndecl, arg0);
10250
10251 case BUILT_IN_BSWAP16:
10252 case BUILT_IN_BSWAP32:
10253 case BUILT_IN_BSWAP64:
10254 return fold_builtin_bswap (fndecl, arg0);
10255
10256 CASE_INT_FN (BUILT_IN_FFS):
10257 CASE_INT_FN (BUILT_IN_CLZ):
10258 CASE_INT_FN (BUILT_IN_CTZ):
10259 CASE_INT_FN (BUILT_IN_CLRSB):
10260 CASE_INT_FN (BUILT_IN_POPCOUNT):
10261 CASE_INT_FN (BUILT_IN_PARITY):
10262 return fold_builtin_bitop (fndecl, arg0);
10263
10264 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10265 return fold_builtin_signbit (loc, arg0, type);
10266
10267 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10268 return fold_builtin_significand (loc, arg0, type);
10269
10270 CASE_FLT_FN (BUILT_IN_ILOGB):
10271 CASE_FLT_FN (BUILT_IN_LOGB):
10272 return fold_builtin_logb (loc, arg0, type);
10273
10274 case BUILT_IN_ISASCII:
10275 return fold_builtin_isascii (loc, arg0);
10276
10277 case BUILT_IN_TOASCII:
10278 return fold_builtin_toascii (loc, arg0);
10279
10280 case BUILT_IN_ISDIGIT:
10281 return fold_builtin_isdigit (loc, arg0);
10282
10283 CASE_FLT_FN (BUILT_IN_FINITE):
10284 case BUILT_IN_FINITED32:
10285 case BUILT_IN_FINITED64:
10286 case BUILT_IN_FINITED128:
10287 case BUILT_IN_ISFINITE:
10288 {
10289 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10290 if (ret)
10291 return ret;
10292 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10293 }
10294
10295 CASE_FLT_FN (BUILT_IN_ISINF):
10296 case BUILT_IN_ISINFD32:
10297 case BUILT_IN_ISINFD64:
10298 case BUILT_IN_ISINFD128:
10299 {
10300 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10301 if (ret)
10302 return ret;
10303 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10304 }
10305
10306 case BUILT_IN_ISNORMAL:
10307 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10308
10309 case BUILT_IN_ISINF_SIGN:
10310 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10311
10312 CASE_FLT_FN (BUILT_IN_ISNAN):
10313 case BUILT_IN_ISNAND32:
10314 case BUILT_IN_ISNAND64:
10315 case BUILT_IN_ISNAND128:
10316 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10317
10318 case BUILT_IN_FREE:
10319 if (integer_zerop (arg0))
10320 return build_empty_stmt (loc);
10321 break;
10322
10323 default:
10324 break;
10325 }
10326
10327 return NULL_TREE;
10328
10329 }
10330
10331 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10332 This function returns NULL_TREE if no simplification was possible. */
10333
10334 static tree
10335 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10336 {
10337 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10338 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10339
10340 switch (fcode)
10341 {
10342 CASE_FLT_FN (BUILT_IN_JN):
10343 if (validate_arg (arg0, INTEGER_TYPE)
10344 && validate_arg (arg1, REAL_TYPE))
10345 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10346 break;
10347
10348 CASE_FLT_FN (BUILT_IN_YN):
10349 if (validate_arg (arg0, INTEGER_TYPE)
10350 && validate_arg (arg1, REAL_TYPE))
10351 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10352 &dconst0, false);
10353 break;
10354
10355 CASE_FLT_FN (BUILT_IN_DREM):
10356 CASE_FLT_FN (BUILT_IN_REMAINDER):
10357 if (validate_arg (arg0, REAL_TYPE)
10358 && validate_arg (arg1, REAL_TYPE))
10359 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10360 break;
10361
10362 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10363 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10364 if (validate_arg (arg0, REAL_TYPE)
10365 && validate_arg (arg1, POINTER_TYPE))
10366 return do_mpfr_lgamma_r (arg0, arg1, type);
10367 break;
10368
10369 CASE_FLT_FN (BUILT_IN_ATAN2):
10370 if (validate_arg (arg0, REAL_TYPE)
10371 && validate_arg (arg1, REAL_TYPE))
10372 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10373 break;
10374
10375 CASE_FLT_FN (BUILT_IN_FDIM):
10376 if (validate_arg (arg0, REAL_TYPE)
10377 && validate_arg (arg1, REAL_TYPE))
10378 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10379 break;
10380
10381 CASE_FLT_FN (BUILT_IN_HYPOT):
10382 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10383
10384 CASE_FLT_FN (BUILT_IN_CPOW):
10385 if (validate_arg (arg0, COMPLEX_TYPE)
10386 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10387 && validate_arg (arg1, COMPLEX_TYPE)
10388 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10389 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10390 break;
10391
10392 CASE_FLT_FN (BUILT_IN_LDEXP):
10393 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10394 CASE_FLT_FN (BUILT_IN_SCALBN):
10395 CASE_FLT_FN (BUILT_IN_SCALBLN):
10396 return fold_builtin_load_exponent (loc, arg0, arg1,
10397 type, /*ldexp=*/false);
10398
10399 CASE_FLT_FN (BUILT_IN_FREXP):
10400 return fold_builtin_frexp (loc, arg0, arg1, type);
10401
10402 CASE_FLT_FN (BUILT_IN_MODF):
10403 return fold_builtin_modf (loc, arg0, arg1, type);
10404
10405 case BUILT_IN_STRSTR:
10406 return fold_builtin_strstr (loc, arg0, arg1, type);
10407
10408 case BUILT_IN_STRSPN:
10409 return fold_builtin_strspn (loc, arg0, arg1);
10410
10411 case BUILT_IN_STRCSPN:
10412 return fold_builtin_strcspn (loc, arg0, arg1);
10413
10414 case BUILT_IN_STRCHR:
10415 case BUILT_IN_INDEX:
10416 return fold_builtin_strchr (loc, arg0, arg1, type);
10417
10418 case BUILT_IN_STRRCHR:
10419 case BUILT_IN_RINDEX:
10420 return fold_builtin_strrchr (loc, arg0, arg1, type);
10421
10422 case BUILT_IN_STRCMP:
10423 return fold_builtin_strcmp (loc, arg0, arg1);
10424
10425 case BUILT_IN_STRPBRK:
10426 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10427
10428 case BUILT_IN_EXPECT:
10429 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10430
10431 CASE_FLT_FN (BUILT_IN_POW):
10432 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10433
10434 CASE_FLT_FN (BUILT_IN_POWI):
10435 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10436
10437 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10438 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10439
10440 CASE_FLT_FN (BUILT_IN_FMIN):
10441 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10442
10443 CASE_FLT_FN (BUILT_IN_FMAX):
10444 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10445
10446 case BUILT_IN_ISGREATER:
10447 return fold_builtin_unordered_cmp (loc, fndecl,
10448 arg0, arg1, UNLE_EXPR, LE_EXPR);
10449 case BUILT_IN_ISGREATEREQUAL:
10450 return fold_builtin_unordered_cmp (loc, fndecl,
10451 arg0, arg1, UNLT_EXPR, LT_EXPR);
10452 case BUILT_IN_ISLESS:
10453 return fold_builtin_unordered_cmp (loc, fndecl,
10454 arg0, arg1, UNGE_EXPR, GE_EXPR);
10455 case BUILT_IN_ISLESSEQUAL:
10456 return fold_builtin_unordered_cmp (loc, fndecl,
10457 arg0, arg1, UNGT_EXPR, GT_EXPR);
10458 case BUILT_IN_ISLESSGREATER:
10459 return fold_builtin_unordered_cmp (loc, fndecl,
10460 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10461 case BUILT_IN_ISUNORDERED:
10462 return fold_builtin_unordered_cmp (loc, fndecl,
10463 arg0, arg1, UNORDERED_EXPR,
10464 NOP_EXPR);
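/* An illustrative sketch of these foldings (not additional dispatch):
   __builtin_isgreater (x, y) turns into the negation of an UNLE_EXPR
   comparison, i.e. roughly !(x <= y) spelled with the
   unordered-or-less-equal code, so the result is false for NaN
   operands and no invalid-operand exception is raised.  */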
10465
10466 /* We do the folding for va_start in the expander. */
10467 case BUILT_IN_VA_START:
10468 break;
10469
10470 case BUILT_IN_OBJECT_SIZE:
10471 return fold_builtin_object_size (arg0, arg1);
10472
10473 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10474 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10475
10476 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10477 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10478
10479 default:
10480 break;
10481 }
10482 return NULL_TREE;
10483 }
10484
10485 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10486 and ARG2.
10487 This function returns NULL_TREE if no simplification was possible. */
10488
10489 static tree
10490 fold_builtin_3 (location_t loc, tree fndecl,
10491 tree arg0, tree arg1, tree arg2)
10492 {
10493 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10494 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10495 switch (fcode)
10496 {
10498 CASE_FLT_FN (BUILT_IN_SINCOS):
10499 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10500
10501 CASE_FLT_FN (BUILT_IN_FMA):
10502 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10504
10505 CASE_FLT_FN (BUILT_IN_REMQUO):
10506 if (validate_arg (arg0, REAL_TYPE)
10507 && validate_arg (arg1, REAL_TYPE)
10508 && validate_arg (arg2, POINTER_TYPE))
10509 return do_mpfr_remquo (arg0, arg1, arg2);
10510 break;
10511
10512 case BUILT_IN_STRNCMP:
10513 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10514
10515 case BUILT_IN_MEMCHR:
10516 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10517
10518 case BUILT_IN_BCMP:
10519 case BUILT_IN_MEMCMP:
10520 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10521
10522 case BUILT_IN_EXPECT:
10523 return fold_builtin_expect (loc, arg0, arg1, arg2);
10524
10525 case BUILT_IN_ADD_OVERFLOW:
10526 case BUILT_IN_SUB_OVERFLOW:
10527 case BUILT_IN_MUL_OVERFLOW:
10528 case BUILT_IN_SADD_OVERFLOW:
10529 case BUILT_IN_SADDL_OVERFLOW:
10530 case BUILT_IN_SADDLL_OVERFLOW:
10531 case BUILT_IN_SSUB_OVERFLOW:
10532 case BUILT_IN_SSUBL_OVERFLOW:
10533 case BUILT_IN_SSUBLL_OVERFLOW:
10534 case BUILT_IN_SMUL_OVERFLOW:
10535 case BUILT_IN_SMULL_OVERFLOW:
10536 case BUILT_IN_SMULLL_OVERFLOW:
10537 case BUILT_IN_UADD_OVERFLOW:
10538 case BUILT_IN_UADDL_OVERFLOW:
10539 case BUILT_IN_UADDLL_OVERFLOW:
10540 case BUILT_IN_USUB_OVERFLOW:
10541 case BUILT_IN_USUBL_OVERFLOW:
10542 case BUILT_IN_USUBLL_OVERFLOW:
10543 case BUILT_IN_UMUL_OVERFLOW:
10544 case BUILT_IN_UMULL_OVERFLOW:
10545 case BUILT_IN_UMULLL_OVERFLOW:
10546 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
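/* A behavioral sketch of the folding above (assuming a 32-bit int):
   __builtin_add_overflow (__INT_MAX__, 1, &r) is rewritten in terms
   of the ADD_OVERFLOW internal function and, the operands being
   constant, eventually folds to storing INT_MIN through r and
   yielding 1, while __builtin_add_overflow (1, 2, &r) folds to
   storing 3 and yielding 0.  */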
10547
10548 default:
10549 break;
10550 }
10551 return NULL_TREE;
10552 }
10553
10554 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10555 arguments. The trailing bool parameter, formerly IGNORE, is unused
10556 and so left unnamed. This function returns NULL_TREE if no
10557 simplification was possible. */
10558
10559 tree
10560 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10561 {
10562 tree ret = NULL_TREE;
10563
10564 switch (nargs)
10565 {
10566 case 0:
10567 ret = fold_builtin_0 (loc, fndecl);
10568 break;
10569 case 1:
10570 ret = fold_builtin_1 (loc, fndecl, args[0]);
10571 break;
10572 case 2:
10573 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10574 break;
10575 case 3:
10576 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10577 break;
10578 default:
10579 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10580 break;
10581 }
10582 if (ret)
10583 {
10584 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10585 SET_EXPR_LOCATION (ret, loc);
10586 TREE_NO_WARNING (ret) = 1;
10587 return ret;
10588 }
10589 return NULL_TREE;
10590 }
10591
10592 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10593 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10594 of arguments in ARGS to be omitted. OLDNARGS is the number of
10595 elements in ARGS. */
10596
10597 static tree
10598 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10599 int skip, tree fndecl, int n, va_list newargs)
10600 {
10601 int nargs = oldnargs - skip + n;
10602 tree *buffer;
10603
10604 if (n > 0)
10605 {
10606 int i, j;
10607
10608 buffer = XALLOCAVEC (tree, nargs);
10609 for (i = 0; i < n; i++)
10610 buffer[i] = va_arg (newargs, tree);
10611 for (j = skip; j < oldnargs; j++, i++)
10612 buffer[i] = args[j];
10613 }
10614 else
10615 buffer = args + skip;
10616
10617 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10618 }
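/* A hypothetical worked example: with OLDNARGS == 3, SKIP == 1 and
   N == 2, the rewritten call receives newargs[0], newargs[1],
   args[1], args[2] -- the first original argument is dropped and the
   two fresh arguments are prepended.  */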
10619
10620 /* Return true if FNDECL shouldn't be folded right now.
10621 If a built-in function has an inline attribute always_inline
10622 wrapper, defer folding it after always_inline functions have
10623 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10624 might not be performed. */
10625
10626 bool
10627 avoid_folding_inline_builtin (tree fndecl)
10628 {
10629 return (DECL_DECLARED_INLINE_P (fndecl)
10630 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10631 && cfun
10632 && !cfun->always_inline_functions_inlined
10633 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10634 }
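/* For example, with -D_FORTIFY_SOURCE glibc wraps string builtins in
   always_inline inline functions roughly like this (a sketch from
   memory; the real wrapper and its object-size macro are
   libc-specific):

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
				      __builtin_object_size (__dest, 1));
     }

   Folding the builtin before such a wrapper is inlined would lose the
   object-size check, hence callers consult this predicate first.  */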
10635
10636 /* A wrapper function for builtin folding that prevents warnings for
10637 "statement without effect" and the like, caused by removing the
10638 call node earlier than the warning is generated. */
10639
10640 tree
10641 fold_call_expr (location_t loc, tree exp, bool ignore)
10642 {
10643 tree ret = NULL_TREE;
10644 tree fndecl = get_callee_fndecl (exp);
10645 if (fndecl
10646 && TREE_CODE (fndecl) == FUNCTION_DECL
10647 && DECL_BUILT_IN (fndecl)
10648 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10649 yet. Defer folding until we see all the arguments
10650 (after inlining). */
10651 && !CALL_EXPR_VA_ARG_PACK (exp))
10652 {
10653 int nargs = call_expr_nargs (exp);
10654
10655 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10656 instead last argument is __builtin_va_arg_pack (). Defer folding
10657 even in that case, until arguments are finalized. */
10658 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10659 {
10660 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10661 if (fndecl2
10662 && TREE_CODE (fndecl2) == FUNCTION_DECL
10663 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10664 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10665 return NULL_TREE;
10666 }
10667
10668 if (avoid_folding_inline_builtin (fndecl))
10669 return NULL_TREE;
10670
10671 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10672 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10673 CALL_EXPR_ARGP (exp), ignore);
10674 else
10675 {
10676 tree *args = CALL_EXPR_ARGP (exp);
10677 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10678 if (ret)
10679 return ret;
10680 }
10681 }
10682 return NULL_TREE;
10683 }
10684
10685 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10686 N arguments are passed in the array ARGARRAY. Return a folded
10687 expression or NULL_TREE if no simplification was possible. */
10688
10689 tree
10690 fold_builtin_call_array (location_t loc, tree,
10691 tree fn,
10692 int n,
10693 tree *argarray)
10694 {
10695 if (TREE_CODE (fn) != ADDR_EXPR)
10696 return NULL_TREE;
10697
10698 tree fndecl = TREE_OPERAND (fn, 0);
10699 if (TREE_CODE (fndecl) == FUNCTION_DECL
10700 && DECL_BUILT_IN (fndecl))
10701 {
10702 /* If last argument is __builtin_va_arg_pack (), arguments to this
10703 function are not finalized yet. Defer folding until they are. */
10704 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10705 {
10706 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10707 if (fndecl2
10708 && TREE_CODE (fndecl2) == FUNCTION_DECL
10709 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10710 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10711 return NULL_TREE;
10712 }
10713 if (avoid_folding_inline_builtin (fndecl))
10714 return NULL_TREE;
10715 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10716 return targetm.fold_builtin (fndecl, n, argarray, false);
10717 else
10718 return fold_builtin_n (loc, fndecl, argarray, n, false);
10719 }
10720
10721 return NULL_TREE;
10722 }
10723
10724 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10725 along with N new arguments specified as the "..." parameters. SKIP
10726 is the number of arguments in EXP to be omitted. This function is used
10727 to do varargs-to-varargs transformations. */
10728
10729 static tree
10730 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10731 {
10732 va_list ap;
10733 tree t;
10734
10735 va_start (ap, n);
10736 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10737 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10738 va_end (ap);
10739
10740 return t;
10741 }
10742
10743 /* Validate a single argument ARG against a tree code CODE representing
10744 a type. */
10745
10746 static bool
10747 validate_arg (const_tree arg, enum tree_code code)
10748 {
10749 if (!arg)
10750 return false;
10751 else if (code == POINTER_TYPE)
10752 return POINTER_TYPE_P (TREE_TYPE (arg));
10753 else if (code == INTEGER_TYPE)
10754 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10755 return code == TREE_CODE (TREE_TYPE (arg));
10756 }
10757
10758 /* This function validates the types of a function call argument list
10759 against a specified list of tree_codes. If the last specifier is a 0,
10760 that represents an ellipsis; otherwise the last specifier must be a
10761 VOID_TYPE.
10762
10763 This is the GIMPLE version of validate_arglist. Eventually we want to
10764 completely convert builtins.c to work from GIMPLEs and the tree based
10765 validate_arglist will then be removed. */
10766
10767 bool
10768 validate_gimple_arglist (const gcall *call, ...)
10769 {
10770 enum tree_code code;
10771 bool res = false;
10772 va_list ap;
10773 const_tree arg;
10774 size_t i;
10775
10776 va_start (ap, call);
10777 i = 0;
10778
10779 do
10780 {
10781 code = (enum tree_code) va_arg (ap, int);
10782 switch (code)
10783 {
10784 case 0:
10785 /* This signifies an ellipsis; any further arguments are all ok. */
10786 res = true;
10787 goto end;
10788 case VOID_TYPE:
10789 /* This signifies an endlink, if no arguments remain, return
10790 true, otherwise return false. */
10791 res = (i == gimple_call_num_args (call));
10792 goto end;
10793 default:
10794 /* If no parameters remain or the parameter's code does not
10795 match the specified code, return false. Otherwise continue
10796 checking any remaining arguments. */
10797 arg = gimple_call_arg (call, i++);
10798 if (!validate_arg (arg, code))
10799 goto end;
10800 break;
10801 }
10802 }
10803 while (1);
10804
10805 /* We need gotos here since va_end must be called exactly once,
10806 whichever way we leave the loop above. */
10807 end: ;
10808 va_end (ap);
10809
10810 return res;
10811 }
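/* Typical usage, here for a call expected to look like
   memchr (s, c, n) (an illustrative invocation, not a new caller):

     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
				   INTEGER_TYPE, VOID_TYPE))
       return NULL_TREE;

   The trailing VOID_TYPE demands exactly three arguments; ending the
   list with 0 instead would accept any extra arguments.  */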
10812
10813 /* Default target-specific builtin expander that does nothing. */
10814
10815 rtx
10816 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10817 rtx target ATTRIBUTE_UNUSED,
10818 rtx subtarget ATTRIBUTE_UNUSED,
10819 machine_mode mode ATTRIBUTE_UNUSED,
10820 int ignore ATTRIBUTE_UNUSED)
10821 {
10822 return NULL_RTX;
10823 }
10824
10825 /* Returns true if EXP represents data that would potentially reside
10826 in a readonly section. */
10827
10828 bool
10829 readonly_data_expr (tree exp)
10830 {
10831 STRIP_NOPS (exp);
10832
10833 if (TREE_CODE (exp) != ADDR_EXPR)
10834 return false;
10835
10836 exp = get_base_address (TREE_OPERAND (exp, 0));
10837 if (!exp)
10838 return false;
10839
10840 /* Make sure we call decl_readonly_section only for trees it
10841 can handle (since it returns true for everything it doesn't
10842 understand). */
10843 if (TREE_CODE (exp) == STRING_CST
10844 || TREE_CODE (exp) == CONSTRUCTOR
10845 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10846 return decl_readonly_section (exp, 0);
10847 else
10848 return false;
10849 }
10850
10851 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10852 to the call, and TYPE is its return type.
10853
10854 Return NULL_TREE if no simplification was possible, otherwise return the
10855 simplified form of the call as a tree.
10856
10857 The simplified form may be a constant or other expression which
10858 computes the same value, but in a more efficient manner (including
10859 calls to other builtin functions).
10860
10861 The call may contain arguments which need to be evaluated, but
10862 which are not useful to determine the result of the call. In
10863 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10864 COMPOUND_EXPR will be an argument which must be evaluated.
10865 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10866 COMPOUND_EXPR in the chain will contain the tree for the simplified
10867 form of the builtin function call. */
10868
10869 static tree
10870 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10871 {
10872 if (!validate_arg (s1, POINTER_TYPE)
10873 || !validate_arg (s2, POINTER_TYPE))
10874 return NULL_TREE;
10875 else
10876 {
10877 tree fn;
10878 const char *p1, *p2;
10879
10880 p2 = c_getstr (s2);
10881 if (p2 == NULL)
10882 return NULL_TREE;
10883
10884 p1 = c_getstr (s1);
10885 if (p1 != NULL)
10886 {
10887 const char *r = strstr (p1, p2);
10888 tree tem;
10889
10890 if (r == NULL)
10891 return build_int_cst (TREE_TYPE (s1), 0);
10892
10893 /* Return an offset into the constant string argument. */
10894 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10895 return fold_convert_loc (loc, type, tem);
10896 }
10897
10898 /* The argument is const char *, and the result is char *, so we need
10899 a type conversion here to avoid a warning. */
10900 if (p2[0] == '\0')
10901 return fold_convert_loc (loc, type, s1);
10902
10903 if (p2[1] != '\0')
10904 return NULL_TREE;
10905
10906 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10907 if (!fn)
10908 return NULL_TREE;
10909
10910 /* New argument list transforming strstr(s1, s2) to
10911 strchr(s1, s2[0]). */
10912 return build_call_expr_loc (loc, fn, 2, s1,
10913 build_int_cst (integer_type_node, p2[0]));
10914 }
10915 }
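/* Illustrative instances of the transformations above: strstr (s, "")
   folds to s converted to the return type, strstr (s, "c") becomes
   strchr (s, 'c'), and the all-constant strstr ("hello", "ll") folds
   to the address "hello" + 2.  */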
10916
10917 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10918 the call, and TYPE is its return type.
10919
10920 Return NULL_TREE if no simplification was possible, otherwise return the
10921 simplified form of the call as a tree.
10922
10923 The simplified form may be a constant or other expression which
10924 computes the same value, but in a more efficient manner (including
10925 calls to other builtin functions).
10926
10927 The call may contain arguments which need to be evaluated, but
10928 which are not useful to determine the result of the call. In
10929 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10930 COMPOUND_EXPR will be an argument which must be evaluated.
10931 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10932 COMPOUND_EXPR in the chain will contain the tree for the simplified
10933 form of the builtin function call. */
10934
10935 static tree
10936 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10937 {
10938 if (!validate_arg (s1, POINTER_TYPE)
10939 || !validate_arg (s2, INTEGER_TYPE))
10940 return NULL_TREE;
10941 else
10942 {
10943 const char *p1;
10944
10945 if (TREE_CODE (s2) != INTEGER_CST)
10946 return NULL_TREE;
10947
10948 p1 = c_getstr (s1);
10949 if (p1 != NULL)
10950 {
10951 char c;
10952 const char *r;
10953 tree tem;
10954
10955 if (target_char_cast (s2, &c))
10956 return NULL_TREE;
10957
10958 r = strchr (p1, c);
10959
10960 if (r == NULL)
10961 return build_int_cst (TREE_TYPE (s1), 0);
10962
10963 /* Return an offset into the constant string argument. */
10964 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10965 return fold_convert_loc (loc, type, tem);
10966 }
10967 return NULL_TREE;
10968 }
10969 }
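/* For instance, with a constant first argument strchr ("hello", 'l')
   folds to the address "hello" + 2, while strchr ("hello", 'z') folds
   to a null pointer (illustrative constants only).  */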
10970
10971 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10972 the call, and TYPE is its return type.
10973
10974 Return NULL_TREE if no simplification was possible, otherwise return the
10975 simplified form of the call as a tree.
10976
10977 The simplified form may be a constant or other expression which
10978 computes the same value, but in a more efficient manner (including
10979 calls to other builtin functions).
10980
10981 The call may contain arguments which need to be evaluated, but
10982 which are not useful to determine the result of the call. In
10983 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10984 COMPOUND_EXPR will be an argument which must be evaluated.
10985 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10986 COMPOUND_EXPR in the chain will contain the tree for the simplified
10987 form of the builtin function call. */
10988
10989 static tree
10990 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10991 {
10992 if (!validate_arg (s1, POINTER_TYPE)
10993 || !validate_arg (s2, INTEGER_TYPE))
10994 return NULL_TREE;
10995 else
10996 {
10997 tree fn;
10998 const char *p1;
10999
11000 if (TREE_CODE (s2) != INTEGER_CST)
11001 return NULL_TREE;
11002
11003 p1 = c_getstr (s1);
11004 if (p1 != NULL)
11005 {
11006 char c;
11007 const char *r;
11008 tree tem;
11009
11010 if (target_char_cast (s2, &c))
11011 return NULL_TREE;
11012
11013 r = strrchr (p1, c);
11014
11015 if (r == NULL)
11016 return build_int_cst (TREE_TYPE (s1), 0);
11017
11018 /* Return an offset into the constant string argument. */
11019 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11020 return fold_convert_loc (loc, type, tem);
11021 }
11022
11023 if (! integer_zerop (s2))
11024 return NULL_TREE;
11025
11026 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11027 if (!fn)
11028 return NULL_TREE;
11029
11030 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11031 return build_call_expr_loc (loc, fn, 2, s1, s2);
11032 }
11033 }
11034
11035 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11036 to the call, and TYPE is its return type.
11037
11038 Return NULL_TREE if no simplification was possible, otherwise return the
11039 simplified form of the call as a tree.
11040
11041 The simplified form may be a constant or other expression which
11042 computes the same value, but in a more efficient manner (including
11043 calls to other builtin functions).
11044
11045 The call may contain arguments which need to be evaluated, but
11046 which are not useful to determine the result of the call. In
11047 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11048 COMPOUND_EXPR will be an argument which must be evaluated.
11049 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11050 COMPOUND_EXPR in the chain will contain the tree for the simplified
11051 form of the builtin function call. */
11052
11053 static tree
11054 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11055 {
11056 if (!validate_arg (s1, POINTER_TYPE)
11057 || !validate_arg (s2, POINTER_TYPE))
11058 return NULL_TREE;
11059 else
11060 {
11061 tree fn;
11062 const char *p1, *p2;
11063
11064 p2 = c_getstr (s2);
11065 if (p2 == NULL)
11066 return NULL_TREE;
11067
11068 p1 = c_getstr (s1);
11069 if (p1 != NULL)
11070 {
11071 const char *r = strpbrk (p1, p2);
11072 tree tem;
11073
11074 if (r == NULL)
11075 return build_int_cst (TREE_TYPE (s1), 0);
11076
11077 /* Return an offset into the constant string argument. */
11078 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11079 return fold_convert_loc (loc, type, tem);
11080 }
11081
11082 if (p2[0] == '\0')
11083 /* strpbrk(x, "") == NULL.
11084 Evaluate and ignore s1 in case it had side-effects. */
11085 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11086
11087 if (p2[1] != '\0')
11088 return NULL_TREE; /* Really call strpbrk. */
11089
11090 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11091 if (!fn)
11092 return NULL_TREE;
11093
11094 /* New argument list transforming strpbrk(s1, s2) to
11095 strchr(s1, s2[0]). */
11096 return build_call_expr_loc (loc, fn, 2, s1,
11097 build_int_cst (integer_type_node, p2[0]));
11098 }
11099 }
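/* Illustrative cases: strpbrk (s, "") yields a null pointer while
   still evaluating s, strpbrk (s, "c") becomes strchr (s, 'c'), and
   the all-constant strpbrk ("hello", "lo") folds to "hello" + 2.  */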
11100
11101 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11102 to the call.
11103
11104 Return NULL_TREE if no simplification was possible, otherwise return the
11105 simplified form of the call as a tree.
11106
11107 The simplified form may be a constant or other expression which
11108 computes the same value, but in a more efficient manner (including
11109 calls to other builtin functions).
11110
11111 The call may contain arguments which need to be evaluated, but
11112 which are not useful to determine the result of the call. In
11113 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11114 COMPOUND_EXPR will be an argument which must be evaluated.
11115 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11116 COMPOUND_EXPR in the chain will contain the tree for the simplified
11117 form of the builtin function call. */
11118
11119 static tree
11120 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11121 {
11122 if (!validate_arg (s1, POINTER_TYPE)
11123 || !validate_arg (s2, POINTER_TYPE))
11124 return NULL_TREE;
11125 else
11126 {
11127 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11128
11129 /* If both arguments are constants, evaluate at compile-time. */
11130 if (p1 && p2)
11131 {
11132 const size_t r = strspn (p1, p2);
11133 return build_int_cst (size_type_node, r);
11134 }
11135
11136 /* If either argument is "", the result is 0. */
11137 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11138 /* Evaluate and ignore both arguments in case either one has
11139 side-effects. */
11140 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11141 s1, s2);
11142 return NULL_TREE;
11143 }
11144 }
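/* E.g. the all-constant strspn ("abcba", "ab") folds to 2 at compile
   time: the initial segment "ab" draws only on the accept set and the
   'c' stops the scan (an illustrative example of the host strspn
   evaluation above).  */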
11145
11146 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11147 to the call.
11148
11149 Return NULL_TREE if no simplification was possible, otherwise return the
11150 simplified form of the call as a tree.
11151
11152 The simplified form may be a constant or other expression which
11153 computes the same value, but in a more efficient manner (including
11154 calls to other builtin functions).
11155
11156 The call may contain arguments which need to be evaluated, but
11157 which are not useful to determine the result of the call. In
11158 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11159 COMPOUND_EXPR will be an argument which must be evaluated.
11160 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11161 COMPOUND_EXPR in the chain will contain the tree for the simplified
11162 form of the builtin function call. */
11163
11164 static tree
11165 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11166 {
11167 if (!validate_arg (s1, POINTER_TYPE)
11168 || !validate_arg (s2, POINTER_TYPE))
11169 return NULL_TREE;
11170 else
11171 {
11172 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11173
11174 /* If both arguments are constants, evaluate at compile-time. */
11175 if (p1 && p2)
11176 {
11177 const size_t r = strcspn (p1, p2);
11178 return build_int_cst (size_type_node, r);
11179 }
11180
11181 /* If the first argument is "", the result is 0. */
11182 if (p1 && *p1 == '\0')
11183 {
11184 /* Evaluate and ignore argument s2 in case it has
11185 side-effects. */
11186 return omit_one_operand_loc (loc, size_type_node,
11187 size_zero_node, s2);
11188 }
11189
11190 /* If the second argument is "", return __builtin_strlen(s1). */
11191 if (p2 && *p2 == '\0')
11192 {
11193 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11194
11195 /* If the replacement _DECL isn't initialized, don't do the
11196 transformation. */
11197 if (!fn)
11198 return NULL_TREE;
11199
11200 return build_call_expr_loc (loc, fn, 1, s1);
11201 }
11202 return NULL_TREE;
11203 }
11204 }
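/* Illustrative cases: the all-constant strcspn ("abcba", "c") folds
   to 2, and strcspn (s, "") becomes __builtin_strlen (s), since an
   empty reject set can never stop the scan.  */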
11205
11206 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11207 produced. False otherwise. This is done so that we don't output the error
11208 or warning twice or three times. */
11209
11210 bool
11211 fold_builtin_next_arg (tree exp, bool va_start_p)
11212 {
11213 tree fntype = TREE_TYPE (current_function_decl);
11214 int nargs = call_expr_nargs (exp);
11215 tree arg;
11216 /* There is a good chance the current input_location points inside the
11217 definition of the va_start macro (perhaps on the token for the
11218 builtin) in a system header, so warnings will not be emitted.
11219 Use the location in real source code. */
11220 source_location current_location =
11221 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11222 NULL);
11223
11224 if (!stdarg_p (fntype))
11225 {
11226 error ("%<va_start%> used in function with fixed args");
11227 return true;
11228 }
11229
11230 if (va_start_p)
11231 {
11232 if (nargs != 2)
11233 {
11234 error ("wrong number of arguments to function %<va_start%>");
11235 return true;
11236 }
11237 arg = CALL_EXPR_ARG (exp, 1);
11238 }
11239 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
11240 once we have checked the arguments and, if needed, issued a warning. */
11241 else
11242 {
11243 if (nargs == 0)
11244 {
11245 /* Evidently an out-of-date version of <stdarg.h>; can't validate
11246 va_start's second argument, but can still work as intended. */
11247 warning_at (current_location,
11248 OPT_Wvarargs,
11249 "%<__builtin_next_arg%> called without an argument");
11250 return true;
11251 }
11252 else if (nargs > 1)
11253 {
11254 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11255 return true;
11256 }
11257 arg = CALL_EXPR_ARG (exp, 0);
11258 }
11259
11260 if (TREE_CODE (arg) == SSA_NAME)
11261 arg = SSA_NAME_VAR (arg);
11262
11263 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11264 or __builtin_next_arg (0) the first time we see it, after checking
11265 the arguments and if needed issuing a warning. */
11266 if (!integer_zerop (arg))
11267 {
11268 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11269
11270 /* Strip off all nops for the sake of the comparison. This
11271 is not quite the same as STRIP_NOPS. It does more.
11272 We must also strip off INDIRECT_EXPR for C++ reference
11273 parameters. */
11274 while (CONVERT_EXPR_P (arg)
11275 || TREE_CODE (arg) == INDIRECT_REF)
11276 arg = TREE_OPERAND (arg, 0);
11277 if (arg != last_parm)
11278 {
11279 /* FIXME: Sometimes the tree optimizers hand us something other
11280 than the last argument even though the user did use the last
11281 argument. We just warn and set the arg to be the last
11282 argument so that we do not generate wrong code because of
11283 it. */
11284 warning_at (current_location,
11285 OPT_Wvarargs,
11286 "second parameter of %<va_start%> not last named argument");
11287 }
11288
11289 /* Undefined by C99 7.15.1.4p4 (va_start):
11290 "If the parameter parmN is declared with the register storage
11291 class, with a function or array type, or with a type that is
11292 not compatible with the type that results after application of
11293 the default argument promotions, the behavior is undefined."
11294 */
11295 else if (DECL_REGISTER (arg))
11296 {
11297 warning_at (current_location,
11298 OPT_Wvarargs,
11299 "undefined behaviour when second parameter of "
11300 "%<va_start%> is declared with %<register%> storage");
11301 }
11302
11303 /* We want to verify the second parameter just once before the tree
11304 optimizers are run and then avoid keeping it in the tree,
11305 as otherwise we could warn even for correct code like:
11306 void foo (int i, ...)
11307 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11308 if (va_start_p)
11309 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11310 else
11311 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11312 }
11313 return false;
11314 }
11315
11316
11317 /* Expand a call EXP to __builtin_object_size. */
11318
11319 static rtx
11320 expand_builtin_object_size (tree exp)
11321 {
11322 tree ost;
11323 int object_size_type;
11324 tree fndecl = get_callee_fndecl (exp);
11325
11326 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11327 {
11328 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11329 exp, fndecl);
11330 expand_builtin_trap ();
11331 return const0_rtx;
11332 }
11333
11334 ost = CALL_EXPR_ARG (exp, 1);
11335 STRIP_NOPS (ost);
11336
11337 if (TREE_CODE (ost) != INTEGER_CST
11338 || tree_int_cst_sgn (ost) < 0
11339 || compare_tree_int (ost, 3) > 0)
11340 {
11341 error ("%Klast argument of %D is not integer constant between 0 and 3",
11342 exp, fndecl);
11343 expand_builtin_trap ();
11344 return const0_rtx;
11345 }
11346
11347 object_size_type = tree_to_shwi (ost);
11348
11349 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11350 }
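/* Behavioral sketch of the fallback above: when nothing better is
   known, __builtin_object_size (p, 0) and (p, 1) expand to
   (size_t) -1, the maximum "unknown" answer, while the minimum-style
   types (p, 2) and (p, 3) expand to 0.  */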
11351
11352 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11353 FCODE is the BUILT_IN_* to use.
11354 Return NULL_RTX if we failed; the caller should emit a normal call,
11355 otherwise try to get the result in TARGET, if convenient (and in
11356 mode MODE if that's convenient). */
11357
11358 static rtx
11359 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11360 enum built_in_function fcode)
11361 {
11362 tree dest, src, len, size;
11363
11364 if (!validate_arglist (exp,
11365 POINTER_TYPE,
11366 fcode == BUILT_IN_MEMSET_CHK
11367 ? INTEGER_TYPE : POINTER_TYPE,
11368 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11369 return NULL_RTX;
11370
11371 dest = CALL_EXPR_ARG (exp, 0);
11372 src = CALL_EXPR_ARG (exp, 1);
11373 len = CALL_EXPR_ARG (exp, 2);
11374 size = CALL_EXPR_ARG (exp, 3);
11375
11376 if (! tree_fits_uhwi_p (size))
11377 return NULL_RTX;
11378
11379 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11380 {
11381 tree fn;
11382
11383 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11384 {
11385 warning_at (tree_nonartificial_location (exp),
11386 0, "%Kcall to %D will always overflow destination buffer",
11387 exp, get_callee_fndecl (exp));
11388 return NULL_RTX;
11389 }
11390
11391 fn = NULL_TREE;
11392 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11393 mem{cpy,pcpy,move,set} is available. */
11394 switch (fcode)
11395 {
11396 case BUILT_IN_MEMCPY_CHK:
11397 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11398 break;
11399 case BUILT_IN_MEMPCPY_CHK:
11400 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11401 break;
11402 case BUILT_IN_MEMMOVE_CHK:
11403 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11404 break;
11405 case BUILT_IN_MEMSET_CHK:
11406 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11407 break;
11408 default:
11409 break;
11410 }
11411
11412 if (! fn)
11413 return NULL_RTX;
11414
11415 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11416 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11417 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11418 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11419 }
11420 else if (fcode == BUILT_IN_MEMSET_CHK)
11421 return NULL_RTX;
11422 else
11423 {
11424 unsigned int dest_align = get_pointer_alignment (dest);
11425
11426 /* If DEST is not a pointer type, call the normal function. */
11427 if (dest_align == 0)
11428 return NULL_RTX;
11429
11430 /* If SRC and DEST are the same (and not volatile), do nothing. */
11431 if (operand_equal_p (src, dest, 0))
11432 {
11433 tree expr;
11434
11435 if (fcode != BUILT_IN_MEMPCPY_CHK)
11436 {
11437 /* Evaluate and ignore LEN in case it has side-effects. */
11438 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11439 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11440 }
11441
11442 expr = fold_build_pointer_plus (dest, len);
11443 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11444 }
11445
11446 /* __memmove_chk special case. */
11447 if (fcode == BUILT_IN_MEMMOVE_CHK)
11448 {
11449 unsigned int src_align = get_pointer_alignment (src);
11450
11451 if (src_align == 0)
11452 return NULL_RTX;
11453
11454 /* If src is categorized for a readonly section we can use
11455 normal __memcpy_chk. */
11456 if (readonly_data_expr (src))
11457 {
11458 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11459 if (!fn)
11460 return NULL_RTX;
11461 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11462 dest, src, len, size);
11463 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11464 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11465 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11466 }
11467 }
11468 return NULL_RTX;
11469 }
11470 }
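/* As an illustration of the constant-length path above (hypothetical
   snippet):

     char buf[8];
     __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0));

   is rewritten into plain memcpy (buf, src, 4), since copying 4 bytes
   into 8 can never overflow. With length 16 instead, the "will always
   overflow" warning is emitted and the checked library call is kept
   so the overflow is still caught at run time.  */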
11471
11472 /* Emit warning if a buffer overflow is detected at compile time. */
11473
11474 static void
11475 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11476 {
11477 int is_strlen = 0;
11478 tree len, size;
11479 location_t loc = tree_nonartificial_location (exp);
11480
11481 switch (fcode)
11482 {
11483 case BUILT_IN_STRCPY_CHK:
11484 case BUILT_IN_STPCPY_CHK:
11485 /* For __strcat_chk the warning will be emitted only if overflowing
11486 by at least strlen (dest) + 1 bytes. */
11487 case BUILT_IN_STRCAT_CHK:
11488 len = CALL_EXPR_ARG (exp, 1);
11489 size = CALL_EXPR_ARG (exp, 2);
11490 is_strlen = 1;
11491 break;
11492 case BUILT_IN_STRNCAT_CHK:
11493 case BUILT_IN_STRNCPY_CHK:
11494 case BUILT_IN_STPNCPY_CHK:
11495 len = CALL_EXPR_ARG (exp, 2);
11496 size = CALL_EXPR_ARG (exp, 3);
11497 break;
11498 case BUILT_IN_SNPRINTF_CHK:
11499 case BUILT_IN_VSNPRINTF_CHK:
11500 len = CALL_EXPR_ARG (exp, 1);
11501 size = CALL_EXPR_ARG (exp, 3);
11502 break;
11503 default:
11504 gcc_unreachable ();
11505 }
11506
11507 if (!len || !size)
11508 return;
11509
11510 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11511 return;
11512
11513 if (is_strlen)
11514 {
11515 len = c_strlen (len, 1);
11516 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11517 return;
11518 }
11519 else if (fcode == BUILT_IN_STRNCAT_CHK)
11520 {
11521 tree src = CALL_EXPR_ARG (exp, 1);
11522 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11523 return;
11524 src = c_strlen (src, 1);
11525 if (! src || ! tree_fits_uhwi_p (src))
11526 {
11527 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11528 exp, get_callee_fndecl (exp));
11529 return;
11530 }
11531 else if (tree_int_cst_lt (src, size))
11532 return;
11533 }
11534 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11535 return;
11536
11537 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11538 exp, get_callee_fndecl (exp));
11539 }
11540
11541 /* Emit warning if a buffer overflow is detected at compile time
11542 in __sprintf_chk/__vsprintf_chk calls. */
11543
11544 static void
11545 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11546 {
11547 tree size, len, fmt;
11548 const char *fmt_str;
11549 int nargs = call_expr_nargs (exp);
11550
11551 /* Verify the required arguments in the original call. */
11552
11553 if (nargs < 4)
11554 return;
11555 size = CALL_EXPR_ARG (exp, 2);
11556 fmt = CALL_EXPR_ARG (exp, 3);
11557
11558 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11559 return;
11560
11561 /* Check whether the format is a literal string constant. */
11562 fmt_str = c_getstr (fmt);
11563 if (fmt_str == NULL)
11564 return;
11565
11566 if (!init_target_chars ())
11567 return;
11568
11569 /* If the format doesn't contain % args or %%, we know its size. */
11570 if (strchr (fmt_str, target_percent) == 0)
11571 len = build_int_cstu (size_type_node, strlen (fmt_str));
11572 /* If the format is "%s" and first ... argument is a string literal,
11573 we know it too. */
11574 else if (fcode == BUILT_IN_SPRINTF_CHK
11575 && strcmp (fmt_str, target_percent_s) == 0)
11576 {
11577 tree arg;
11578
11579 if (nargs < 5)
11580 return;
11581 arg = CALL_EXPR_ARG (exp, 4);
11582 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11583 return;
11584
11585 len = c_strlen (arg, 1);
11586 if (!len || ! tree_fits_uhwi_p (len))
11587 return;
11588 }
11589 else
11590 return;
11591
11592 if (! tree_int_cst_lt (len, size))
11593 warning_at (tree_nonartificial_location (exp),
11594 0, "%Kcall to %D will always overflow destination buffer",
11595 exp, get_callee_fndecl (exp));
11596 }
11597
11598 /* Emit warning if a free is called with address of a variable. */
11599
11600 static void
11601 maybe_emit_free_warning (tree exp)
11602 {
11603 tree arg = CALL_EXPR_ARG (exp, 0);
11604
11605 STRIP_NOPS (arg);
11606 if (TREE_CODE (arg) != ADDR_EXPR)
11607 return;
11608
11609 arg = get_base_address (TREE_OPERAND (arg, 0));
11610 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11611 return;
11612
11613 if (SSA_VAR_P (arg))
11614 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11615 "%Kattempt to free a non-heap object %qD", exp, arg);
11616 else
11617 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11618 "%Kattempt to free a non-heap object", exp);
11619 }
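/* E.g. (diagnostic sketch):

     int x;
     free (&x);

   is flagged by the code above as "attempt to free a non-heap
   object 'x'".  */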
11620
11621 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11622 if possible. */
11623
11624 static tree
11625 fold_builtin_object_size (tree ptr, tree ost)
11626 {
11627 unsigned HOST_WIDE_INT bytes;
11628 int object_size_type;
11629
11630 if (!validate_arg (ptr, POINTER_TYPE)
11631 || !validate_arg (ost, INTEGER_TYPE))
11632 return NULL_TREE;
11633
11634 STRIP_NOPS (ost);
11635
11636 if (TREE_CODE (ost) != INTEGER_CST
11637 || tree_int_cst_sgn (ost) < 0
11638 || compare_tree_int (ost, 3) > 0)
11639 return NULL_TREE;
11640
11641 object_size_type = tree_to_shwi (ost);
11642
11643 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11644 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11645 and (size_t) 0 for types 2 and 3. */
11646 if (TREE_SIDE_EFFECTS (ptr))
11647 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11648
11649 if (TREE_CODE (ptr) == ADDR_EXPR)
11650 {
11651 bytes = compute_builtin_object_size (ptr, object_size_type);
11652 if (wi::fits_to_tree_p (bytes, size_type_node))
11653 return build_int_cstu (size_type_node, bytes);
11654 }
11655 else if (TREE_CODE (ptr) == SSA_NAME)
11656 {
11657 /* If object size is not known yet, delay folding until
11658 later. Maybe subsequent passes will help determining
11659 it. */
11660 bytes = compute_builtin_object_size (ptr, object_size_type);
11661 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11662 && wi::fits_to_tree_p (bytes, size_type_node))
11663 return build_int_cstu (size_type_node, bytes);
11664 }
11665
11666 return NULL_TREE;
11667 }
11668
11669 /* Builtins with folding operations that operate on "..." arguments
11670 need special handling; we need to store the arguments in a convenient
11671 data structure before attempting any folding. Fortunately there are
11672 only a few builtins that fall into this category. FNDECL is the
11673 function, EXP is the CALL_EXPR for the call. */
11674
11675 static tree
11676 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11677 {
11678 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11679 tree ret = NULL_TREE;
11680
11681 switch (fcode)
11682 {
11683 case BUILT_IN_FPCLASSIFY:
11684 ret = fold_builtin_fpclassify (loc, args, nargs);
11685 break;
11686
11687 default:
11688 break;
11689 }
11690 if (ret)
11691 {
11692 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11693 SET_EXPR_LOCATION (ret, loc);
11694 TREE_NO_WARNING (ret) = 1;
11695 return ret;
11696 }
11697 return NULL_TREE;
11698 }
11699
11700 /* Initialize format string characters in the target charset. */
11701
11702 bool
11703 init_target_chars (void)
11704 {
11705 static bool init;
11706 if (!init)
11707 {
11708 target_newline = lang_hooks.to_target_charset ('\n');
11709 target_percent = lang_hooks.to_target_charset ('%');
11710 target_c = lang_hooks.to_target_charset ('c');
11711 target_s = lang_hooks.to_target_charset ('s');
11712 if (target_newline == 0 || target_percent == 0 || target_c == 0
11713 || target_s == 0)
11714 return false;
11715
11716 target_percent_c[0] = target_percent;
11717 target_percent_c[1] = target_c;
11718 target_percent_c[2] = '\0';
11719
11720 target_percent_s[0] = target_percent;
11721 target_percent_s[1] = target_s;
11722 target_percent_s[2] = '\0';
11723
11724 target_percent_s_newline[0] = target_percent;
11725 target_percent_s_newline[1] = target_s;
11726 target_percent_s_newline[2] = target_newline;
11727 target_percent_s_newline[3] = '\0';
11728
11729 init = true;
11730 }
11731 return true;
11732 }
11733
11734 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11735 and no overflow/underflow occurred. INEXACT is true if M was not
11736 exactly calculated. TYPE is the tree type for the result. This
11737 function assumes that the caller cleared the MPFR flags immediately
11738 before calculating M, so any flag now set was raised by that
11739 calculation. Return NULL_TREE if any checks fail. */
11740
11741 static tree
11742 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11743 {
11744 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11745 overflow/underflow occurred. If -frounding-math, proceed iff the
11746 result of calling FUNC was exact. */
11747 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11748 && (!flag_rounding_math || !inexact))
11749 {
11750 REAL_VALUE_TYPE rr;
11751
11752 real_from_mpfr (&rr, m, type, GMP_RNDN);
11753 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11754 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11755 but the mpfr_t is not, then we underflowed in the
11756 conversion. */
11757 if (real_isfinite (&rr)
11758 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11759 {
11760 REAL_VALUE_TYPE rmode;
11761
11762 real_convert (&rmode, TYPE_MODE (type), &rr);
11763 /* Proceed iff the specified mode can hold the value. */
11764 if (real_identical (&rmode, &rr))
11765 return build_real (type, rmode);
11766 }
11767 }
11768 return NULL_TREE;
11769 }
11770
11771 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11772 number and no overflow/underflow occurred. INEXACT is true if M
11773 was not exactly calculated. TYPE is the tree type for the result.
11774 This function assumes that the caller cleared the MPFR flags
11775 immediately before calculating M, so any flag now set was raised
11776 by that calculation. Return NULL_TREE if any checks fail; if
11777 FORCE_CONVERT is true, the checks are bypassed. */
11778
11779 static tree
11780 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11781 {
11782 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11783 overflow/underflow occurred. If -frounding-math, proceed iff the
11784 result of calling FUNC was exact. */
11785 if (force_convert
11786 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11787 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11788 && (!flag_rounding_math || !inexact)))
11789 {
11790 REAL_VALUE_TYPE re, im;
11791
11792 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11793 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11794 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11795 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11796 but the mpfr_t is not, then we underflowed in the
11797 conversion. */
11798 if (force_convert
11799 || (real_isfinite (&re) && real_isfinite (&im)
11800 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11801 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11802 {
11803 REAL_VALUE_TYPE re_mode, im_mode;
11804
11805 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11806 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11807 /* Proceed iff the specified mode can hold the value. */
11808 if (force_convert
11809 || (real_identical (&re_mode, &re)
11810 && real_identical (&im_mode, &im)))
11811 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11812 build_real (TREE_TYPE (type), im_mode));
11813 }
11814 }
11815 return NULL_TREE;
11816 }
11817
11818 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11819 FUNC on it and return the resulting value as a tree with type TYPE.
11820 If MIN and/or MAX are not NULL, then the supplied ARG must be
11821 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11822 acceptable values, otherwise they are not. The mpfr precision is
11823 set to the precision of TYPE. We assume that function FUNC returns
11824 zero if the result could be calculated exactly within the requested
11825 precision. */
11826
11827 static tree
11828 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11829 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11830 bool inclusive)
11831 {
11832 tree result = NULL_TREE;
11833
11834 STRIP_NOPS (arg);
11835
11836 /* To proceed, MPFR must exactly represent the target floating point
11837 format, which only happens when the target base equals two. */
11838 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11839 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11840 {
11841 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11842
11843 if (real_isfinite (ra)
11844 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11845 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11846 {
11847 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11848 const int prec = fmt->p;
11849 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11850 int inexact;
11851 mpfr_t m;
11852
11853 mpfr_init2 (m, prec);
11854 mpfr_from_real (m, ra, GMP_RNDN);
11855 mpfr_clear_flags ();
11856 inexact = func (m, m, rnd);
11857 result = do_mpfr_ckconv (m, type, inexact);
11858 mpfr_clear (m);
11859 }
11860 }
11861
11862 return result;
11863 }
11864
11865 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11866 FUNC on it and return the resulting value as a tree with type TYPE.
11867 The mpfr precision is set to the precision of TYPE. We assume that
11868 function FUNC returns zero if the result could be calculated
11869 exactly within the requested precision. */
11870
11871 static tree
11872 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11873 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11874 {
11875 tree result = NULL_TREE;
11876
11877 STRIP_NOPS (arg1);
11878 STRIP_NOPS (arg2);
11879
11880 /* To proceed, MPFR must exactly represent the target floating point
11881 format, which only happens when the target base equals two. */
11882 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11883 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11884 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11885 {
11886 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11887 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11888
11889 if (real_isfinite (ra1) && real_isfinite (ra2))
11890 {
11891 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11892 const int prec = fmt->p;
11893 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11894 int inexact;
11895 mpfr_t m1, m2;
11896
11897 mpfr_inits2 (prec, m1, m2, NULL);
11898 mpfr_from_real (m1, ra1, GMP_RNDN);
11899 mpfr_from_real (m2, ra2, GMP_RNDN);
11900 mpfr_clear_flags ();
11901 inexact = func (m1, m1, m2, rnd);
11902 result = do_mpfr_ckconv (m1, type, inexact);
11903 mpfr_clears (m1, m2, NULL);
11904 }
11905 }
11906
11907 return result;
11908 }
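/* A concrete instance of this folding (illustrative values):
   fold_builtin_2 routes the constant call atan2 (1.0, 1.0) here with
   FUNC == mpfr_atan2; MPFR evaluates it at the precision of TYPE and,
   absent -frounding-math, do_mpfr_ckconv returns the REAL_CST nearest
   pi/4 (0x1.921fb54442d18p-1 in double).  */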
11909
11910 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11911 FUNC on it and return the resulting value as a tree with type TYPE.
11912 The mpfr precision is set to the precision of TYPE. We assume that
11913 function FUNC returns zero if the result could be calculated
11914 exactly within the requested precision. */
11915
11916 static tree
11917 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11918 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11919 {
11920 tree result = NULL_TREE;
11921
11922 STRIP_NOPS (arg1);
11923 STRIP_NOPS (arg2);
11924 STRIP_NOPS (arg3);
11925
11926 /* To proceed, MPFR must exactly represent the target floating point
11927 format, which only happens when the target base equals two. */
11928 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11929 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11930 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11931 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11932 {
11933 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11934 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11935 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11936
11937 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11938 {
11939 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11940 const int prec = fmt->p;
11941 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11942 int inexact;
11943 mpfr_t m1, m2, m3;
11944
11945 mpfr_inits2 (prec, m1, m2, m3, NULL);
11946 mpfr_from_real (m1, ra1, GMP_RNDN);
11947 mpfr_from_real (m2, ra2, GMP_RNDN);
11948 mpfr_from_real (m3, ra3, GMP_RNDN);
11949 mpfr_clear_flags ();
11950 inexact = func (m1, m1, m2, m3, rnd);
11951 result = do_mpfr_ckconv (m1, type, inexact);
11952 mpfr_clears (m1, m2, m3, NULL);
11953 }
11954 }
11955
11956 return result;
11957 }
11958
11959 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11960 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11961 If ARG_SINP and ARG_COSP are NULL then the result is returned
11962 as a complex value.
11963 The type is taken from the type of ARG and is used for setting the
11964 precision of the calculation and results. */
11965
11966 static tree
11967 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11968 {
11969 tree const type = TREE_TYPE (arg);
11970 tree result = NULL_TREE;
11971
11972 STRIP_NOPS (arg);
11973
11974 /* To proceed, MPFR must exactly represent the target floating point
11975 format, which only happens when the target base equals two. */
11976 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11977 && TREE_CODE (arg) == REAL_CST
11978 && !TREE_OVERFLOW (arg))
11979 {
11980 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11981
11982 if (real_isfinite (ra))
11983 {
11984 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11985 const int prec = fmt->p;
11986 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11987 tree result_s, result_c;
11988 int inexact;
11989 mpfr_t m, ms, mc;
11990
11991 mpfr_inits2 (prec, m, ms, mc, NULL);
11992 mpfr_from_real (m, ra, GMP_RNDN);
11993 mpfr_clear_flags ();
11994 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11995 result_s = do_mpfr_ckconv (ms, type, inexact);
11996 result_c = do_mpfr_ckconv (mc, type, inexact);
11997 mpfr_clears (m, ms, mc, NULL);
11998 if (result_s && result_c)
11999 {
12000 /* If we are to return in a complex value, do so. */
12001 if (!arg_sinp && !arg_cosp)
12002 return build_complex (build_complex_type (type),
12003 result_c, result_s);
12004
12005 /* Dereference the sin/cos pointer arguments. */
12006 arg_sinp = build_fold_indirect_ref (arg_sinp);
12007 arg_cosp = build_fold_indirect_ref (arg_cosp);
12008 /* Proceed iff valid pointer types were passed in. */
12009 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12010 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12011 {
12012 /* Set the values. */
12013 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12014 result_s);
12015 TREE_SIDE_EFFECTS (result_s) = 1;
12016 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12017 result_c);
12018 TREE_SIDE_EFFECTS (result_c) = 1;
12019 /* Combine the assignments into a compound expr. */
12020 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12021 result_s, result_c));
12022 }
12023 }
12024 }
12025 }
12026 return result;
12027 }
12028
12029 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12030 two-argument mpfr order N Bessel function FUNC on them and return
12031 the resulting value as a tree with type TYPE. The mpfr precision
12032 is set to the precision of TYPE. We assume that function FUNC
12033 returns zero if the result could be calculated exactly within the
12034 requested precision. */
12035 static tree
12036 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12037 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12038 const REAL_VALUE_TYPE *min, bool inclusive)
12039 {
12040 tree result = NULL_TREE;
12041
12042 STRIP_NOPS (arg1);
12043 STRIP_NOPS (arg2);
12044
12045 /* To proceed, MPFR must exactly represent the target floating point
12046 format, which only happens when the target base equals two. */
12047 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12048 && tree_fits_shwi_p (arg1)
12049 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12050 {
12051 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12052 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12053
12054 if (n == (long)n
12055 && real_isfinite (ra)
12056 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12057 {
12058 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12059 const int prec = fmt->p;
12060 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12061 int inexact;
12062 mpfr_t m;
12063
12064 mpfr_init2 (m, prec);
12065 mpfr_from_real (m, ra, GMP_RNDN);
12066 mpfr_clear_flags ();
12067 inexact = func (m, n, m, rnd);
12068 result = do_mpfr_ckconv (m, type, inexact);
12069 mpfr_clear (m);
12070 }
12071 }
12072
12073 return result;
12074 }

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the integer pointed to by ARG_QUO and return the result.  The type
   is taken from the type of ARG0 and is used for setting the precision
   of the calculation and of the results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
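
/* Illustrative, standalone sketch of the mpfr_remquo call above.  A
   hedged example, excluded from the build by the #if 0 guard; link
   with -lmpfr -lgmp.  For operands 10 and 3 the remainder is 1 and
   the low quotient bits are 3.  */
#if 0
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m0, m1;
  long quo;

  mpfr_inits2 (53, m0, m1, NULL);
  mpfr_set_d (m0, 10.0, GMP_RNDN);
  mpfr_set_d (m1, 3.0, GMP_RNDN);
  /* Remainder lands in m0, low quotient bits in quo.  */
  mpfr_remquo (m0, &quo, m0, m1, GMP_RNDN);
  printf ("remquo (10, 3): rem = %g, quo = %ld\n",
	  mpfr_get_d (m0, GMP_RNDN), quo);
  mpfr_clears (m0, m1, NULL);
  return 0;
}
#endif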

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointed to by ARG_SG
   will be dereferenced and set to the appropriate signgam (-1,1)
   value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
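
/* Illustrative, standalone sketch of the mpfr_lgamma call above.  A
   hedged example, excluded from the build; link with -lmpfr -lgmp.
   Gamma is negative on (-3, -2), so for -2.5 the sign output is -1.  */
#if 0
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m;
  int sg;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, -2.5, GMP_RNDN);
  /* m becomes log |Gamma (-2.5)|; sg receives the sign of Gamma (-2.5).  */
  mpfr_lgamma (m, &sg, m, GMP_RNDN);
  printf ("lgamma (-2.5) = %.17g, signgam = %d\n",
	  mpfr_get_d (m, GMP_RNDN), sg);
  mpfr_clear (m);
  return 0;
}
#endif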

/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of the
   component type of TYPE.  We assume that function FUNC returns zero
   if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref (m), re, rnd);
	  mpfr_from_real (mpc_imagref (m), im, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
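
/* Illustrative, standalone sketch of the mpc recipe above, using
   mpc_sin as the one-argument FUNC.  A hedged example, excluded from
   the build; link with -lmpc -lmpfr -lgmp.  */
#if 0
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t m;

  mpc_init2 (m, 53);
  mpc_set_d_d (m, 1.0, 2.0, MPC_RNDNN);	/* m = 1 + 2i.  */
  mpc_sin (m, m, MPC_RNDNN);		/* m = csin (1 + 2i), in place.  */
  printf ("csin (1 + 2i) = %g + %g*i\n",
	  mpfr_get_d (mpc_realref (m), GMP_RNDN),
	  mpfr_get_d (mpc_imagref (m), GMP_RNDN));
  mpc_clear (m);
  return 0;
}
#endif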

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of the
   component type of TYPE.  We assume that function FUNC returns zero
   if the result could be calculated exactly within the requested
   precision.  If DO_NONFINITE is true, then fold expressions
   containing Inf or NaN in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
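
/* Illustrative, standalone sketch of the two-operand variant, using
   mpc_pow as FUNC.  A hedged example, excluded from the build; link
   with -lmpc -lmpfr -lgmp.  i**2 comes out as -1.  */
#if 0
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t m0, m1;

  mpc_init2 (m0, 53);
  mpc_init2 (m1, 53);
  mpc_set_d_d (m0, 0.0, 1.0, MPC_RNDNN);  /* m0 = i.  */
  mpc_set_d_d (m1, 2.0, 0.0, MPC_RNDNN);  /* m1 = 2.  */
  mpc_pow (m0, m0, m1, MPC_RNDNN);	  /* m0 = i**2.  */
  printf ("cpow (i, 2) = %g + %g*i\n",
	  mpfr_get_d (mpc_realref (m0), GMP_RNDN),
	  mpfr_get_d (mpc_imagref (m0), GMP_RNDN));
  mpc_clear (m0);
  mpc_clear (m1);
  return 0;
}
#endif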

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
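
/* Source-level view of what the folding above achieves (a hedged,
   illustrative snippet, excluded from the build): a builtin call whose
   arguments are constants is replaced by its value, so this function
   body folds to `return 8;' with no call emitted.  */
#if 0
int
popcount_of_ff (void)
{
  return __builtin_popcount (0xff);	/* Folds to the constant 8.  */
}
#endif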

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
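
/* Illustrative user-level trigger for the function above (a hedged
   example, excluded from the build): an asm label on a builtin's
   declaration supplies the ASMSPEC, after which GCC's block moves
   call the renamed symbol.  `my_memcpy' is a hypothetical name.  */
#if 0
extern void *memcpy (void *, const void *, __SIZE_TYPE__)
  __asm__ ("my_memcpy");
#endif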

/* Return true if DECL is a builtin that expands to a constant or
   similarly simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
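
/* Illustrative of the "expands to a constant" class above (a hedged
   snippet, excluded from the build): __builtin_constant_p collapses
   to 0 or 1, so this function folds to `return 1;'.  */
#if 0
int
five_is_constant (void)
{
  return __builtin_constant_p (5);
}
#endif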

/* Return true if DECL is a builtin that is not expensive, i.e., one
   that is most probably expanded inline into reasonably simple code.
   This is a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}