1 /* Expand builtin functions.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "hash-set.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stringpool.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "varasm.h"
40 #include "tree-object-size.h"
41 #include "realmpfr.h"
42 #include "predict.h"
43 #include "hashtab.h"
44 #include "hard-reg-set.h"
45 #include "function.h"
46 #include "cfgrtl.h"
47 #include "basic-block.h"
48 #include "tree-ssa-alias.h"
49 #include "internal-fn.h"
50 #include "gimple-expr.h"
51 #include "is-a.h"
52 #include "gimple.h"
53 #include "flags.h"
54 #include "regs.h"
55 #include "except.h"
56 #include "insn-config.h"
57 #include "statistics.h"
58 #include "real.h"
59 #include "fixed-value.h"
60 #include "expmed.h"
61 #include "dojump.h"
62 #include "explow.h"
63 #include "emit-rtl.h"
64 #include "stmt.h"
65 #include "expr.h"
66 #include "insn-codes.h"
67 #include "optabs.h"
68 #include "libfuncs.h"
69 #include "recog.h"
70 #include "output.h"
71 #include "typeclass.h"
72 #include "tm_p.h"
73 #include "target.h"
74 #include "langhooks.h"
75 #include "tree-ssanames.h"
76 #include "tree-dfa.h"
77 #include "value-prof.h"
78 #include "diagnostic-core.h"
79 #include "builtins.h"
80 #include "asan.h"
81 #include "cilk.h"
82 #include "ipa-ref.h"
83 #include "lto-streamer.h"
84 #include "cgraph.h"
85 #include "tree-chkp.h"
86 #include "rtl-chkp.h"
87 #include "gomp-constants.h"
88
89
90 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
91
92 struct target_builtins default_target_builtins;
93 #if SWITCHABLE_TARGET
94 struct target_builtins *this_target_builtins = &default_target_builtins;
95 #endif
96
97 /* Define the names of the builtin function types and codes. */
98 const char *const built_in_class_names[BUILT_IN_LAST]
99 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
100
101 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
102 const char * built_in_names[(int) END_BUILTINS] =
103 {
104 #include "builtins.def"
105 };
106 #undef DEF_BUILTIN
107
108 /* Set up an array of builtin_info_type, making sure each element's decl is
109 initialized to NULL_TREE. */
110 builtin_info_type builtin_info[(int)END_BUILTINS];
111
112 /* Non-zero if __builtin_constant_p should be folded right away. */
113 bool force_folding_builtin_constant_p;
114
115 static rtx c_readstr (const char *, machine_mode);
116 static int target_char_cast (tree, char *);
117 static rtx get_memory_rtx (tree, tree);
118 static int apply_args_size (void);
119 static int apply_result_size (void);
120 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
121 static rtx result_vector (int, rtx);
122 #endif
123 static void expand_builtin_update_setjmp_buf (rtx);
124 static void expand_builtin_prefetch (tree);
125 static rtx expand_builtin_apply_args (void);
126 static rtx expand_builtin_apply_args_1 (void);
127 static rtx expand_builtin_apply (rtx, rtx, rtx);
128 static void expand_builtin_return (rtx);
129 static enum type_class type_to_class (tree);
130 static rtx expand_builtin_classify_type (tree);
131 static void expand_errno_check (tree, rtx);
132 static rtx expand_builtin_mathfn (tree, rtx, rtx);
133 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
134 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
135 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
136 static rtx expand_builtin_interclass_mathfn (tree, rtx);
137 static rtx expand_builtin_sincos (tree);
138 static rtx expand_builtin_cexpi (tree, rtx);
139 static rtx expand_builtin_int_roundingfn (tree, rtx);
140 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
141 static rtx expand_builtin_next_arg (void);
142 static rtx expand_builtin_va_start (tree);
143 static rtx expand_builtin_va_end (tree);
144 static rtx expand_builtin_va_copy (tree);
145 static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
146 static rtx expand_builtin_strcmp (tree, rtx);
147 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
148 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
149 static rtx expand_builtin_memcpy (tree, rtx);
150 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
151 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
152 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
153 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
154 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
155 machine_mode, int, tree);
156 static rtx expand_builtin_strcpy (tree, rtx);
157 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
158 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
159 static rtx expand_builtin_strncpy (tree, rtx);
160 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
161 static rtx expand_builtin_memset (tree, rtx, machine_mode);
162 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
163 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
164 static rtx expand_builtin_bzero (tree);
165 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
166 static rtx expand_builtin_alloca (tree, bool);
167 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
168 static rtx expand_builtin_frame_address (tree, tree);
169 static tree stabilize_va_list_loc (location_t, tree, int);
170 static rtx expand_builtin_expect (tree, rtx);
171 static tree fold_builtin_constant_p (tree);
172 static tree fold_builtin_classify_type (tree);
173 static tree fold_builtin_strlen (location_t, tree, tree);
174 static tree fold_builtin_inf (location_t, tree, int);
175 static tree fold_builtin_nan (tree, tree, int);
176 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
177 static bool validate_arg (const_tree, enum tree_code code);
178 static bool integer_valued_real_p (tree);
179 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
180 static rtx expand_builtin_fabs (tree, rtx, rtx);
181 static rtx expand_builtin_signbit (tree, rtx);
182 static tree fold_builtin_sqrt (location_t, tree, tree);
183 static tree fold_builtin_cbrt (location_t, tree, tree);
184 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
185 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
186 static tree fold_builtin_cos (location_t, tree, tree, tree);
187 static tree fold_builtin_cosh (location_t, tree, tree, tree);
188 static tree fold_builtin_tan (tree, tree);
189 static tree fold_builtin_trunc (location_t, tree, tree);
190 static tree fold_builtin_floor (location_t, tree, tree);
191 static tree fold_builtin_ceil (location_t, tree, tree);
192 static tree fold_builtin_round (location_t, tree, tree);
193 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
194 static tree fold_builtin_bitop (tree, tree);
195 static tree fold_builtin_strchr (location_t, tree, tree, tree);
196 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
197 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
198 static tree fold_builtin_strcmp (location_t, tree, tree);
199 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
200 static tree fold_builtin_signbit (location_t, tree, tree);
201 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
202 static tree fold_builtin_isascii (location_t, tree);
203 static tree fold_builtin_toascii (location_t, tree);
204 static tree fold_builtin_isdigit (location_t, tree);
205 static tree fold_builtin_fabs (location_t, tree, tree);
206 static tree fold_builtin_abs (location_t, tree, tree);
207 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
208 enum tree_code);
209 static tree fold_builtin_0 (location_t, tree);
210 static tree fold_builtin_1 (location_t, tree, tree);
211 static tree fold_builtin_2 (location_t, tree, tree, tree);
212 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
213 static tree fold_builtin_varargs (location_t, tree, tree*, int);
214
215 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
216 static tree fold_builtin_strstr (location_t, tree, tree, tree);
217 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
218 static tree fold_builtin_strspn (location_t, tree, tree);
219 static tree fold_builtin_strcspn (location_t, tree, tree);
220
221 static rtx expand_builtin_object_size (tree);
222 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
223 enum built_in_function);
224 static void maybe_emit_chk_warning (tree, enum built_in_function);
225 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
226 static void maybe_emit_free_warning (tree);
227 static tree fold_builtin_object_size (tree, tree);
228
229 unsigned HOST_WIDE_INT target_newline;
230 unsigned HOST_WIDE_INT target_percent;
231 static unsigned HOST_WIDE_INT target_c;
232 static unsigned HOST_WIDE_INT target_s;
233 char target_percent_c[3];
234 char target_percent_s[3];
235 char target_percent_s_newline[4];
236 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
237 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
238 static tree do_mpfr_arg2 (tree, tree, tree,
239 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
240 static tree do_mpfr_arg3 (tree, tree, tree, tree,
241 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
242 static tree do_mpfr_sincos (tree, tree, tree);
243 static tree do_mpfr_bessel_n (tree, tree, tree,
244 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
245 const REAL_VALUE_TYPE *, bool);
246 static tree do_mpfr_remquo (tree, tree, tree);
247 static tree do_mpfr_lgamma_r (tree, tree, tree);
248 static void expand_builtin_sync_synchronize (void);
249
250 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or, with -fcilkplus, names a Cilk Plus runtime entry point. */
251
252 static bool
253 is_builtin_name (const char *name)
254 {
255 if (strncmp (name, "__builtin_", 10) == 0)
256 return true;
257 if (strncmp (name, "__sync_", 7) == 0)
258 return true;
259 if (strncmp (name, "__atomic_", 9) == 0)
260 return true;
261 if (flag_cilkplus
262 && (!strcmp (name, "__cilkrts_detach")
263 || !strcmp (name, "__cilkrts_pop_frame")))
264 return true;
265 return false;
266 }
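/* A minimal sketch of the matching above (hypothetical calls, #if 0-guarded
   so they are never compiled):  */
#if 0
static void
is_builtin_name_examples (void)
{
  gcc_assert (is_builtin_name ("__builtin_memcpy"));       /* true */
  gcc_assert (is_builtin_name ("__sync_fetch_and_add_4")); /* true */
  gcc_assert (is_builtin_name ("__atomic_load_n"));        /* true */
  gcc_assert (!is_builtin_name ("memcpy"));                /* false */
}
#endif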
267
268
269 /* Return true if DECL is a function symbol representing a built-in. */
270
271 bool
272 is_builtin_fn (tree decl)
273 {
274 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
275 }
276
277 /* Return true if NODE should be considered for inline expansion regardless
278 of the optimization level. That is the case whenever a function is invoked with
279 its "internal" name, which normally contains the prefix "__builtin". */
280
281 static bool
282 called_as_built_in (tree node)
283 {
284 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
285 we want the name used to call the function, not the name it
286 will have. */
287 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
288 return is_builtin_name (name);
289 }
290
291 /* Compute values M and N such that M divides (address of EXP - N) and such
292 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
293 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
294 *ALIGNP and any bit-offset to *BITPOSP.
295
296 Note that the address (and thus the alignment) computed here is based
297 on the address to which a symbol resolves, whereas DECL_ALIGN is based
298 on the address at which an object is actually located. These two
299 addresses are not always the same. For example, on ARM targets,
300 the address &foo of a Thumb function foo() has the lowest bit set,
301 whereas foo() itself starts on an even address.
302
303 If ADDR_P is true we are taking the address of the memory reference EXP
304 and thus cannot rely on the access taking place. */
305
306 static bool
307 get_object_alignment_2 (tree exp, unsigned int *alignp,
308 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
309 {
310 HOST_WIDE_INT bitsize, bitpos;
311 tree offset;
312 machine_mode mode;
313 int unsignedp, volatilep;
314 unsigned int align = BITS_PER_UNIT;
315 bool known_alignment = false;
316
317 /* Get the innermost object and the constant (bitpos) and possibly
318 variable (offset) offset of the access. */
319 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
320 &mode, &unsignedp, &volatilep, true);
321
322 /* Extract alignment information from the innermost object and
323 possibly adjust bitpos and offset. */
324 if (TREE_CODE (exp) == FUNCTION_DECL)
325 {
326 /* Function addresses can encode extra information besides their
327 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
328 allows the low bit to be used as a virtual bit, we know
329 that the address itself must be at least 2-byte aligned. */
330 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
331 align = 2 * BITS_PER_UNIT;
332 }
333 else if (TREE_CODE (exp) == LABEL_DECL)
334 ;
335 else if (TREE_CODE (exp) == CONST_DECL)
336 {
337 /* The alignment of a CONST_DECL is determined by its initializer. */
338 exp = DECL_INITIAL (exp);
339 align = TYPE_ALIGN (TREE_TYPE (exp));
340 #ifdef CONSTANT_ALIGNMENT
341 if (CONSTANT_CLASS_P (exp))
342 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
343 #endif
344 known_alignment = true;
345 }
346 else if (DECL_P (exp))
347 {
348 align = DECL_ALIGN (exp);
349 known_alignment = true;
350 }
351 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
352 {
353 align = TYPE_ALIGN (TREE_TYPE (exp));
354 }
355 else if (TREE_CODE (exp) == INDIRECT_REF
356 || TREE_CODE (exp) == MEM_REF
357 || TREE_CODE (exp) == TARGET_MEM_REF)
358 {
359 tree addr = TREE_OPERAND (exp, 0);
360 unsigned ptr_align;
361 unsigned HOST_WIDE_INT ptr_bitpos;
362
363 if (TREE_CODE (addr) == BIT_AND_EXPR
364 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
365 {
366 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
367 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
368 align *= BITS_PER_UNIT;
369 addr = TREE_OPERAND (addr, 0);
370 }
371
372 known_alignment
373 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
374 align = MAX (ptr_align, align);
375
376 /* The alignment of the pointer operand in a TARGET_MEM_REF
377 has to take the variable offset parts into account. */
378 if (TREE_CODE (exp) == TARGET_MEM_REF)
379 {
380 if (TMR_INDEX (exp))
381 {
382 unsigned HOST_WIDE_INT step = 1;
383 if (TMR_STEP (exp))
384 step = TREE_INT_CST_LOW (TMR_STEP (exp));
385 align = MIN (align, (step & -step) * BITS_PER_UNIT);
386 }
387 if (TMR_INDEX2 (exp))
388 align = BITS_PER_UNIT;
389 known_alignment = false;
390 }
391
392 /* When EXP is an actual memory reference then we can use
393 TYPE_ALIGN of a pointer indirection to derive alignment.
394 Do so only if get_pointer_alignment_1 did not reveal absolute
395 alignment knowledge and if using that alignment would
396 improve the situation. */
397 if (!addr_p && !known_alignment
398 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
399 align = TYPE_ALIGN (TREE_TYPE (exp));
400 else
401 {
402 /* Else adjust bitpos accordingly. */
403 bitpos += ptr_bitpos;
404 if (TREE_CODE (exp) == MEM_REF
405 || TREE_CODE (exp) == TARGET_MEM_REF)
406 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
407 }
408 }
409 else if (TREE_CODE (exp) == STRING_CST)
410 {
411 /* STRING_CSTs are the only constant objects we allow not to be
412 wrapped inside a CONST_DECL. */
413 align = TYPE_ALIGN (TREE_TYPE (exp));
414 #ifdef CONSTANT_ALIGNMENT
415 if (CONSTANT_CLASS_P (exp))
416 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
417 #endif
418 known_alignment = true;
419 }
420
421 /* If there is a non-constant offset part extract the maximum
422 alignment that can prevail. */
423 if (offset)
424 {
425 unsigned int trailing_zeros = tree_ctz (offset);
426 if (trailing_zeros < HOST_BITS_PER_INT)
427 {
428 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
429 if (inner)
430 align = MIN (align, inner);
431 }
432 }
433
434 *alignp = align;
435 *bitposp = bitpos & (*alignp - 1);
436 return known_alignment;
437 }
438
439 /* For a memory reference expression EXP compute values M and N such that M
440 divides (&EXP - N) and such that N < M. If these numbers can be determined,
441 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
442 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
443
444 bool
445 get_object_alignment_1 (tree exp, unsigned int *alignp,
446 unsigned HOST_WIDE_INT *bitposp)
447 {
448 return get_object_alignment_2 (exp, alignp, bitposp, false);
449 }
450
451 /* Return the alignment in bits of EXP, an object. */
452
453 unsigned int
454 get_object_alignment (tree exp)
455 {
456 unsigned HOST_WIDE_INT bitpos = 0;
457 unsigned int align;
458
459 get_object_alignment_1 (exp, &align, &bitpos);
460
461 /* align and bitpos now specify known low bits of the pointer.
462 ptr & (align - 1) == bitpos. */
463
464 if (bitpos != 0)
465 align = (bitpos & -bitpos);
466 return align;
467 }
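/* A worked example of the adjustment above, with illustrative numbers:
   if get_object_alignment_1 reports align == 64 and bitpos == 16, then
   ptr & 63 == 16, i.e. the address is 16 bits past a 64-bit boundary, so
   only 16-bit alignment is actually usable; bitpos & -bitpos
   (16 & -16 == 16) extracts that lowest set bit.  */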
468
469 /* For a pointer valued expression EXP compute values M and N such that M
470 divides (EXP - N) and such that N < M. If these numbers can be determined,
471 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
472 the results are just a conservative approximation.
473
474 If EXP is not a pointer, false is returned too. */
475
476 bool
477 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
478 unsigned HOST_WIDE_INT *bitposp)
479 {
480 STRIP_NOPS (exp);
481
482 if (TREE_CODE (exp) == ADDR_EXPR)
483 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
484 alignp, bitposp, true);
485 else if (TREE_CODE (exp) == SSA_NAME
486 && POINTER_TYPE_P (TREE_TYPE (exp)))
487 {
488 unsigned int ptr_align, ptr_misalign;
489 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
490
491 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
492 {
493 *bitposp = ptr_misalign * BITS_PER_UNIT;
494 *alignp = ptr_align * BITS_PER_UNIT;
495 /* We cannot really tell whether this result is an approximation. */
496 return true;
497 }
498 else
499 {
500 *bitposp = 0;
501 *alignp = BITS_PER_UNIT;
502 return false;
503 }
504 }
505 else if (TREE_CODE (exp) == INTEGER_CST)
506 {
507 *alignp = BIGGEST_ALIGNMENT;
508 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
509 & (BIGGEST_ALIGNMENT - 1));
510 return true;
511 }
512
513 *bitposp = 0;
514 *alignp = BITS_PER_UNIT;
515 return false;
516 }
517
518 /* Return the alignment in bits of EXP, a pointer valued expression.
519 The alignment returned is, by default, the alignment of the thing that
520 EXP points to. If EXP is not a pointer type, BITS_PER_UNIT is returned.
521
522 Otherwise, look at the expression to see if we can do better, i.e., if the
523 expression is actually pointing at an object whose alignment is tighter. */
524
525 unsigned int
526 get_pointer_alignment (tree exp)
527 {
528 unsigned HOST_WIDE_INT bitpos = 0;
529 unsigned int align;
530
531 get_pointer_alignment_1 (exp, &align, &bitpos);
532
533 /* align and bitpos now specify known low bits of the pointer.
534 ptr & (align - 1) == bitpos. */
535
536 if (bitpos != 0)
537 align = (bitpos & -bitpos);
538
539 return align;
540 }
541
542 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
543 way, because the array could contain a zero byte in the middle.
544 TREE_STRING_LENGTH is the size of the character array, not the string.
545
546 ONLY_VALUE should be nonzero if the result is not going to be emitted
547 into the instruction stream and zero if it is going to be expanded.
548 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
549 is returned, otherwise NULL, since
550 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
551 evaluate the side-effects.
552
553 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
554 accesses. Note that this implies the result is not going to be emitted
555 into the instruction stream.
556
557 The value returned is of type `ssizetype'.
558
559 Unfortunately, string_constant can't access the values of const char
560 arrays with initializers, so neither can we here. */
561
562 tree
563 c_strlen (tree src, int only_value)
564 {
565 tree offset_node;
566 HOST_WIDE_INT offset;
567 int max;
568 const char *ptr;
569 location_t loc;
570
571 STRIP_NOPS (src);
572 if (TREE_CODE (src) == COND_EXPR
573 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
574 {
575 tree len1, len2;
576
577 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
578 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
579 if (tree_int_cst_equal (len1, len2))
580 return len1;
581 }
582
583 if (TREE_CODE (src) == COMPOUND_EXPR
584 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
585 return c_strlen (TREE_OPERAND (src, 1), only_value);
586
587 loc = EXPR_LOC_OR_LOC (src, input_location);
588
589 src = string_constant (src, &offset_node);
590 if (src == 0)
591 return NULL_TREE;
592
593 max = TREE_STRING_LENGTH (src) - 1;
594 ptr = TREE_STRING_POINTER (src);
595
596 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
597 {
598 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
599 compute the offset to the following null if we don't know where to
600 start searching for it. */
601 int i;
602
603 for (i = 0; i < max; i++)
604 if (ptr[i] == 0)
605 return NULL_TREE;
606
607 /* We don't know the starting offset, but we do know that the string
608 has no internal zero bytes. We can assume that the offset falls
609 within the bounds of the string; otherwise, the programmer deserves
610 what he gets. Subtract the offset from the length of the string,
611 and return that. This would perhaps not be valid if we were dealing
612 with named arrays in addition to literal string constants. */
613
614 return size_diffop_loc (loc, size_int (max), offset_node);
615 }
616
617 /* We have a known offset into the string. Start searching there for
618 a null character if we can represent it as a single HOST_WIDE_INT. */
619 if (offset_node == 0)
620 offset = 0;
621 else if (! tree_fits_shwi_p (offset_node))
622 offset = -1;
623 else
624 offset = tree_to_shwi (offset_node);
625
626 /* If the offset is known to be out of bounds, warn, and call strlen at
627 runtime. */
628 if (offset < 0 || offset > max)
629 {
630 /* Suppress multiple warnings for propagated constant strings. */
631 if (only_value != 2
632 && !TREE_NO_WARNING (src))
633 {
634 warning_at (loc, 0, "offset outside bounds of constant string");
635 TREE_NO_WARNING (src) = 1;
636 }
637 return NULL_TREE;
638 }
639
640 /* Use strlen to search for the first zero byte. Since any strings
641 constructed with build_string will have nulls appended, we win even
642 if we get handed something like (char[4])"abcd".
643
644 Since OFFSET is our starting index into the string, no further
645 calculation is needed. */
646 return ssize_int (strlen (ptr + offset));
647 }
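/* The cases above as seen from user code; a hedged, #if 0-guarded sketch
   (whether the folding actually fires also depends on the rest of the
   optimization pipeline):  */
#if 0
extern size_t n;
size_t a = strlen ("hello");       /* c_strlen yields 5.  */
size_t b = strlen ("hello" + n);   /* No interior NUL: yields 5 - n.  */
size_t c = strlen ("he\0llo" + n); /* Interior NUL, N unknown: NULL_TREE.  */
size_t d = strlen ("hello" + 42);  /* Offset out of bounds: warns, NULL_TREE.  */
#endif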
648
649 /* Return a char pointer for a C string if it is a string constant
650 or sum of string constant and integer constant. */
651
652 const char *
653 c_getstr (tree src)
654 {
655 tree offset_node;
656
657 src = string_constant (src, &offset_node);
658 if (src == 0)
659 return 0;
660
661 if (offset_node == 0)
662 return TREE_STRING_POINTER (src);
663 else if (!tree_fits_uhwi_p (offset_node)
664 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
665 return 0;
666
667 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
668 }
669
670 /* Return a constant integer corresponding to target reading
671 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
672
673 static rtx
674 c_readstr (const char *str, machine_mode mode)
675 {
676 HOST_WIDE_INT ch;
677 unsigned int i, j;
678 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
679
680 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
681 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
682 / HOST_BITS_PER_WIDE_INT;
683
684 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
685 for (i = 0; i < len; i++)
686 tmp[i] = 0;
687
688 ch = 1;
689 for (i = 0; i < GET_MODE_SIZE (mode); i++)
690 {
691 j = i;
692 if (WORDS_BIG_ENDIAN)
693 j = GET_MODE_SIZE (mode) - i - 1;
694 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
695 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
696 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
697 j *= BITS_PER_UNIT;
698
699 if (ch)
700 ch = (unsigned char) str[i];
701 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
702 }
703
704 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
705 return immed_wide_int_const (c, mode);
706 }
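/* A worked example, assuming a little-endian target (neither
   BYTES_BIG_ENDIAN nor WORDS_BIG_ENDIAN set): byte I of STR lands at bit
   position I * 8, so c_readstr ("abcd", SImode) yields the constant
   0x64636261 ('a' == 0x61 in the low byte).  Once a NUL is read, CH stays
   zero and the remaining bytes are zero-filled.  */
#if 0
rtx x = c_readstr ("abcd", SImode);  /* (const_int 0x64636261) here.  */
#endif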
707
708 /* Cast a target constant CST to the target CHAR type and, if that value
709 fits into the host char type, return zero and store the value in the
710 variable pointed to by P; otherwise return one. */
711
712 static int
713 target_char_cast (tree cst, char *p)
714 {
715 unsigned HOST_WIDE_INT val, hostval;
716
717 if (TREE_CODE (cst) != INTEGER_CST
718 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
719 return 1;
720
721 /* Do not care if it fits or not right here. */
722 val = TREE_INT_CST_LOW (cst);
723
724 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
725 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
726
727 hostval = val;
728 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
729 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
730
731 if (val != hostval)
732 return 1;
733
734 *p = hostval;
735 return 0;
736 }
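/* Worked values for the check above, assuming a hypothetical 16-bit
   target char and an 8-bit host char: 0x1234 masks down to a hostval of
   0x34 != 0x1234, so the cast is rejected (return 1), while 0x41 fits,
   is stored through P, and 0 is returned.  */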
737
738 /* Similar to save_expr, but assumes that arbitrary code is not executed
739 in between the multiple evaluations. In particular, we assume that a
740 non-addressable local variable will not be modified. */
741
742 static tree
743 builtin_save_expr (tree exp)
744 {
745 if (TREE_CODE (exp) == SSA_NAME
746 || (TREE_ADDRESSABLE (exp) == 0
747 && (TREE_CODE (exp) == PARM_DECL
748 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
749 return exp;
750
751 return save_expr (exp);
752 }
753
754 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
755 times to get the address of either a higher stack frame, or a return
756 address located within it (depending on FNDECL_CODE). */
757
758 static rtx
759 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
760 {
761 int i;
762
763 #ifdef INITIAL_FRAME_ADDRESS_RTX
764 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
765 #else
766 rtx tem;
767
768 /* For a zero count with __builtin_return_address, we don't care what
769 frame address we return, because target-specific definitions will
770 override us. Therefore frame pointer elimination is OK, and using
771 the soft frame pointer is OK.
772
773 For a nonzero count, or a zero count with __builtin_frame_address,
774 we require a stable offset from the current frame pointer to the
775 previous one, so we must use the hard frame pointer, and
776 we must disable frame pointer elimination. */
777 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
778 tem = frame_pointer_rtx;
779 else
780 {
781 tem = hard_frame_pointer_rtx;
782
783 /* Tell reload not to eliminate the frame pointer. */
784 crtl->accesses_prior_frames = 1;
785 }
786 #endif
787
788 /* Some machines need special handling before we can access
789 arbitrary frames. For example, on the SPARC, we must first flush
790 all register windows to the stack. */
791 #ifdef SETUP_FRAME_ADDRESSES
792 if (count > 0)
793 SETUP_FRAME_ADDRESSES ();
794 #endif
795
796 /* On the SPARC, the return address is not in the frame, it is in a
797 register. There is no way to access it off of the current frame
798 pointer, but it can be accessed off the previous frame pointer by
799 reading the value from the register window save area. */
800 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
801 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
802 count--;
803 #endif
804
805 /* Scan back COUNT frames to the specified frame. */
806 for (i = 0; i < count; i++)
807 {
808 /* Assume the dynamic chain pointer is in the word that the
809 frame address points to, unless otherwise specified. */
810 #ifdef DYNAMIC_CHAIN_ADDRESS
811 tem = DYNAMIC_CHAIN_ADDRESS (tem);
812 #endif
813 tem = memory_address (Pmode, tem);
814 tem = gen_frame_mem (Pmode, tem);
815 tem = copy_to_reg (tem);
816 }
817
818 /* For __builtin_frame_address, return what we've got. But, on
819 the SPARC for example, we may have to add a bias. */
820 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
821 #ifdef FRAME_ADDR_RTX
822 return FRAME_ADDR_RTX (tem);
823 #else
824 return tem;
825 #endif
826
827 /* For __builtin_return_address, get the return address from that frame. */
828 #ifdef RETURN_ADDR_RTX
829 tem = RETURN_ADDR_RTX (count, tem);
830 #else
831 tem = memory_address (Pmode,
832 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
833 tem = gen_frame_mem (Pmode, tem);
834 #endif
835 return tem;
836 }
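/* The user-level builtins expanded above, as an illustrative
   #if 0-guarded sketch:  */
#if 0
void *ra = __builtin_return_address (0); /* Return address of this frame.  */
void *fa = __builtin_frame_address (1);  /* Frame one level up; nonzero
					    counts walk the dynamic chain.  */
#endif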
837
838 /* Alias set used for setjmp buffer. */
839 static alias_set_type setjmp_alias_set = -1;
840
841 /* Construct the leading half of a __builtin_setjmp call. Control will
842 return to RECEIVER_LABEL. This is also called directly by the SJLJ
843 exception handling code. */
844
845 void
846 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
847 {
848 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
849 rtx stack_save;
850 rtx mem;
851
852 if (setjmp_alias_set == -1)
853 setjmp_alias_set = new_alias_set ();
854
855 buf_addr = convert_memory_address (Pmode, buf_addr);
856
857 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
858
859 /* We store the frame pointer and the address of receiver_label in
860 the buffer and use the rest of it for the stack save area, which
861 is machine-dependent. */
862
863 mem = gen_rtx_MEM (Pmode, buf_addr);
864 set_mem_alias_set (mem, setjmp_alias_set);
865 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
866
867 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
868 GET_MODE_SIZE (Pmode)));
869 set_mem_alias_set (mem, setjmp_alias_set);
870
871 emit_move_insn (validize_mem (mem),
872 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
873
874 stack_save = gen_rtx_MEM (sa_mode,
875 plus_constant (Pmode, buf_addr,
876 2 * GET_MODE_SIZE (Pmode)));
877 set_mem_alias_set (stack_save, setjmp_alias_set);
878 emit_stack_save (SAVE_NONLOCAL, &stack_save);
879
880 /* If there is further processing to do, do it. */
881 #ifdef HAVE_builtin_setjmp_setup
882 if (HAVE_builtin_setjmp_setup)
883 emit_insn (gen_builtin_setjmp_setup (buf_addr));
884 #endif
885
886 /* We have a nonlocal label. */
887 cfun->has_nonlocal_label = 1;
888 }
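/* The resulting buffer layout, as set up above: word 0 holds
   targetm.builtin_setjmp_frame_value (), word 1 the address of
   RECEIVER_LABEL, and everything from offset 2 * GET_MODE_SIZE (Pmode)
   onward is the SAVE_NONLOCAL stack save area.  */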
889
890 /* Construct the trailing part of a __builtin_setjmp call. This is
891 also called directly by the SJLJ exception handling code.
892 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
893
894 void
895 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
896 {
897 rtx chain;
898
899 /* Mark the FP as used when we get here, so we have to make sure it's
900 marked as used by this function. */
901 emit_use (hard_frame_pointer_rtx);
902
903 /* Mark the static chain as clobbered here so life information
904 doesn't get messed up for it. */
905 chain = targetm.calls.static_chain (current_function_decl, true);
906 if (chain && REG_P (chain))
907 emit_clobber (chain);
908
909 /* Now put in the code to restore the frame pointer, and argument
910 pointer, if needed. */
911 #ifdef HAVE_nonlocal_goto
912 if (! HAVE_nonlocal_goto)
913 #endif
914 {
915 /* First adjust our frame pointer to its actual value. It was
916 previously set to the start of the virtual area corresponding to
917 the stacked variables when we branched here and now needs to be
918 adjusted to the actual hardware fp value.
919
920 Assignments to virtual registers are converted by
921 instantiate_virtual_regs into the corresponding assignment
922 to the underlying register (fp in this case) that makes
923 the original assignment true.
924 So the following insn will actually be decrementing fp by
925 STARTING_FRAME_OFFSET. */
926 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
927
928 /* Restoring the frame pointer also modifies the hard frame pointer.
929 Mark it used (so that the previous assignment remains live once
930 the frame pointer is eliminated) and clobbered (to represent the
931 implicit update from the assignment). */
932 emit_use (hard_frame_pointer_rtx);
933 emit_clobber (hard_frame_pointer_rtx);
934 }
935
936 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
937 if (fixed_regs[ARG_POINTER_REGNUM])
938 {
939 #ifdef ELIMINABLE_REGS
940 /* If the argument pointer can be eliminated in favor of the
941 frame pointer, we don't need to restore it. We assume here
942 that if such an elimination is present, it can always be used.
943 This is the case on all known machines; if we don't make this
944 assumption, we do unnecessary saving on many machines. */
945 size_t i;
946 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
947
948 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
949 if (elim_regs[i].from == ARG_POINTER_REGNUM
950 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
951 break;
952
953 if (i == ARRAY_SIZE (elim_regs))
954 #endif
955 {
956 /* Now restore our arg pointer from the address at which it
957 was saved in our stack frame. */
958 emit_move_insn (crtl->args.internal_arg_pointer,
959 copy_to_reg (get_arg_pointer_save_area ()));
960 }
961 }
962 #endif
963
964 #ifdef HAVE_builtin_setjmp_receiver
965 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
966 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
967 else
968 #endif
969 #ifdef HAVE_nonlocal_goto_receiver
970 if (HAVE_nonlocal_goto_receiver)
971 emit_insn (gen_nonlocal_goto_receiver ());
972 else
973 #endif
974 { /* Nothing */ }
975
976 /* We must not allow the code we just generated to be reordered by
977 scheduling. Specifically, the update of the frame pointer must
978 happen immediately, not later. */
979 emit_insn (gen_blockage ());
980 }
981
982 /* __builtin_longjmp is passed a pointer to an array of five words (not
983 all will be used on all machines). It operates similarly to the C
984 library function of the same name, but is more efficient. Much of
985 the code below is copied from the handling of non-local gotos. */
986
987 static void
988 expand_builtin_longjmp (rtx buf_addr, rtx value)
989 {
990 rtx fp, lab, stack;
991 rtx_insn *insn, *last;
992 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
993
994 /* DRAP is needed for stack realign if longjmp is expanded to current
995 function */
996 if (SUPPORTS_STACK_ALIGNMENT)
997 crtl->need_drap = true;
998
999 if (setjmp_alias_set == -1)
1000 setjmp_alias_set = new_alias_set ();
1001
1002 buf_addr = convert_memory_address (Pmode, buf_addr);
1003
1004 buf_addr = force_reg (Pmode, buf_addr);
1005
1006 /* We require that the user pass a second argument of 1, because
1007 that is what builtin_setjmp will return. */
1008 gcc_assert (value == const1_rtx);
1009
1010 last = get_last_insn ();
1011 #ifdef HAVE_builtin_longjmp
1012 if (HAVE_builtin_longjmp)
1013 emit_insn (gen_builtin_longjmp (buf_addr));
1014 else
1015 #endif
1016 {
1017 fp = gen_rtx_MEM (Pmode, buf_addr);
1018 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1019 GET_MODE_SIZE (Pmode)));
1020
1021 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1022 2 * GET_MODE_SIZE (Pmode)));
1023 set_mem_alias_set (fp, setjmp_alias_set);
1024 set_mem_alias_set (lab, setjmp_alias_set);
1025 set_mem_alias_set (stack, setjmp_alias_set);
1026
1027 /* Pick up FP, label, and SP from the block and jump. This code is
1028 from expand_goto in stmt.c; see there for detailed comments. */
1029 #ifdef HAVE_nonlocal_goto
1030 if (HAVE_nonlocal_goto)
1031 /* We have to pass a value to the nonlocal_goto pattern that will
1032 get copied into the static_chain pointer, but it does not matter
1033 what that value is, because builtin_setjmp does not use it. */
1034 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1035 else
1036 #endif
1037 {
1038 lab = copy_to_reg (lab);
1039
1040 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1041 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1042
1043 emit_move_insn (hard_frame_pointer_rtx, fp);
1044 emit_stack_restore (SAVE_NONLOCAL, stack);
1045
1046 emit_use (hard_frame_pointer_rtx);
1047 emit_use (stack_pointer_rtx);
1048 emit_indirect_jump (lab);
1049 }
1050 }
1051
1052 /* Search backwards and mark the jump insn as a non-local goto.
1053 Note that this precludes the use of __builtin_longjmp to a
1054 __builtin_setjmp target in the same function. However, we've
1055 already cautioned the user that these functions are for
1056 internal exception handling use only. */
1057 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1058 {
1059 gcc_assert (insn != last);
1060
1061 if (JUMP_P (insn))
1062 {
1063 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1064 break;
1065 }
1066 else if (CALL_P (insn))
1067 break;
1068 }
1069 }
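/* The only supported pairing, as an illustrative #if 0-guarded sketch
   (per the comments above, these builtins are for internal
   exception-handling use only):  */
#if 0
void *buf[5];			/* Five words, per the comment above.  */
if (__builtin_setjmp (buf) == 0)
  __builtin_longjmp (buf, 1);	/* The second argument must be 1.  */
#endif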
1070
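/* Return true if ITER has more call arguments left to visit.  */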
1071 static inline bool
1072 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1073 {
1074 return (iter->i < iter->n);
1075 }
1076
1077 /* This function validates the types of a function call argument list
1078 against a specified list of tree_codes. If the last specifier is a 0,
1079 that represents an ellipsis; otherwise the last specifier must be a
1080 VOID_TYPE. */
1081
1082 static bool
1083 validate_arglist (const_tree callexpr, ...)
1084 {
1085 enum tree_code code;
1086 bool res = false;
1087 va_list ap;
1088 const_call_expr_arg_iterator iter;
1089 const_tree arg;
1090
1091 va_start (ap, callexpr);
1092 init_const_call_expr_arg_iterator (callexpr, &iter);
1093
1094 do
1095 {
1096 code = (enum tree_code) va_arg (ap, int);
1097 switch (code)
1098 {
1099 case 0:
1100 /* This signifies an ellipsis; any further arguments are all ok. */
1101 res = true;
1102 goto end;
1103 case VOID_TYPE:
1104 /* This signifies an endlink; if no arguments remain, return
1105 true, otherwise return false. */
1106 res = !more_const_call_expr_args_p (&iter);
1107 goto end;
1108 default:
1109 /* If no parameters remain or the parameter's code does not
1110 match the specified code, return false. Otherwise continue
1111 checking any remaining arguments. */
1112 arg = next_const_call_expr_arg (&iter);
1113 if (!validate_arg (arg, code))
1114 goto end;
1115 break;
1116 }
1117 }
1118 while (1);
1119
1120 /* We need gotos here since we can only have one VA_CLOSE in a
1121 function. */
1122 end: ;
1123 va_end (ap);
1124
1125 return res;
1126 }
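/* Typical call patterns, both of which appear later in this file:  */
#if 0
/* Exactly (pointer, pointer):  */
validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE);
/* A pointer followed by anything (the trailing 0 is the ellipsis):  */
validate_arglist (exp, POINTER_TYPE, 0);
#endif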
1127
1128 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1129 and the address of the save area. */
1130
1131 static rtx
1132 expand_builtin_nonlocal_goto (tree exp)
1133 {
1134 tree t_label, t_save_area;
1135 rtx r_label, r_save_area, r_fp, r_sp;
1136 rtx_insn *insn;
1137
1138 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1139 return NULL_RTX;
1140
1141 t_label = CALL_EXPR_ARG (exp, 0);
1142 t_save_area = CALL_EXPR_ARG (exp, 1);
1143
1144 r_label = expand_normal (t_label);
1145 r_label = convert_memory_address (Pmode, r_label);
1146 r_save_area = expand_normal (t_save_area);
1147 r_save_area = convert_memory_address (Pmode, r_save_area);
1148 /* Copy the address of the save location to a register just in case it was
1149 based on the frame pointer. */
1150 r_save_area = copy_to_reg (r_save_area);
1151 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1152 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1153 plus_constant (Pmode, r_save_area,
1154 GET_MODE_SIZE (Pmode)));
1155
1156 crtl->has_nonlocal_goto = 1;
1157
1158 #ifdef HAVE_nonlocal_goto
1159 /* ??? We no longer need to pass the static chain value, afaik. */
1160 if (HAVE_nonlocal_goto)
1161 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1162 else
1163 #endif
1164 {
1165 r_label = copy_to_reg (r_label);
1166
1167 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1168 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1169
1170 /* Restore frame pointer for containing function. */
1171 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1172 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1173
1174 /* USE of hard_frame_pointer_rtx added for consistency;
1175 not clear if really needed. */
1176 emit_use (hard_frame_pointer_rtx);
1177 emit_use (stack_pointer_rtx);
1178
1179 /* If the architecture is using a GP register, we must
1180 conservatively assume that the target function makes use of it.
1181 The prologue of functions with nonlocal gotos must therefore
1182 initialize the GP register to the appropriate value, and we
1183 must then make sure that this value is live at the point
1184 of the jump. (Note that this doesn't necessarily apply
1185 to targets with a nonlocal_goto pattern; they are free
1186 to implement it in their own way. Note also that this is
1187 a no-op if the GP register is a global invariant.) */
1188 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1189 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1190 emit_use (pic_offset_table_rtx);
1191
1192 emit_indirect_jump (r_label);
1193 }
1194
1195 /* Search backwards to the jump insn and mark it as a
1196 non-local goto. */
1197 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1198 {
1199 if (JUMP_P (insn))
1200 {
1201 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1202 break;
1203 }
1204 else if (CALL_P (insn))
1205 break;
1206 }
1207
1208 return const0_rtx;
1209 }
1210
1211 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1212 (not all will be used on all machines) that was passed to __builtin_setjmp.
1213 It updates the stack pointer in that block to correspond to the current
1214 stack pointer. */
1215
1216 static void
1217 expand_builtin_update_setjmp_buf (rtx buf_addr)
1218 {
1219 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1220 rtx stack_save
1221 = gen_rtx_MEM (sa_mode,
1222 memory_address
1223 (sa_mode,
1224 plus_constant (Pmode, buf_addr,
1225 2 * GET_MODE_SIZE (Pmode))));
1226
1227 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1228 }
1229
1230 /* Expand a call to __builtin_prefetch. For a target that does not support
1231 data prefetch, evaluate the memory address argument in case it has side
1232 effects. */
1233
1234 static void
1235 expand_builtin_prefetch (tree exp)
1236 {
1237 tree arg0, arg1, arg2;
1238 int nargs;
1239 rtx op0, op1, op2;
1240
1241 if (!validate_arglist (exp, POINTER_TYPE, 0))
1242 return;
1243
1244 arg0 = CALL_EXPR_ARG (exp, 0);
1245
1246 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1247 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1248 locality). */
1249 nargs = call_expr_nargs (exp);
1250 if (nargs > 1)
1251 arg1 = CALL_EXPR_ARG (exp, 1);
1252 else
1253 arg1 = integer_zero_node;
1254 if (nargs > 2)
1255 arg2 = CALL_EXPR_ARG (exp, 2);
1256 else
1257 arg2 = integer_three_node;
1258
1259 /* Argument 0 is an address. */
1260 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1261
1262 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1263 if (TREE_CODE (arg1) != INTEGER_CST)
1264 {
1265 error ("second argument to %<__builtin_prefetch%> must be a constant");
1266 arg1 = integer_zero_node;
1267 }
1268 op1 = expand_normal (arg1);
1269 /* Argument 1 must be either zero or one. */
1270 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1271 {
1272 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1273 " using zero");
1274 op1 = const0_rtx;
1275 }
1276
1277 /* Argument 2 (locality) must be a compile-time constant int. */
1278 if (TREE_CODE (arg2) != INTEGER_CST)
1279 {
1280 error ("third argument to %<__builtin_prefetch%> must be a constant");
1281 arg2 = integer_zero_node;
1282 }
1283 op2 = expand_normal (arg2);
1284 /* Argument 2 must be 0, 1, 2, or 3. */
1285 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1286 {
1287 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1288 op2 = const0_rtx;
1289 }
1290
1291 #ifdef HAVE_prefetch
1292 if (HAVE_prefetch)
1293 {
1294 struct expand_operand ops[3];
1295
1296 create_address_operand (&ops[0], op0);
1297 create_integer_operand (&ops[1], INTVAL (op1));
1298 create_integer_operand (&ops[2], INTVAL (op2));
1299 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1300 return;
1301 }
1302 #endif
1303
1304 /* Don't do anything with direct references to volatile memory, but
1305 generate code to handle other side effects. */
1306 if (!MEM_P (op0) && side_effects_p (op0))
1307 emit_insn (op0);
1308 }
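/* The source-level builtin being expanded, as an illustrative
   #if 0-guarded sketch (argument meanings per the checks above):  */
#if 0
static void
prefetch_example (int *a, int i)
{
  __builtin_prefetch (&a[i + 8]);	/* rw defaults to 0, locality to 3.  */
  __builtin_prefetch (&a[i + 8], 1, 0);	/* Write hint, minimal locality.  */
}
#endif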
1309
1310 /* Get a MEM rtx for expression EXP which is the address of an operand
1311 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1312 the maximum length of the block of memory that might be accessed or
1313 NULL if unknown. */
1314
1315 static rtx
1316 get_memory_rtx (tree exp, tree len)
1317 {
1318 tree orig_exp = exp;
1319 rtx addr, mem;
1320
1321 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1322 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1323 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1324 exp = TREE_OPERAND (exp, 0);
1325
1326 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1327 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1328
1329 /* Get an expression we can use to find the attributes to assign to MEM.
1330 First remove any nops. */
1331 while (CONVERT_EXPR_P (exp)
1332 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1333 exp = TREE_OPERAND (exp, 0);
1334
1335 /* Build a MEM_REF representing the whole accessed area as a byte blob
1336 (as builtin stringops may alias with anything). */
1337 exp = fold_build2 (MEM_REF,
1338 build_array_type (char_type_node,
1339 build_range_type (sizetype,
1340 size_one_node, len)),
1341 exp, build_int_cst (ptr_type_node, 0));
1342
1343 /* If the MEM_REF has no acceptable address, try to get the base object
1344 from the original address we got, and build an all-aliasing
1345 unknown-sized access to that one. */
1346 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1347 set_mem_attributes (mem, exp, 0);
1348 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1349 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1350 0))))
1351 {
1352 exp = build_fold_addr_expr (exp);
1353 exp = fold_build2 (MEM_REF,
1354 build_array_type (char_type_node,
1355 build_range_type (sizetype,
1356 size_zero_node,
1357 NULL)),
1358 exp, build_int_cst (ptr_type_node, 0));
1359 set_mem_attributes (mem, exp, 0);
1360 }
1361 set_mem_alias_set (mem, 0);
1362 return mem;
1363 }
1364 \f
1365 /* Built-in functions to perform an untyped call and return. */
1366
1367 #define apply_args_mode \
1368 (this_target_builtins->x_apply_args_mode)
1369 #define apply_result_mode \
1370 (this_target_builtins->x_apply_result_mode)
1371
1372 /* Return the size required for the block returned by __builtin_apply_args,
1373 and initialize apply_args_mode. */
1374
1375 static int
1376 apply_args_size (void)
1377 {
1378 static int size = -1;
1379 int align;
1380 unsigned int regno;
1381 machine_mode mode;
1382
1383 /* The values computed by this function never change. */
1384 if (size < 0)
1385 {
1386 /* The first value is the incoming arg-pointer. */
1387 size = GET_MODE_SIZE (Pmode);
1388
1389 /* The second value is the structure value address unless this is
1390 passed as an "invisible" first argument. */
1391 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1392 size += GET_MODE_SIZE (Pmode);
1393
1394 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1395 if (FUNCTION_ARG_REGNO_P (regno))
1396 {
1397 mode = targetm.calls.get_raw_arg_mode (regno);
1398
1399 gcc_assert (mode != VOIDmode);
1400
1401 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1402 if (size % align != 0)
1403 size = CEIL (size, align) * align;
1404 size += GET_MODE_SIZE (mode);
1405 apply_args_mode[regno] = mode;
1406 }
1407 else
1408 {
1409 apply_args_mode[regno] = VOIDmode;
1410 }
1411 }
1412 return size;
1413 }
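/* A worked example of the rounding above, with illustrative numbers:
   for size == 12 and an 8-byte register mode, 12 % 8 != 0, so
   size = CEIL (12, 8) * 8 == 16 before GET_MODE_SIZE is added.  */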
1414
1415 /* Return the size required for the block returned by __builtin_apply,
1416 and initialize apply_result_mode. */
1417
1418 static int
1419 apply_result_size (void)
1420 {
1421 static int size = -1;
1422 int align, regno;
1423 machine_mode mode;
1424
1425 /* The values computed by this function never change. */
1426 if (size < 0)
1427 {
1428 size = 0;
1429
1430 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1431 if (targetm.calls.function_value_regno_p (regno))
1432 {
1433 mode = targetm.calls.get_raw_result_mode (regno);
1434
1435 gcc_assert (mode != VOIDmode);
1436
1437 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1438 if (size % align != 0)
1439 size = CEIL (size, align) * align;
1440 size += GET_MODE_SIZE (mode);
1441 apply_result_mode[regno] = mode;
1442 }
1443 else
1444 apply_result_mode[regno] = VOIDmode;
1445
1446 /* Allow targets that use untyped_call and untyped_return to override
1447 the size so that machine-specific information can be stored here. */
1448 #ifdef APPLY_RESULT_SIZE
1449 size = APPLY_RESULT_SIZE;
1450 #endif
1451 }
1452 return size;
1453 }
1454
1455 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1456 /* Create a vector describing the result block RESULT. If SAVEP is true,
1457 the result block is used to save the values; otherwise it is used to
1458 restore the values. */
1459
1460 static rtx
1461 result_vector (int savep, rtx result)
1462 {
1463 int regno, size, align, nelts;
1464 machine_mode mode;
1465 rtx reg, mem;
1466 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1467
1468 size = nelts = 0;
1469 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1470 if ((mode = apply_result_mode[regno]) != VOIDmode)
1471 {
1472 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1473 if (size % align != 0)
1474 size = CEIL (size, align) * align;
1475 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1476 mem = adjust_address (result, mode, size);
1477 savevec[nelts++] = (savep
1478 ? gen_rtx_SET (VOIDmode, mem, reg)
1479 : gen_rtx_SET (VOIDmode, reg, mem));
1480 size += GET_MODE_SIZE (mode);
1481 }
1482 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1483 }
1484 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1485
1486 /* Save the state required to perform an untyped call with the same
1487 arguments as were passed to the current function. */
1488
1489 static rtx
1490 expand_builtin_apply_args_1 (void)
1491 {
1492 rtx registers, tem;
1493 int size, align, regno;
1494 machine_mode mode;
1495 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1496
1497 /* Create a block where the arg-pointer, structure value address,
1498 and argument registers can be saved. */
1499 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1500
1501 /* Walk past the arg-pointer and structure value address. */
1502 size = GET_MODE_SIZE (Pmode);
1503 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1504 size += GET_MODE_SIZE (Pmode);
1505
1506 /* Save each register used in calling a function to the block. */
1507 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1508 if ((mode = apply_args_mode[regno]) != VOIDmode)
1509 {
1510 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1511 if (size % align != 0)
1512 size = CEIL (size, align) * align;
1513
1514 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1515
1516 emit_move_insn (adjust_address (registers, mode, size), tem);
1517 size += GET_MODE_SIZE (mode);
1518 }
1519
1520 /* Save the arg pointer to the block. */
1521 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1522 #ifdef STACK_GROWS_DOWNWARD
1523 /* We need the pointer as the arguments were actually passed to us by the
1524 caller, not as we might have pretended. Make sure it's a valid
1525 operand, as emit_move_insn isn't expected to handle a PLUS. */
1526 tem
1527 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1528 NULL_RTX);
1529 #endif
1530 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1531
1532 size = GET_MODE_SIZE (Pmode);
1533
1534 /* Save the structure value address unless this is passed as an
1535 "invisible" first argument. */
1536 if (struct_incoming_value)
1537 {
1538 emit_move_insn (adjust_address (registers, Pmode, size),
1539 copy_to_reg (struct_incoming_value));
1540 size += GET_MODE_SIZE (Pmode);
1541 }
1542
1543 /* Return the address of the block. */
1544 return copy_addr_to_reg (XEXP (registers, 0));
1545 }
1546
1547 /* __builtin_apply_args returns a block of memory allocated on
1548 the stack into which is stored the arg pointer, structure
1549 value address, static chain, and all the registers that might
1550 possibly be used in performing a function call. The code is
1551 moved to the start of the function so the incoming values are
1552 saved. */
1553
1554 static rtx
1555 expand_builtin_apply_args (void)
1556 {
1557 /* Don't do __builtin_apply_args more than once in a function.
1558 Save the result of the first call and reuse it. */
1559 if (apply_args_value != 0)
1560 return apply_args_value;
1561 {
1562 /* When this function is called, it means that registers must be
1563 saved on entry to this function. So we migrate the
1564 call to the first insn of this function. */
1565 rtx temp;
1566 rtx seq;
1567
1568 start_sequence ();
1569 temp = expand_builtin_apply_args_1 ();
1570 seq = get_insns ();
1571 end_sequence ();
1572
1573 apply_args_value = temp;
1574
1575 /* Put the insns after the NOTE that starts the function.
1576 If this is inside a start_sequence, make the outer-level insn
1577 chain current, so the code is placed at the start of the
1578 function. If internal_arg_pointer is a non-virtual pseudo,
1579 it needs to be placed after the function that initializes
1580 that pseudo. */
1581 push_topmost_sequence ();
1582 if (REG_P (crtl->args.internal_arg_pointer)
1583 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1584 emit_insn_before (seq, parm_birth_insn);
1585 else
1586 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1587 pop_topmost_sequence ();
1588 return temp;
1589 }
1590 }
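/* The user-level feature these functions implement, as a hedged sketch
   (see "Constructing Calls" in the GCC manual; 64 is an arbitrary
   caller-chosen upper bound on the argument block size):  */
#if 0
void *
forward_call (void (*target) (void))
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply (target, args, 64);
  __builtin_return (result);
}
#endif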
1591
1592 /* Perform an untyped call and save the state required to perform an
1593 untyped return of whatever value was returned by the given function. */
1594
1595 static rtx
1596 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1597 {
1598 int size, align, regno;
1599 machine_mode mode;
1600 rtx incoming_args, result, reg, dest, src;
1601 rtx_call_insn *call_insn;
1602 rtx old_stack_level = 0;
1603 rtx call_fusage = 0;
1604 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1605
1606 arguments = convert_memory_address (Pmode, arguments);
1607
1608 /* Create a block where the return registers can be saved. */
1609 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1610
1611 /* Fetch the arg pointer from the ARGUMENTS block. */
1612 incoming_args = gen_reg_rtx (Pmode);
1613 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1614 #ifndef STACK_GROWS_DOWNWARD
1615 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1616 incoming_args, 0, OPTAB_LIB_WIDEN);
1617 #endif
1618
1619 /* Push a new argument block and copy the arguments. Do not allow
1620 the (potential) memcpy call below to interfere with our stack
1621 manipulations. */
1622 do_pending_stack_adjust ();
1623 NO_DEFER_POP;
1624
1625 /* Save the stack with nonlocal if available. */
1626 #ifdef HAVE_save_stack_nonlocal
1627 if (HAVE_save_stack_nonlocal)
1628 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1629 else
1630 #endif
1631 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1632
1633 /* Allocate a block of memory onto the stack and copy the memory
1634 arguments to the outgoing arguments address. We can pass TRUE
1635 as the 4th argument because we just saved the stack pointer
1636 and will restore it right after the call. */
1637 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1638
1639 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1640 may have already set current_function_calls_alloca to true.
1641 current_function_calls_alloca won't be set if argsize is zero,
1642 so we have to guarantee need_drap is true here. */
1643 if (SUPPORTS_STACK_ALIGNMENT)
1644 crtl->need_drap = true;
1645
1646 dest = virtual_outgoing_args_rtx;
1647 #ifndef STACK_GROWS_DOWNWARD
1648 if (CONST_INT_P (argsize))
1649 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1650 else
1651 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1652 #endif
1653 dest = gen_rtx_MEM (BLKmode, dest);
1654 set_mem_align (dest, PARM_BOUNDARY);
1655 src = gen_rtx_MEM (BLKmode, incoming_args);
1656 set_mem_align (src, PARM_BOUNDARY);
1657 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1658
1659 /* Refer to the argument block. */
1660 apply_args_size ();
1661 arguments = gen_rtx_MEM (BLKmode, arguments);
1662 set_mem_align (arguments, PARM_BOUNDARY);
1663
1664 /* Walk past the arg-pointer and structure value address. */
1665 size = GET_MODE_SIZE (Pmode);
1666 if (struct_value)
1667 size += GET_MODE_SIZE (Pmode);
1668
1669 /* Restore each of the registers previously saved. Make USE insns
1670 for each of these registers for use in making the call. */
1671 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1672 if ((mode = apply_args_mode[regno]) != VOIDmode)
1673 {
1674 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1675 if (size % align != 0)
1676 size = CEIL (size, align) * align;
1677 reg = gen_rtx_REG (mode, regno);
1678 emit_move_insn (reg, adjust_address (arguments, mode, size));
1679 use_reg (&call_fusage, reg);
1680 size += GET_MODE_SIZE (mode);
1681 }
1682
1683 /* Restore the structure value address unless this is passed as an
1684 "invisible" first argument. */
1685 size = GET_MODE_SIZE (Pmode);
1686 if (struct_value)
1687 {
1688 rtx value = gen_reg_rtx (Pmode);
1689 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1690 emit_move_insn (struct_value, value);
1691 if (REG_P (struct_value))
1692 use_reg (&call_fusage, struct_value);
1693 size += GET_MODE_SIZE (Pmode);
1694 }
1695
1696 /* All arguments and registers used for the call are set up by now! */
1697 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1698
1699 /* Ensure the address is valid.  A SYMBOL_REF is already valid, so nothing
1700 needs doing, and we don't want to load it into a register as an
1701 optimization, because prepare_call_address already did so if needed. */
1702 if (GET_CODE (function) != SYMBOL_REF)
1703 function = memory_address (FUNCTION_MODE, function);
1704
1705 /* Generate the actual call instruction and save the return value. */
1706 #ifdef HAVE_untyped_call
1707 if (HAVE_untyped_call)
1708 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1709 result, result_vector (1, result)));
1710 else
1711 #endif
1712 #ifdef HAVE_call_value
1713 if (HAVE_call_value)
1714 {
1715 rtx valreg = 0;
1716
1717 /* Locate the unique return register. It is not possible to
1718 express a call that sets more than one return register using
1719 call_value; use untyped_call for that. In fact, untyped_call
1720 only needs to save the return registers in the given block. */
1721 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1722 if ((mode = apply_result_mode[regno]) != VOIDmode)
1723 {
1724 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1725
1726 valreg = gen_rtx_REG (mode, regno);
1727 }
1728
1729 emit_call_insn (GEN_CALL_VALUE (valreg,
1730 gen_rtx_MEM (FUNCTION_MODE, function),
1731 const0_rtx, NULL_RTX, const0_rtx));
1732
1733 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1734 }
1735 else
1736 #endif
1737 gcc_unreachable ();
1738
1739 /* Find the CALL insn we just emitted, and attach the register usage
1740 information. */
1741 call_insn = last_call_insn ();
1742 add_function_usage_to (call_insn, call_fusage);
1743
1744 /* Restore the stack. */
1745 #ifdef HAVE_save_stack_nonlocal
1746 if (HAVE_save_stack_nonlocal)
1747 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1748 else
1749 #endif
1750 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1751 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1752
1753 OK_DEFER_POP;
1754
1755 /* Return the address of the result block. */
1756 result = copy_addr_to_reg (XEXP (result, 0));
1757 return convert_memory_address (ptr_mode, result);
1758 }
1759
1760 /* Perform an untyped return. */
1761
1762 static void
1763 expand_builtin_return (rtx result)
1764 {
1765 int size, align, regno;
1766 machine_mode mode;
1767 rtx reg;
1768 rtx_insn *call_fusage = 0;
1769
1770 result = convert_memory_address (Pmode, result);
1771
1772 apply_result_size ();
1773 result = gen_rtx_MEM (BLKmode, result);
1774
1775 #ifdef HAVE_untyped_return
1776 if (HAVE_untyped_return)
1777 {
1778 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1779 emit_barrier ();
1780 return;
1781 }
1782 #endif
1783
1784 /* Restore the return value and note that each value is used. */
1785 size = 0;
1786 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1787 if ((mode = apply_result_mode[regno]) != VOIDmode)
1788 {
1789 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1790 if (size % align != 0)
1791 size = CEIL (size, align) * align;
1792 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1793 emit_move_insn (reg, adjust_address (result, mode, size));
1794
1795 push_to_sequence (call_fusage);
1796 emit_use (reg);
1797 call_fusage = get_insns ();
1798 end_sequence ();
1799 size += GET_MODE_SIZE (mode);
1800 }
1801
1802 /* Put the USE insns before the return. */
1803 emit_insn (call_fusage);
1804
1805 /* Return whatever value was restored by jumping directly to the end
1806 of the function. */
1807 expand_naked_return ();
1808 }
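
/* As a user-level illustration (a hedged sketch, not code from GCC
   itself), the three builtins expanded above are typically combined to
   forward a call whose arguments are unknown at compile time:

     void
     forward (void)
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (result);
     }

   where target_fn and the argument-block size of 64 are hypothetical.  */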
1809
1810 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1811
1812 static enum type_class
1813 type_to_class (tree type)
1814 {
1815 switch (TREE_CODE (type))
1816 {
1817 case VOID_TYPE: return void_type_class;
1818 case INTEGER_TYPE: return integer_type_class;
1819 case ENUMERAL_TYPE: return enumeral_type_class;
1820 case BOOLEAN_TYPE: return boolean_type_class;
1821 case POINTER_TYPE: return pointer_type_class;
1822 case REFERENCE_TYPE: return reference_type_class;
1823 case OFFSET_TYPE: return offset_type_class;
1824 case REAL_TYPE: return real_type_class;
1825 case COMPLEX_TYPE: return complex_type_class;
1826 case FUNCTION_TYPE: return function_type_class;
1827 case METHOD_TYPE: return method_type_class;
1828 case RECORD_TYPE: return record_type_class;
1829 case UNION_TYPE:
1830 case QUAL_UNION_TYPE: return union_type_class;
1831 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1832 ? string_type_class : array_type_class);
1833 case LANG_TYPE: return lang_type_class;
1834 default: return no_type_class;
1835 }
1836 }
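
/* A few concrete instances of the mapping above, using the type_class
   values from typeclass.h:

     __builtin_classify_type (0)            => integer_type_class
     __builtin_classify_type (0.0)          => real_type_class
     __builtin_classify_type ((char *) 0)   => pointer_type_class
*/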
1837
1838 /* Expand a call EXP to __builtin_classify_type. */
1839
1840 static rtx
1841 expand_builtin_classify_type (tree exp)
1842 {
1843 if (call_expr_nargs (exp))
1844 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1845 return GEN_INT (no_type_class);
1846 }
1847
1848 /* This helper macro, meant to be used in mathfn_built_in below,
1849 determines which among a set of three builtin math functions is
1850 appropriate for a given type mode. The `F' and `L' cases are
1851 automatically generated from the `double' case. */
1852 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1853 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1854 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1855 fcodel = BUILT_IN_MATHFN##L ; break;
1856 /* Similar to above, but appends _R after any F/L suffix. */
1857 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1858 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1859 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1860 fcodel = BUILT_IN_MATHFN##L_R ; break;
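
/* For instance, CASE_MATHFN (BUILT_IN_SQRT) expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;
*/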
1861
1862 /* Return the mathematical function equivalent to FN but operating directly
1863 on TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
1864 declaration, otherwise use the explicit declaration.  If we can't do the
1865 conversion, return zero. */
1866
1867 static tree
1868 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1869 {
1870 enum built_in_function fcode, fcodef, fcodel, fcode2;
1871
1872 switch (fn)
1873 {
1874 CASE_MATHFN (BUILT_IN_ACOS)
1875 CASE_MATHFN (BUILT_IN_ACOSH)
1876 CASE_MATHFN (BUILT_IN_ASIN)
1877 CASE_MATHFN (BUILT_IN_ASINH)
1878 CASE_MATHFN (BUILT_IN_ATAN)
1879 CASE_MATHFN (BUILT_IN_ATAN2)
1880 CASE_MATHFN (BUILT_IN_ATANH)
1881 CASE_MATHFN (BUILT_IN_CBRT)
1882 CASE_MATHFN (BUILT_IN_CEIL)
1883 CASE_MATHFN (BUILT_IN_CEXPI)
1884 CASE_MATHFN (BUILT_IN_COPYSIGN)
1885 CASE_MATHFN (BUILT_IN_COS)
1886 CASE_MATHFN (BUILT_IN_COSH)
1887 CASE_MATHFN (BUILT_IN_DREM)
1888 CASE_MATHFN (BUILT_IN_ERF)
1889 CASE_MATHFN (BUILT_IN_ERFC)
1890 CASE_MATHFN (BUILT_IN_EXP)
1891 CASE_MATHFN (BUILT_IN_EXP10)
1892 CASE_MATHFN (BUILT_IN_EXP2)
1893 CASE_MATHFN (BUILT_IN_EXPM1)
1894 CASE_MATHFN (BUILT_IN_FABS)
1895 CASE_MATHFN (BUILT_IN_FDIM)
1896 CASE_MATHFN (BUILT_IN_FLOOR)
1897 CASE_MATHFN (BUILT_IN_FMA)
1898 CASE_MATHFN (BUILT_IN_FMAX)
1899 CASE_MATHFN (BUILT_IN_FMIN)
1900 CASE_MATHFN (BUILT_IN_FMOD)
1901 CASE_MATHFN (BUILT_IN_FREXP)
1902 CASE_MATHFN (BUILT_IN_GAMMA)
1903 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1904 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1905 CASE_MATHFN (BUILT_IN_HYPOT)
1906 CASE_MATHFN (BUILT_IN_ILOGB)
1907 CASE_MATHFN (BUILT_IN_ICEIL)
1908 CASE_MATHFN (BUILT_IN_IFLOOR)
1909 CASE_MATHFN (BUILT_IN_INF)
1910 CASE_MATHFN (BUILT_IN_IRINT)
1911 CASE_MATHFN (BUILT_IN_IROUND)
1912 CASE_MATHFN (BUILT_IN_ISINF)
1913 CASE_MATHFN (BUILT_IN_J0)
1914 CASE_MATHFN (BUILT_IN_J1)
1915 CASE_MATHFN (BUILT_IN_JN)
1916 CASE_MATHFN (BUILT_IN_LCEIL)
1917 CASE_MATHFN (BUILT_IN_LDEXP)
1918 CASE_MATHFN (BUILT_IN_LFLOOR)
1919 CASE_MATHFN (BUILT_IN_LGAMMA)
1920 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1921 CASE_MATHFN (BUILT_IN_LLCEIL)
1922 CASE_MATHFN (BUILT_IN_LLFLOOR)
1923 CASE_MATHFN (BUILT_IN_LLRINT)
1924 CASE_MATHFN (BUILT_IN_LLROUND)
1925 CASE_MATHFN (BUILT_IN_LOG)
1926 CASE_MATHFN (BUILT_IN_LOG10)
1927 CASE_MATHFN (BUILT_IN_LOG1P)
1928 CASE_MATHFN (BUILT_IN_LOG2)
1929 CASE_MATHFN (BUILT_IN_LOGB)
1930 CASE_MATHFN (BUILT_IN_LRINT)
1931 CASE_MATHFN (BUILT_IN_LROUND)
1932 CASE_MATHFN (BUILT_IN_MODF)
1933 CASE_MATHFN (BUILT_IN_NAN)
1934 CASE_MATHFN (BUILT_IN_NANS)
1935 CASE_MATHFN (BUILT_IN_NEARBYINT)
1936 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1937 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1938 CASE_MATHFN (BUILT_IN_POW)
1939 CASE_MATHFN (BUILT_IN_POWI)
1940 CASE_MATHFN (BUILT_IN_POW10)
1941 CASE_MATHFN (BUILT_IN_REMAINDER)
1942 CASE_MATHFN (BUILT_IN_REMQUO)
1943 CASE_MATHFN (BUILT_IN_RINT)
1944 CASE_MATHFN (BUILT_IN_ROUND)
1945 CASE_MATHFN (BUILT_IN_SCALB)
1946 CASE_MATHFN (BUILT_IN_SCALBLN)
1947 CASE_MATHFN (BUILT_IN_SCALBN)
1948 CASE_MATHFN (BUILT_IN_SIGNBIT)
1949 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1950 CASE_MATHFN (BUILT_IN_SIN)
1951 CASE_MATHFN (BUILT_IN_SINCOS)
1952 CASE_MATHFN (BUILT_IN_SINH)
1953 CASE_MATHFN (BUILT_IN_SQRT)
1954 CASE_MATHFN (BUILT_IN_TAN)
1955 CASE_MATHFN (BUILT_IN_TANH)
1956 CASE_MATHFN (BUILT_IN_TGAMMA)
1957 CASE_MATHFN (BUILT_IN_TRUNC)
1958 CASE_MATHFN (BUILT_IN_Y0)
1959 CASE_MATHFN (BUILT_IN_Y1)
1960 CASE_MATHFN (BUILT_IN_YN)
1961
1962 default:
1963 return NULL_TREE;
1964 }
1965
1966 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1967 fcode2 = fcode;
1968 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1969 fcode2 = fcodef;
1970 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1971 fcode2 = fcodel;
1972 else
1973 return NULL_TREE;
1974
1975 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1976 return NULL_TREE;
1977
1978 return builtin_decl_explicit (fcode2);
1979 }
1980
1981 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1982
1983 tree
1984 mathfn_built_in (tree type, enum built_in_function fn)
1985 {
1986 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1987 }
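
/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) maps the
   double builtin onto its float variant and returns the decl for sinf,
   or NULL_TREE when no implicit declaration is available.  A sketch of a
   typical use:

     tree fn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_SQRT);
     if (fn)
       ... build a call to FN on ARG ...
*/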
1988
1989 /* If errno must be maintained, expand the RTL to check if the result,
1990 TARGET, of a built-in function call, EXP, is NaN, and if so set
1991 errno to EDOM. */
1992
1993 static void
1994 expand_errno_check (tree exp, rtx target)
1995 {
1996 rtx_code_label *lab = gen_label_rtx ();
1997
1998 /* Test the result; if it is NaN, set errno=EDOM because
1999 the argument was not in the domain. */
2000 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
2001 NULL_RTX, NULL_RTX, lab,
2002 /* The jump is very likely. */
2003 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
2004
2005 #ifdef TARGET_EDOM
2006 /* If this built-in doesn't throw an exception, set errno directly. */
2007 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
2008 {
2009 #ifdef GEN_ERRNO_RTX
2010 rtx errno_rtx = GEN_ERRNO_RTX;
2011 #else
2012 rtx errno_rtx
2013 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2014 #endif
2015 emit_move_insn (errno_rtx,
2016 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2017 emit_label (lab);
2018 return;
2019 }
2020 #endif
2021
2022 /* Make sure the library call isn't expanded as a tail call. */
2023 CALL_EXPR_TAILCALL (exp) = 0;
2024
2025 /* We can't set errno=EDOM directly; let the library call do it.
2026 Pop the arguments right away in case the call gets deleted. */
2027 NO_DEFER_POP;
2028 expand_call (exp, target, 0);
2029 OK_DEFER_POP;
2030 emit_label (lab);
2031 }
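
/* The self-comparison above exploits the IEEE rule that a NaN compares
   unequal to itself: the branch is taken for any non-NaN result, and the
   fall-through path (reached only when the argument was outside the
   domain) either stores EDOM into errno directly or re-issues the library
   call so that the library sets errno.  In source terms this behaves like

     if (!(r == r))
       errno = EDOM;
*/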
2032
2033 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2034 Return NULL_RTX if a normal call should be emitted rather than expanding
2035 the function in-line. EXP is the expression that is a call to the builtin
2036 function; if convenient, the result should be placed in TARGET.
2037 SUBTARGET may be used as the target for computing one of EXP's operands. */
2038
2039 static rtx
2040 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2041 {
2042 optab builtin_optab;
2043 rtx op0;
2044 rtx_insn *insns;
2045 tree fndecl = get_callee_fndecl (exp);
2046 machine_mode mode;
2047 bool errno_set = false;
2048 bool try_widening = false;
2049 tree arg;
2050
2051 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2052 return NULL_RTX;
2053
2054 arg = CALL_EXPR_ARG (exp, 0);
2055
2056 switch (DECL_FUNCTION_CODE (fndecl))
2057 {
2058 CASE_FLT_FN (BUILT_IN_SQRT):
2059 errno_set = ! tree_expr_nonnegative_p (arg);
2060 try_widening = true;
2061 builtin_optab = sqrt_optab;
2062 break;
2063 CASE_FLT_FN (BUILT_IN_EXP):
2064 errno_set = true; builtin_optab = exp_optab; break;
2065 CASE_FLT_FN (BUILT_IN_EXP10):
2066 CASE_FLT_FN (BUILT_IN_POW10):
2067 errno_set = true; builtin_optab = exp10_optab; break;
2068 CASE_FLT_FN (BUILT_IN_EXP2):
2069 errno_set = true; builtin_optab = exp2_optab; break;
2070 CASE_FLT_FN (BUILT_IN_EXPM1):
2071 errno_set = true; builtin_optab = expm1_optab; break;
2072 CASE_FLT_FN (BUILT_IN_LOGB):
2073 errno_set = true; builtin_optab = logb_optab; break;
2074 CASE_FLT_FN (BUILT_IN_LOG):
2075 errno_set = true; builtin_optab = log_optab; break;
2076 CASE_FLT_FN (BUILT_IN_LOG10):
2077 errno_set = true; builtin_optab = log10_optab; break;
2078 CASE_FLT_FN (BUILT_IN_LOG2):
2079 errno_set = true; builtin_optab = log2_optab; break;
2080 CASE_FLT_FN (BUILT_IN_LOG1P):
2081 errno_set = true; builtin_optab = log1p_optab; break;
2082 CASE_FLT_FN (BUILT_IN_ASIN):
2083 builtin_optab = asin_optab; break;
2084 CASE_FLT_FN (BUILT_IN_ACOS):
2085 builtin_optab = acos_optab; break;
2086 CASE_FLT_FN (BUILT_IN_TAN):
2087 builtin_optab = tan_optab; break;
2088 CASE_FLT_FN (BUILT_IN_ATAN):
2089 builtin_optab = atan_optab; break;
2090 CASE_FLT_FN (BUILT_IN_FLOOR):
2091 builtin_optab = floor_optab; break;
2092 CASE_FLT_FN (BUILT_IN_CEIL):
2093 builtin_optab = ceil_optab; break;
2094 CASE_FLT_FN (BUILT_IN_TRUNC):
2095 builtin_optab = btrunc_optab; break;
2096 CASE_FLT_FN (BUILT_IN_ROUND):
2097 builtin_optab = round_optab; break;
2098 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2099 builtin_optab = nearbyint_optab;
2100 if (flag_trapping_math)
2101 break;
2102 /* Else fallthrough and expand as rint. */
2103 CASE_FLT_FN (BUILT_IN_RINT):
2104 builtin_optab = rint_optab; break;
2105 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2106 builtin_optab = significand_optab; break;
2107 default:
2108 gcc_unreachable ();
2109 }
2110
2111 /* Make a suitable register to place result in. */
2112 mode = TYPE_MODE (TREE_TYPE (exp));
2113
2114 if (! flag_errno_math || ! HONOR_NANS (mode))
2115 errno_set = false;
2116
2117 /* Before working hard, check whether the instruction is available, but try
2118 to widen the mode for specific operations. */
2119 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2120 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2121 && (!errno_set || !optimize_insn_for_size_p ()))
2122 {
2123 rtx result = gen_reg_rtx (mode);
2124
2125 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2126 need to expand the argument again. This way, we will not perform
2127 side-effects more than once. */
2128 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2129
2130 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2131
2132 start_sequence ();
2133
2134 /* Compute into RESULT.
2135 Set RESULT to wherever the result comes back. */
2136 result = expand_unop (mode, builtin_optab, op0, result, 0);
2137
2138 if (result != 0)
2139 {
2140 if (errno_set)
2141 expand_errno_check (exp, result);
2142
2143 /* Output the entire sequence. */
2144 insns = get_insns ();
2145 end_sequence ();
2146 emit_insn (insns);
2147 return result;
2148 }
2149
2150 /* If we were unable to expand via the builtin, stop the sequence
2151 (without outputting the insns) and call to the library function
2152 with the stabilized argument list. */
2153 end_sequence ();
2154 }
2155
2156 return expand_call (exp, target, target == const0_rtx);
2157 }
2158
2159 /* Expand a call to the builtin binary math functions (pow and atan2).
2160 Return NULL_RTX if a normal call should be emitted rather than expanding the
2161 function in-line. EXP is the expression that is a call to the builtin
2162 function; if convenient, the result should be placed in TARGET.
2163 SUBTARGET may be used as the target for computing one of EXP's
2164 operands. */
2165
2166 static rtx
2167 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2168 {
2169 optab builtin_optab;
2170 rtx op0, op1, result;
2171 rtx_insn *insns;
2172 int op1_type = REAL_TYPE;
2173 tree fndecl = get_callee_fndecl (exp);
2174 tree arg0, arg1;
2175 machine_mode mode;
2176 bool errno_set = true;
2177
2178 switch (DECL_FUNCTION_CODE (fndecl))
2179 {
2180 CASE_FLT_FN (BUILT_IN_SCALBN):
2181 CASE_FLT_FN (BUILT_IN_SCALBLN):
2182 CASE_FLT_FN (BUILT_IN_LDEXP):
2183 op1_type = INTEGER_TYPE;
2184 default:
2185 break;
2186 }
2187
2188 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2189 return NULL_RTX;
2190
2191 arg0 = CALL_EXPR_ARG (exp, 0);
2192 arg1 = CALL_EXPR_ARG (exp, 1);
2193
2194 switch (DECL_FUNCTION_CODE (fndecl))
2195 {
2196 CASE_FLT_FN (BUILT_IN_POW):
2197 builtin_optab = pow_optab; break;
2198 CASE_FLT_FN (BUILT_IN_ATAN2):
2199 builtin_optab = atan2_optab; break;
2200 CASE_FLT_FN (BUILT_IN_SCALB):
2201 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2202 return 0;
2203 builtin_optab = scalb_optab; break;
2204 CASE_FLT_FN (BUILT_IN_SCALBN):
2205 CASE_FLT_FN (BUILT_IN_SCALBLN):
2206 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2207 return 0;
2208 /* Fall through... */
2209 CASE_FLT_FN (BUILT_IN_LDEXP):
2210 builtin_optab = ldexp_optab; break;
2211 CASE_FLT_FN (BUILT_IN_FMOD):
2212 builtin_optab = fmod_optab; break;
2213 CASE_FLT_FN (BUILT_IN_REMAINDER):
2214 CASE_FLT_FN (BUILT_IN_DREM):
2215 builtin_optab = remainder_optab; break;
2216 default:
2217 gcc_unreachable ();
2218 }
2219
2220 /* Make a suitable register to place result in. */
2221 mode = TYPE_MODE (TREE_TYPE (exp));
2222
2223 /* Before working hard, check whether the instruction is available. */
2224 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2225 return NULL_RTX;
2226
2227 result = gen_reg_rtx (mode);
2228
2229 if (! flag_errno_math || ! HONOR_NANS (mode))
2230 errno_set = false;
2231
2232 if (errno_set && optimize_insn_for_size_p ())
2233 return 0;
2234
2235 /* Always stabilize the argument list. */
2236 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2237 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2238
2239 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2240 op1 = expand_normal (arg1);
2241
2242 start_sequence ();
2243
2244 /* Compute into RESULT.
2245 Set RESULT to wherever the result comes back. */
2246 result = expand_binop (mode, builtin_optab, op0, op1,
2247 result, 0, OPTAB_DIRECT);
2248
2249 /* If we were unable to expand via the builtin, stop the sequence
2250 (without outputting the insns) and call to the library function
2251 with the stabilized argument list. */
2252 if (result == 0)
2253 {
2254 end_sequence ();
2255 return expand_call (exp, target, target == const0_rtx);
2256 }
2257
2258 if (errno_set)
2259 expand_errno_check (exp, result);
2260
2261 /* Output the entire sequence. */
2262 insns = get_insns ();
2263 end_sequence ();
2264 emit_insn (insns);
2265
2266 return result;
2267 }
2268
2269 /* Expand a call to the builtin ternary math functions (fma).
2270 Return NULL_RTX if a normal call should be emitted rather than expanding the
2271 function in-line. EXP is the expression that is a call to the builtin
2272 function; if convenient, the result should be placed in TARGET.
2273 SUBTARGET may be used as the target for computing one of EXP's
2274 operands. */
2275
2276 static rtx
2277 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2278 {
2279 optab builtin_optab;
2280 rtx op0, op1, op2, result;
2281 rtx_insn *insns;
2282 tree fndecl = get_callee_fndecl (exp);
2283 tree arg0, arg1, arg2;
2284 machine_mode mode;
2285
2286 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2287 return NULL_RTX;
2288
2289 arg0 = CALL_EXPR_ARG (exp, 0);
2290 arg1 = CALL_EXPR_ARG (exp, 1);
2291 arg2 = CALL_EXPR_ARG (exp, 2);
2292
2293 switch (DECL_FUNCTION_CODE (fndecl))
2294 {
2295 CASE_FLT_FN (BUILT_IN_FMA):
2296 builtin_optab = fma_optab; break;
2297 default:
2298 gcc_unreachable ();
2299 }
2300
2301 /* Make a suitable register to place result in. */
2302 mode = TYPE_MODE (TREE_TYPE (exp));
2303
2304 /* Before working hard, check whether the instruction is available. */
2305 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2306 return NULL_RTX;
2307
2308 result = gen_reg_rtx (mode);
2309
2310 /* Always stabilize the argument list. */
2311 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2312 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2313 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2314
2315 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2316 op1 = expand_normal (arg1);
2317 op2 = expand_normal (arg2);
2318
2319 start_sequence ();
2320
2321 /* Compute into RESULT.
2322 Set RESULT to wherever the result comes back. */
2323 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2324 result, 0);
2325
2326 /* If we were unable to expand via the builtin, stop the sequence
2327 (without outputting the insns) and call to the library function
2328 with the stabilized argument list. */
2329 if (result == 0)
2330 {
2331 end_sequence ();
2332 return expand_call (exp, target, target == const0_rtx);
2333 }
2334
2335 /* Output the entire sequence. */
2336 insns = get_insns ();
2337 end_sequence ();
2338 emit_insn (insns);
2339
2340 return result;
2341 }
2342
2343 /* Expand a call to the builtin sin and cos math functions.
2344 Return NULL_RTX if a normal call should be emitted rather than expanding the
2345 function in-line. EXP is the expression that is a call to the builtin
2346 function; if convenient, the result should be placed in TARGET.
2347 SUBTARGET may be used as the target for computing one of EXP's
2348 operands. */
2349
2350 static rtx
2351 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2352 {
2353 optab builtin_optab;
2354 rtx op0;
2355 rtx_insn *insns;
2356 tree fndecl = get_callee_fndecl (exp);
2357 machine_mode mode;
2358 tree arg;
2359
2360 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2361 return NULL_RTX;
2362
2363 arg = CALL_EXPR_ARG (exp, 0);
2364
2365 switch (DECL_FUNCTION_CODE (fndecl))
2366 {
2367 CASE_FLT_FN (BUILT_IN_SIN):
2368 CASE_FLT_FN (BUILT_IN_COS):
2369 builtin_optab = sincos_optab; break;
2370 default:
2371 gcc_unreachable ();
2372 }
2373
2374 /* Make a suitable register to place result in. */
2375 mode = TYPE_MODE (TREE_TYPE (exp));
2376
2377 /* Check if the sincos insn is available; otherwise fall back
2378 to the sin or cos insn. */
2379 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2380 switch (DECL_FUNCTION_CODE (fndecl))
2381 {
2382 CASE_FLT_FN (BUILT_IN_SIN):
2383 builtin_optab = sin_optab; break;
2384 CASE_FLT_FN (BUILT_IN_COS):
2385 builtin_optab = cos_optab; break;
2386 default:
2387 gcc_unreachable ();
2388 }
2389
2390 /* Before working hard, check whether the instruction is available. */
2391 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2392 {
2393 rtx result = gen_reg_rtx (mode);
2394
2395 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2396 need to expand the argument again. This way, we will not perform
2397 side-effects more than once. */
2398 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2399
2400 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2401
2402 start_sequence ();
2403
2404 /* Compute into RESULT.
2405 Set RESULT to wherever the result comes back. */
2406 if (builtin_optab == sincos_optab)
2407 {
2408 int ok;
2409
2410 switch (DECL_FUNCTION_CODE (fndecl))
2411 {
2412 CASE_FLT_FN (BUILT_IN_SIN):
2413 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2414 break;
2415 CASE_FLT_FN (BUILT_IN_COS):
2416 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2417 break;
2418 default:
2419 gcc_unreachable ();
2420 }
2421 gcc_assert (ok);
2422 }
2423 else
2424 result = expand_unop (mode, builtin_optab, op0, result, 0);
2425
2426 if (result != 0)
2427 {
2428 /* Output the entire sequence. */
2429 insns = get_insns ();
2430 end_sequence ();
2431 emit_insn (insns);
2432 return result;
2433 }
2434
2435 /* If we were unable to expand via the builtin, stop the sequence
2436 (without outputting the insns) and call to the library function
2437 with the stabilized argument list. */
2438 end_sequence ();
2439 }
2440
2441 return expand_call (exp, target, target == const0_rtx);
2442 }
2443
2444 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2445 return an RTL instruction code that implements the functionality.
2446 If that isn't possible or available return CODE_FOR_nothing. */
2447
2448 static enum insn_code
2449 interclass_mathfn_icode (tree arg, tree fndecl)
2450 {
2451 bool errno_set = false;
2452 optab builtin_optab = unknown_optab;
2453 machine_mode mode;
2454
2455 switch (DECL_FUNCTION_CODE (fndecl))
2456 {
2457 CASE_FLT_FN (BUILT_IN_ILOGB):
2458 errno_set = true; builtin_optab = ilogb_optab; break;
2459 CASE_FLT_FN (BUILT_IN_ISINF):
2460 builtin_optab = isinf_optab; break;
2461 case BUILT_IN_ISNORMAL:
2462 case BUILT_IN_ISFINITE:
2463 CASE_FLT_FN (BUILT_IN_FINITE):
2464 case BUILT_IN_FINITED32:
2465 case BUILT_IN_FINITED64:
2466 case BUILT_IN_FINITED128:
2467 case BUILT_IN_ISINFD32:
2468 case BUILT_IN_ISINFD64:
2469 case BUILT_IN_ISINFD128:
2470 /* These builtins have no optabs (yet). */
2471 break;
2472 default:
2473 gcc_unreachable ();
2474 }
2475
2476 /* There's no easy way to detect the case we need to set EDOM. */
2477 if (flag_errno_math && errno_set)
2478 return CODE_FOR_nothing;
2479
2480 /* Optab mode depends on the mode of the input argument. */
2481 mode = TYPE_MODE (TREE_TYPE (arg));
2482
2483 if (builtin_optab)
2484 return optab_handler (builtin_optab, mode);
2485 return CODE_FOR_nothing;
2486 }
2487
2488 /* Expand a call to one of the builtin math functions that operate on
2489 a floating point argument and output an integer result (ilogb, isinf,
2490 isnan, etc).
2491 Return 0 if a normal call should be emitted rather than expanding the
2492 function in-line. EXP is the expression that is a call to the builtin
2493 function; if convenient, the result should be placed in TARGET. */
2494
2495 static rtx
2496 expand_builtin_interclass_mathfn (tree exp, rtx target)
2497 {
2498 enum insn_code icode = CODE_FOR_nothing;
2499 rtx op0;
2500 tree fndecl = get_callee_fndecl (exp);
2501 machine_mode mode;
2502 tree arg;
2503
2504 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2505 return NULL_RTX;
2506
2507 arg = CALL_EXPR_ARG (exp, 0);
2508 icode = interclass_mathfn_icode (arg, fndecl);
2509 mode = TYPE_MODE (TREE_TYPE (arg));
2510
2511 if (icode != CODE_FOR_nothing)
2512 {
2513 struct expand_operand ops[1];
2514 rtx_insn *last = get_last_insn ();
2515 tree orig_arg = arg;
2516
2517 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2518 need to expand the argument again. This way, we will not perform
2519 side-effects more than once. */
2520 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2521
2522 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2523
2524 if (mode != GET_MODE (op0))
2525 op0 = convert_to_mode (mode, op0, 0);
2526
2527 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2528 if (maybe_legitimize_operands (icode, 0, 1, ops)
2529 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2530 return ops[0].value;
2531
2532 delete_insns_since (last);
2533 CALL_EXPR_ARG (exp, 0) = orig_arg;
2534 }
2535
2536 return NULL_RTX;
2537 }
2538
2539 /* Expand a call to the builtin sincos math function.
2540 Return NULL_RTX if a normal call should be emitted rather than expanding the
2541 function in-line. EXP is the expression that is a call to the builtin
2542 function. */
2543
2544 static rtx
2545 expand_builtin_sincos (tree exp)
2546 {
2547 rtx op0, op1, op2, target1, target2;
2548 machine_mode mode;
2549 tree arg, sinp, cosp;
2550 int result;
2551 location_t loc = EXPR_LOCATION (exp);
2552 tree alias_type, alias_off;
2553
2554 if (!validate_arglist (exp, REAL_TYPE,
2555 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2556 return NULL_RTX;
2557
2558 arg = CALL_EXPR_ARG (exp, 0);
2559 sinp = CALL_EXPR_ARG (exp, 1);
2560 cosp = CALL_EXPR_ARG (exp, 2);
2561
2562 /* Make a suitable register to place result in. */
2563 mode = TYPE_MODE (TREE_TYPE (arg));
2564
2565 /* Check if the sincos insn is available; otherwise emit the call. */
2566 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2567 return NULL_RTX;
2568
2569 target1 = gen_reg_rtx (mode);
2570 target2 = gen_reg_rtx (mode);
2571
2572 op0 = expand_normal (arg);
2573 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2574 alias_off = build_int_cst (alias_type, 0);
2575 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2576 sinp, alias_off));
2577 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2578 cosp, alias_off));
2579
2580 /* Compute into target1 and target2.
2581 Set TARGET to wherever the result comes back. */
2582 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2583 gcc_assert (result);
2584
2585 /* Move target1 and target2 to the memory locations indicated
2586 by op1 and op2. */
2587 emit_move_insn (op1, target1);
2588 emit_move_insn (op2, target2);
2589
2590 return const0_rtx;
2591 }
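
/* For example, a source-level call such as

     double s, c;
     sincos (x, &s, &c);

   is expanded here into a single sincos instruction when the target
   provides one, storing through both pointer arguments; const0_rtx is
   returned because sincos itself returns no value.  */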
2592
2593 /* Expand a call to the internal cexpi builtin to the sincos math function.
2594 EXP is the expression that is a call to the builtin function; if convenient,
2595 the result should be placed in TARGET. */
2596
2597 static rtx
2598 expand_builtin_cexpi (tree exp, rtx target)
2599 {
2600 tree fndecl = get_callee_fndecl (exp);
2601 tree arg, type;
2602 machine_mode mode;
2603 rtx op0, op1, op2;
2604 location_t loc = EXPR_LOCATION (exp);
2605
2606 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2607 return NULL_RTX;
2608
2609 arg = CALL_EXPR_ARG (exp, 0);
2610 type = TREE_TYPE (arg);
2611 mode = TYPE_MODE (TREE_TYPE (arg));
2612
2613 /* Try expanding via a sincos optab, falling back to emitting a libcall
2614 to sincos or cexp.  We are sure to have one of them because cexpi
2615 is only generated from sincos or cexp, or when either is available. */
2616 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2617 {
2618 op1 = gen_reg_rtx (mode);
2619 op2 = gen_reg_rtx (mode);
2620
2621 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2622
2623 /* Compute into op1 and op2. */
2624 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2625 }
2626 else if (targetm.libc_has_function (function_sincos))
2627 {
2628 tree call, fn = NULL_TREE;
2629 tree top1, top2;
2630 rtx op1a, op2a;
2631
2632 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2633 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2634 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2635 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2636 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2637 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2638 else
2639 gcc_unreachable ();
2640
2641 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2642 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2643 op1a = copy_addr_to_reg (XEXP (op1, 0));
2644 op2a = copy_addr_to_reg (XEXP (op2, 0));
2645 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2646 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2647
2648 /* Make sure not to fold the sincos call again. */
2649 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2650 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2651 call, 3, arg, top1, top2));
2652 }
2653 else
2654 {
2655 tree call, fn = NULL_TREE, narg;
2656 tree ctype = build_complex_type (type);
2657
2658 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2659 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2660 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2661 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2662 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2663 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2664 else
2665 gcc_unreachable ();
2666
2667 /* If we don't have a decl for cexp, create one.  This is the
2668 friendliest fallback if the user calls __builtin_cexpi
2669 without full target C99 function support. */
2670 if (fn == NULL_TREE)
2671 {
2672 tree fntype;
2673 const char *name = NULL;
2674
2675 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2676 name = "cexpf";
2677 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2678 name = "cexp";
2679 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2680 name = "cexpl";
2681
2682 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2683 fn = build_fn_decl (name, fntype);
2684 }
2685
2686 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2687 build_real (type, dconst0), arg);
2688
2689 /* Make sure not to fold the cexp call again. */
2690 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2691 return expand_expr (build_call_nary (ctype, call, 1, narg),
2692 target, VOIDmode, EXPAND_NORMAL);
2693 }
2694
2695 /* Now build the proper return type. */
2696 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2697 make_tree (TREE_TYPE (arg), op2),
2698 make_tree (TREE_TYPE (arg), op1)),
2699 target, VOIDmode, EXPAND_NORMAL);
2700 }
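
/* In effect __builtin_cexpi (x) is expanded as cos (x) + i * sin (x):
   in the COMPLEX_EXPR built above, op2 carries the cos output and becomes
   the real part, while op1 carries the sin output and becomes the
   imaginary part.  */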
2701
2702 /* Conveniently construct a function call expression. FNDECL names the
2703 function to be called, N is the number of arguments, and the "..."
2704 parameters are the argument expressions.  Unlike build_call_expr,
2705 this doesn't fold the call, so it will always return a CALL_EXPR. */
2706
2707 static tree
2708 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2709 {
2710 va_list ap;
2711 tree fntype = TREE_TYPE (fndecl);
2712 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2713
2714 va_start (ap, n);
2715 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2716 va_end (ap);
2717 SET_EXPR_LOCATION (fn, loc);
2718 return fn;
2719 }
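
/* A hedged usage sketch: to build an unfolded call to memcpy one might
   write

     tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
     tree call = build_call_nofold_loc (loc, fn, 3, dest, src, len);

   which is exactly the pattern used by expand_builtin_mempcpy_args
   further below.  */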
2720
2721 /* Expand a call to one of the builtin rounding functions gcc defines
2722 as an extension (lfloor and lceil). As these are gcc extensions we
2723 do not need to worry about setting errno to EDOM.
2724 If expanding via optab fails, lower expression to (int)(floor(x)).
2725 EXP is the expression that is a call to the builtin function;
2726 if convenient, the result should be placed in TARGET. */
2727
2728 static rtx
2729 expand_builtin_int_roundingfn (tree exp, rtx target)
2730 {
2731 convert_optab builtin_optab;
2732 rtx op0, tmp;
2733 rtx_insn *insns;
2734 tree fndecl = get_callee_fndecl (exp);
2735 enum built_in_function fallback_fn;
2736 tree fallback_fndecl;
2737 machine_mode mode;
2738 tree arg;
2739
2740 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2741 gcc_unreachable ();
2742
2743 arg = CALL_EXPR_ARG (exp, 0);
2744
2745 switch (DECL_FUNCTION_CODE (fndecl))
2746 {
2747 CASE_FLT_FN (BUILT_IN_ICEIL):
2748 CASE_FLT_FN (BUILT_IN_LCEIL):
2749 CASE_FLT_FN (BUILT_IN_LLCEIL):
2750 builtin_optab = lceil_optab;
2751 fallback_fn = BUILT_IN_CEIL;
2752 break;
2753
2754 CASE_FLT_FN (BUILT_IN_IFLOOR):
2755 CASE_FLT_FN (BUILT_IN_LFLOOR):
2756 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2757 builtin_optab = lfloor_optab;
2758 fallback_fn = BUILT_IN_FLOOR;
2759 break;
2760
2761 default:
2762 gcc_unreachable ();
2763 }
2764
2765 /* Make a suitable register to place result in. */
2766 mode = TYPE_MODE (TREE_TYPE (exp));
2767
2768 target = gen_reg_rtx (mode);
2769
2770 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2771 need to expand the argument again. This way, we will not perform
2772 side-effects more than once. */
2773 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2774
2775 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2776
2777 start_sequence ();
2778
2779 /* Compute into TARGET. */
2780 if (expand_sfix_optab (target, op0, builtin_optab))
2781 {
2782 /* Output the entire sequence. */
2783 insns = get_insns ();
2784 end_sequence ();
2785 emit_insn (insns);
2786 return target;
2787 }
2788
2789 /* If we were unable to expand via the builtin, stop the sequence
2790 (without outputting the insns). */
2791 end_sequence ();
2792
2793 /* Fall back to floating point rounding optab. */
2794 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2795
2796 /* For non-C99 targets we may end up without a fallback fndecl here
2797 if the user called __builtin_lfloor directly. In this case emit
2798 a call to the floor/ceil variants nevertheless. This should result
2799 in the best user experience for targets lacking full C99 support. */
2800 if (fallback_fndecl == NULL_TREE)
2801 {
2802 tree fntype;
2803 const char *name = NULL;
2804
2805 switch (DECL_FUNCTION_CODE (fndecl))
2806 {
2807 case BUILT_IN_ICEIL:
2808 case BUILT_IN_LCEIL:
2809 case BUILT_IN_LLCEIL:
2810 name = "ceil";
2811 break;
2812 case BUILT_IN_ICEILF:
2813 case BUILT_IN_LCEILF:
2814 case BUILT_IN_LLCEILF:
2815 name = "ceilf";
2816 break;
2817 case BUILT_IN_ICEILL:
2818 case BUILT_IN_LCEILL:
2819 case BUILT_IN_LLCEILL:
2820 name = "ceill";
2821 break;
2822 case BUILT_IN_IFLOOR:
2823 case BUILT_IN_LFLOOR:
2824 case BUILT_IN_LLFLOOR:
2825 name = "floor";
2826 break;
2827 case BUILT_IN_IFLOORF:
2828 case BUILT_IN_LFLOORF:
2829 case BUILT_IN_LLFLOORF:
2830 name = "floorf";
2831 break;
2832 case BUILT_IN_IFLOORL:
2833 case BUILT_IN_LFLOORL:
2834 case BUILT_IN_LLFLOORL:
2835 name = "floorl";
2836 break;
2837 default:
2838 gcc_unreachable ();
2839 }
2840
2841 fntype = build_function_type_list (TREE_TYPE (arg),
2842 TREE_TYPE (arg), NULL_TREE);
2843 fallback_fndecl = build_fn_decl (name, fntype);
2844 }
2845
2846 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2847
2848 tmp = expand_normal (exp);
2849 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2850
2851 /* Truncate the result of floating point optab to integer
2852 via expand_fix (). */
2853 target = gen_reg_rtx (mode);
2854 expand_fix (target, tmp, 0);
2855
2856 return target;
2857 }
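
/* As a concrete example, on a target without an lfloor pattern a call

     long l = __builtin_lfloor (x);

   is lowered to the equivalent of (long) floor (x): the floor call is
   expanded first and its result is then truncated via expand_fix.  */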
2858
2859 /* Expand a call to one of the builtin math functions doing integer
2860 conversion (lrint).
2861 Return 0 if a normal call should be emitted rather than expanding the
2862 function in-line. EXP is the expression that is a call to the builtin
2863 function; if convenient, the result should be placed in TARGET. */
2864
2865 static rtx
2866 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2867 {
2868 convert_optab builtin_optab;
2869 rtx op0;
2870 rtx_insn *insns;
2871 tree fndecl = get_callee_fndecl (exp);
2872 tree arg;
2873 machine_mode mode;
2874 enum built_in_function fallback_fn = BUILT_IN_NONE;
2875
2876 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2877 gcc_unreachable ();
2878
2879 arg = CALL_EXPR_ARG (exp, 0);
2880
2881 switch (DECL_FUNCTION_CODE (fndecl))
2882 {
2883 CASE_FLT_FN (BUILT_IN_IRINT):
2884 fallback_fn = BUILT_IN_LRINT;
2885 /* FALLTHRU */
2886 CASE_FLT_FN (BUILT_IN_LRINT):
2887 CASE_FLT_FN (BUILT_IN_LLRINT):
2888 builtin_optab = lrint_optab;
2889 break;
2890
2891 CASE_FLT_FN (BUILT_IN_IROUND):
2892 fallback_fn = BUILT_IN_LROUND;
2893 /* FALLTHRU */
2894 CASE_FLT_FN (BUILT_IN_LROUND):
2895 CASE_FLT_FN (BUILT_IN_LLROUND):
2896 builtin_optab = lround_optab;
2897 break;
2898
2899 default:
2900 gcc_unreachable ();
2901 }
2902
2903 /* There's no easy way to detect the case we need to set EDOM. */
2904 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2905 return NULL_RTX;
2906
2907 /* Make a suitable register to place result in. */
2908 mode = TYPE_MODE (TREE_TYPE (exp));
2909
2910 /* Expand inline when errno does not need to be maintained. */
2911 if (!flag_errno_math)
2912 {
2913 rtx result = gen_reg_rtx (mode);
2914
2915 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2916 need to expand the argument again. This way, we will not perform
2917 side-effects more than once. */
2918 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2919
2920 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2921
2922 start_sequence ();
2923
2924 if (expand_sfix_optab (result, op0, builtin_optab))
2925 {
2926 /* Output the entire sequence. */
2927 insns = get_insns ();
2928 end_sequence ();
2929 emit_insn (insns);
2930 return result;
2931 }
2932
2933 /* If we were unable to expand via the builtin, stop the sequence
2934 (without outputting the insns) and call to the library function
2935 with the stabilized argument list. */
2936 end_sequence ();
2937 }
2938
2939 if (fallback_fn != BUILT_IN_NONE)
2940 {
2941 /* Fall back to rounding to long int.  Use implicit_p 0: on non-C99
2942 targets, (int) round (x) should never be transformed into
2943 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2944 a call to lround in the hope that the target provides at least some
2945 C99 functions.  This should result in the best user experience for
2946 targets lacking full C99 support. */
2947 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2948 fallback_fn, 0);
2949
2950 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2951 fallback_fndecl, 1, arg);
2952
2953 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2954 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2955 return convert_to_mode (mode, target, 0);
2956 }
2957
2958 return expand_call (exp, target, target == const0_rtx);
2959 }
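
/* An example of the fallback path above: on a target without an iround
   pattern, __builtin_iround (x) becomes roughly

     (int) lround (x);

   i.e. a call to lround followed by a conversion of the result to the
   integer mode of the original call.  */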
2960
2961 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2962 a normal call should be emitted rather than expanding the function
2963 in-line. EXP is the expression that is a call to the builtin
2964 function; if convenient, the result should be placed in TARGET. */
2965
2966 static rtx
2967 expand_builtin_powi (tree exp, rtx target)
2968 {
2969 tree arg0, arg1;
2970 rtx op0, op1;
2971 machine_mode mode;
2972 machine_mode mode2;
2973
2974 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2975 return NULL_RTX;
2976
2977 arg0 = CALL_EXPR_ARG (exp, 0);
2978 arg1 = CALL_EXPR_ARG (exp, 1);
2979 mode = TYPE_MODE (TREE_TYPE (exp));
2980
2981 /* Emit a libcall to libgcc. */
2982
2983 /* Mode of the 2nd argument must match that of an int. */
2984 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2985
2986 if (target == NULL_RTX)
2987 target = gen_reg_rtx (mode);
2988
2989 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2990 if (GET_MODE (op0) != mode)
2991 op0 = convert_to_mode (mode, op0, 0);
2992 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2993 if (GET_MODE (op1) != mode2)
2994 op1 = convert_to_mode (mode2, op1, 0);
2995
2996 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2997 target, LCT_CONST, mode, 2,
2998 op0, mode, op1, mode2);
2999
3000 return target;
3001 }
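
/* Assuming the usual libgcc naming (an assumption about the runtime, not
   something this function checks), the libcall emitted above resolves to
   a helper such as

     double __powidf2 (double x, int n);

   for DFmode, with analogous entry points for the other float modes.  */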
3002
3003 /* Expand expression EXP which is a call to the strlen builtin. Return
3004 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
3005 try to get the result in TARGET, if convenient. */
3006
3007 static rtx
3008 expand_builtin_strlen (tree exp, rtx target,
3009 machine_mode target_mode)
3010 {
3011 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3012 return NULL_RTX;
3013 else
3014 {
3015 struct expand_operand ops[4];
3016 rtx pat;
3017 tree len;
3018 tree src = CALL_EXPR_ARG (exp, 0);
3019 rtx src_reg;
3020 rtx_insn *before_strlen;
3021 machine_mode insn_mode = target_mode;
3022 enum insn_code icode = CODE_FOR_nothing;
3023 unsigned int align;
3024
3025 /* If the length can be computed at compile-time, return it. */
3026 len = c_strlen (src, 0);
3027 if (len)
3028 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3029
3030 /* If the length can be computed at compile-time and is a constant
3031 integer, but there are side-effects in src, evaluate
3032 src for side-effects, then return len.
3033 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3034 can be optimized into: i++; x = 3; */
3035 len = c_strlen (src, 1);
3036 if (len && TREE_CODE (len) == INTEGER_CST)
3037 {
3038 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3039 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3040 }
3041
3042 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3043
3044 /* If SRC is not a pointer type, don't do this operation inline. */
3045 if (align == 0)
3046 return NULL_RTX;
3047
3048 /* Bail out if we can't compute strlen in the right mode. */
3049 while (insn_mode != VOIDmode)
3050 {
3051 icode = optab_handler (strlen_optab, insn_mode);
3052 if (icode != CODE_FOR_nothing)
3053 break;
3054
3055 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3056 }
3057 if (insn_mode == VOIDmode)
3058 return NULL_RTX;
3059
3060 /* Make a place to hold the source address. We will not expand
3061 the actual source until we are sure that the expansion will
3062 not fail -- there are trees that cannot be expanded twice. */
3063 src_reg = gen_reg_rtx (Pmode);
3064
3065 /* Mark the beginning of the strlen sequence so we can emit the
3066 source operand later. */
3067 before_strlen = get_last_insn ();
3068
3069 create_output_operand (&ops[0], target, insn_mode);
3070 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3071 create_integer_operand (&ops[2], 0);
3072 create_integer_operand (&ops[3], align);
3073 if (!maybe_expand_insn (icode, 4, ops))
3074 return NULL_RTX;
3075
3076 /* Now that we are assured of success, expand the source. */
3077 start_sequence ();
3078 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3079 if (pat != src_reg)
3080 {
3081 #ifdef POINTERS_EXTEND_UNSIGNED
3082 if (GET_MODE (pat) != Pmode)
3083 pat = convert_to_mode (Pmode, pat,
3084 POINTERS_EXTEND_UNSIGNED);
3085 #endif
3086 emit_move_insn (src_reg, pat);
3087 }
3088 pat = get_insns ();
3089 end_sequence ();
3090
3091 if (before_strlen)
3092 emit_insn_after (pat, before_strlen);
3093 else
3094 emit_insn_before (pat, get_insns ());
3095
3096 /* Return the value in the proper mode for this function. */
3097 if (GET_MODE (ops[0].value) == target_mode)
3098 target = ops[0].value;
3099 else if (target != 0)
3100 convert_move (target, ops[0].value, 0);
3101 else
3102 target = convert_to_mode (target_mode, ops[0].value, 0);
3103
3104 return target;
3105 }
3106 }
3107
3108 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3109 bytes from constant string DATA + OFFSET and return it as target
3110 constant. */
3111
3112 static rtx
3113 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3114 machine_mode mode)
3115 {
3116 const char *str = (const char *) data;
3117
3118 gcc_assert (offset >= 0
3119 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3120 <= strlen (str) + 1));
3121
3122 return c_readstr (str + offset, mode);
3123 }
3124
3125 /* LEN specifies the length of the block for the memcpy/memset operation.
3126 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3127 In some cases we can make a very likely guess about the maximum size,
3128 which we then store in PROBABLE_MAX_SIZE. */
3129
3130 static void
3131 determine_block_size (tree len, rtx len_rtx,
3132 unsigned HOST_WIDE_INT *min_size,
3133 unsigned HOST_WIDE_INT *max_size,
3134 unsigned HOST_WIDE_INT *probable_max_size)
3135 {
3136 if (CONST_INT_P (len_rtx))
3137 {
3138 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3139 return;
3140 }
3141 else
3142 {
3143 wide_int min, max;
3144 enum value_range_type range_type = VR_UNDEFINED;
3145
3146 /* Determine bounds from the type. */
3147 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3148 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3149 else
3150 *min_size = 0;
3151 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3152 *probable_max_size = *max_size
3153 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3154 else
3155 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3156
3157 if (TREE_CODE (len) == SSA_NAME)
3158 range_type = get_range_info (len, &min, &max);
3159 if (range_type == VR_RANGE)
3160 {
3161 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3162 *min_size = min.to_uhwi ();
3163 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3164 *probable_max_size = *max_size = max.to_uhwi ();
3165 }
3166 else if (range_type == VR_ANTI_RANGE)
3167 {
3168 /* An anti range 0...N lets us determine that the minimal size is N+1. */
3169 if (min == 0)
3170 {
3171 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3172 *min_size = max.to_uhwi () + 1;
3173 }
3174 /* Code like
3175
3176 int n;
3177 if (n < 100)
3178 memcpy (a, b, n)
3179
3180 produces an anti range allowing negative values of N.  We can
3181 still use the information and guess that N is not negative.
3182 */
3183 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3184 *probable_max_size = min.to_uhwi () - 1;
3185 }
3186 }
3187 gcc_checking_assert (*max_size <=
3188 (unsigned HOST_WIDE_INT)
3189 GET_MODE_MASK (GET_MODE (len_rtx)));
3190 }
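
/* For example, with value range information available for code like

     unsigned int n = ...;
     if (n >= 8 && n <= 100)
       memcpy (a, b, n);

   N has the range [8, 100], so the function above yields *MIN_SIZE == 8
   and *MAX_SIZE == *PROBABLE_MAX_SIZE == 100, letting the block-move
   expander pick a strategy suited to small copies.  */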
3191
3192 /* Helper function to do the actual work for expand_builtin_memcpy. */
3193
3194 static rtx
3195 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3196 {
3197 const char *src_str;
3198 unsigned int src_align = get_pointer_alignment (src);
3199 unsigned int dest_align = get_pointer_alignment (dest);
3200 rtx dest_mem, src_mem, dest_addr, len_rtx;
3201 HOST_WIDE_INT expected_size = -1;
3202 unsigned int expected_align = 0;
3203 unsigned HOST_WIDE_INT min_size;
3204 unsigned HOST_WIDE_INT max_size;
3205 unsigned HOST_WIDE_INT probable_max_size;
3206
3207 /* If DEST is not a pointer type, call the normal function. */
3208 if (dest_align == 0)
3209 return NULL_RTX;
3210
3211 /* If SRC is not a pointer type, don't do this
3212 operation in-line. */
3213 if (src_align == 0)
3214 return NULL_RTX;
3215
3216 if (currently_expanding_gimple_stmt)
3217 stringop_block_profile (currently_expanding_gimple_stmt,
3218 &expected_align, &expected_size);
3219
3220 if (expected_align < dest_align)
3221 expected_align = dest_align;
3222 dest_mem = get_memory_rtx (dest, len);
3223 set_mem_align (dest_mem, dest_align);
3224 len_rtx = expand_normal (len);
3225 determine_block_size (len, len_rtx, &min_size, &max_size,
3226 &probable_max_size);
3227 src_str = c_getstr (src);
3228
3229 /* If SRC is a string constant and block move would be done
3230 by pieces, we can avoid loading the string from memory
3231 and only store the computed constants. */
3232 if (src_str
3233 && CONST_INT_P (len_rtx)
3234 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3235 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3236 CONST_CAST (char *, src_str),
3237 dest_align, false))
3238 {
3239 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3240 builtin_memcpy_read_str,
3241 CONST_CAST (char *, src_str),
3242 dest_align, false, 0);
3243 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3244 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3245 return dest_mem;
3246 }
3247
3248 src_mem = get_memory_rtx (src, len);
3249 set_mem_align (src_mem, src_align);
3250
3251 /* Copy word part most expediently. */
3252 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3253 CALL_EXPR_TAILCALL (exp)
3254 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3255 expected_align, expected_size,
3256 min_size, max_size, probable_max_size);
3257
3258 if (dest_addr == 0)
3259 {
3260 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3261 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3262 }
3263
3264 return dest_addr;
3265 }
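
/* As an illustration of the string-constant path above, a call such as

     char buf[3];
     memcpy (buf, "ab", 3);

   can be expanded as direct stores of the constant bytes (including the
   terminating nul) instead of a load-and-copy loop, provided
   can_store_by_pieces accepts the length and destination alignment.  */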
3266
3267 /* Expand a call EXP to the memcpy builtin.
3268 Return NULL_RTX if we failed; the caller should emit a normal call,
3269 otherwise try to get the result in TARGET, if convenient (and in
3270 mode MODE if that's convenient). */
3271
3272 static rtx
3273 expand_builtin_memcpy (tree exp, rtx target)
3274 {
3275 if (!validate_arglist (exp,
3276 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3277 return NULL_RTX;
3278 else
3279 {
3280 tree dest = CALL_EXPR_ARG (exp, 0);
3281 tree src = CALL_EXPR_ARG (exp, 1);
3282 tree len = CALL_EXPR_ARG (exp, 2);
3283 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3284 }
3285 }
3286
3287 /* Expand an instrumented call EXP to the memcpy builtin.
3288 Return NULL_RTX if we failed; the caller should emit a normal call,
3289 otherwise try to get the result in TARGET, if convenient (and in
3290 mode MODE if that's convenient). */
3291
3292 static rtx
3293 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3294 {
3295 if (!validate_arglist (exp,
3296 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3297 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3298 INTEGER_TYPE, VOID_TYPE))
3299 return NULL_RTX;
3300 else
3301 {
3302 tree dest = CALL_EXPR_ARG (exp, 0);
3303 tree src = CALL_EXPR_ARG (exp, 2);
3304 tree len = CALL_EXPR_ARG (exp, 4);
3305 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3306
3307 /* Return src bounds with the result. */
3308 if (res)
3309 {
3310 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3311 expand_normal (CALL_EXPR_ARG (exp, 1)));
3312 res = chkp_join_splitted_slot (res, bnd);
3313 }
3314 return res;
3315 }
3316 }
3317
3318 /* Expand a call EXP to the mempcpy builtin.
3319 Return NULL_RTX if we failed; the caller should emit a normal call,
3320 otherwise try to get the result in TARGET, if convenient (and in
3321 mode MODE if that's convenient). If ENDP is 0 return the
3322 destination pointer, if ENDP is 1 return the end pointer ala
3323 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3324 stpcpy. */
3325
3326 static rtx
3327 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3328 {
3329 if (!validate_arglist (exp,
3330 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3331 return NULL_RTX;
3332 else
3333 {
3334 tree dest = CALL_EXPR_ARG (exp, 0);
3335 tree src = CALL_EXPR_ARG (exp, 1);
3336 tree len = CALL_EXPR_ARG (exp, 2);
3337 return expand_builtin_mempcpy_args (dest, src, len,
3338 target, mode, /*endp=*/ 1,
3339 exp);
3340 }
3341 }
3342
3343 /* Expand an instrumented call EXP to the mempcpy builtin.
3344 Return NULL_RTX if we failed; the caller should emit a normal call,
3345 otherwise try to get the result in TARGET, if convenient (and in
3346 mode MODE if that's convenient). */
3347
3348 static rtx
3349 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3350 {
3351 if (!validate_arglist (exp,
3352 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3353 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3354 INTEGER_TYPE, VOID_TYPE))
3355 return NULL_RTX;
3356 else
3357 {
3358 tree dest = CALL_EXPR_ARG (exp, 0);
3359 tree src = CALL_EXPR_ARG (exp, 2);
3360 tree len = CALL_EXPR_ARG (exp, 4);
3361 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3362 mode, 1, exp);
3363
3364 /* Return src bounds with the result. */
3365 if (res)
3366 {
3367 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3368 expand_normal (CALL_EXPR_ARG (exp, 1)));
3369 res = chkp_join_splitted_slot (res, bnd);
3370 }
3371 return res;
3372 }
3373 }
3374
3375 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3376 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3377 so that this can also be called without constructing an actual CALL_EXPR.
3378 The other arguments and return value are the same as for
3379 expand_builtin_mempcpy. */
3380
3381 static rtx
3382 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3383 rtx target, machine_mode mode, int endp,
3384 tree orig_exp)
3385 {
3386 tree fndecl = get_callee_fndecl (orig_exp);
3387
3388 /* If the return value is ignored, transform mempcpy into memcpy. */
3389 if (target == const0_rtx
3390 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3391 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3392 {
3393 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3394 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3395 dest, src, len);
3396 return expand_expr (result, target, mode, EXPAND_NORMAL);
3397 }
3398 else if (target == const0_rtx
3399 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3400 {
3401 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3402 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3403 dest, src, len);
3404 return expand_expr (result, target, mode, EXPAND_NORMAL);
3405 }
3406 else
3407 {
3408 const char *src_str;
3409 unsigned int src_align = get_pointer_alignment (src);
3410 unsigned int dest_align = get_pointer_alignment (dest);
3411 rtx dest_mem, src_mem, len_rtx;
3412
3413 /* If either SRC or DEST is not a pointer type, don't do this
3414 operation in-line. */
3415 if (dest_align == 0 || src_align == 0)
3416 return NULL_RTX;
3417
3418 /* If LEN is not constant, call the normal function. */
3419 if (! tree_fits_uhwi_p (len))
3420 return NULL_RTX;
3421
3422 len_rtx = expand_normal (len);
3423 src_str = c_getstr (src);
3424
3425 /* If SRC is a string constant and block move would be done
3426 by pieces, we can avoid loading the string from memory
3427 and need only store the computed constants. */
3428 if (src_str
3429 && CONST_INT_P (len_rtx)
3430 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3431 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3432 CONST_CAST (char *, src_str),
3433 dest_align, false))
3434 {
3435 dest_mem = get_memory_rtx (dest, len);
3436 set_mem_align (dest_mem, dest_align);
3437 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3438 builtin_memcpy_read_str,
3439 CONST_CAST (char *, src_str),
3440 dest_align, false, endp);
3441 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3442 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3443 return dest_mem;
3444 }
3445
3446 if (CONST_INT_P (len_rtx)
3447 && can_move_by_pieces (INTVAL (len_rtx),
3448 MIN (dest_align, src_align)))
3449 {
3450 dest_mem = get_memory_rtx (dest, len);
3451 set_mem_align (dest_mem, dest_align);
3452 src_mem = get_memory_rtx (src, len);
3453 set_mem_align (src_mem, src_align);
3454 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3455 MIN (dest_align, src_align), endp);
3456 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3457 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3458 return dest_mem;
3459 }
3460
3461 return NULL_RTX;
3462 }
3463 }
3464
3465 #ifndef HAVE_movstr
3466 # define HAVE_movstr 0
3467 # define CODE_FOR_movstr CODE_FOR_nothing
3468 #endif
3469
3470 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3471 we failed, the caller should emit a normal call, otherwise try to
3472 get the result in TARGET, if convenient. If ENDP is 0 return the
3473 destination pointer, if ENDP is 1 return the end pointer ala
3474 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3475 stpcpy. */
3476
3477 static rtx
3478 expand_movstr (tree dest, tree src, rtx target, int endp)
3479 {
3480 struct expand_operand ops[3];
3481 rtx dest_mem;
3482 rtx src_mem;
3483
3484 if (!HAVE_movstr)
3485 return NULL_RTX;
3486
3487 dest_mem = get_memory_rtx (dest, NULL);
3488 src_mem = get_memory_rtx (src, NULL);
3489 if (!endp)
3490 {
3491 target = force_reg (Pmode, XEXP (dest_mem, 0));
3492 dest_mem = replace_equiv_address (dest_mem, target);
3493 }
3494
3495 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3496 create_fixed_operand (&ops[1], dest_mem);
3497 create_fixed_operand (&ops[2], src_mem);
3498 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3499 return NULL_RTX;
3500
3501 if (endp && target != const0_rtx)
3502 {
3503 target = ops[0].value;
3504 /* movstr is supposed to set end to the address of the NUL
3505 terminator. If the caller requested a mempcpy-like return value,
3506 adjust it. */
3507 if (endp == 1)
3508 {
3509 rtx tem = plus_constant (GET_MODE (target),
3510 gen_lowpart (GET_MODE (target), target), 1);
3511 emit_move_insn (target, force_operand (tem, NULL_RTX));
3512 }
3513 }
3514 return target;
3515 }
3516
3517 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3518 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3519 try to get the result in TARGET, if convenient (and in mode MODE if that's
3520 convenient). */
3521
3522 static rtx
3523 expand_builtin_strcpy (tree exp, rtx target)
3524 {
3525 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3526 {
3527 tree dest = CALL_EXPR_ARG (exp, 0);
3528 tree src = CALL_EXPR_ARG (exp, 1);
3529 return expand_builtin_strcpy_args (dest, src, target);
3530 }
3531 return NULL_RTX;
3532 }
3533
3534 /* Helper function to do the actual work for expand_builtin_strcpy. The
3535 arguments to the builtin_strcpy call DEST and SRC are broken out
3536 so that this can also be called without constructing an actual CALL_EXPR.
3537 The other arguments and return value are the same as for
3538 expand_builtin_strcpy. */
3539
3540 static rtx
3541 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3542 {
3543 return expand_movstr (dest, src, target, /*endp=*/0);
3544 }
3545
3546 /* Expand a call EXP to the stpcpy builtin.
3547 Return NULL_RTX if we failed; the caller should emit a normal call,
3548 otherwise try to get the result in TARGET, if convenient (and in
3549 mode MODE if that's convenient). */
3550
3551 static rtx
3552 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3553 {
3554 tree dst, src;
3555 location_t loc = EXPR_LOCATION (exp);
3556
3557 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3558 return NULL_RTX;
3559
3560 dst = CALL_EXPR_ARG (exp, 0);
3561 src = CALL_EXPR_ARG (exp, 1);
3562
3563 /* If the return value is ignored, transform stpcpy into strcpy. */
3564 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3565 {
3566 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3567 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3568 return expand_expr (result, target, mode, EXPAND_NORMAL);
3569 }
3570 else
3571 {
3572 tree len, lenp1;
3573 rtx ret;
3574
3575 /* Ensure we get an actual string whose length can be evaluated at
3576 compile-time, not an expression containing a string. This is
3577 because the latter will potentially produce pessimized code
3578 when used to compute the return value. */
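/* For example, for stpcpy (d, "abc") we compute LEN == 3, copy
LEN + 1 == 4 bytes as a mempcpy, and the ENDP == 2 expansion below
returns D + 3, the address of the copied NUL.  */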
3579 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3580 return expand_movstr (dst, src, target, /*endp=*/2);
3581
3582 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3583 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3584 target, mode, /*endp=*/2,
3585 exp);
3586
3587 if (ret)
3588 return ret;
3589
3590 if (TREE_CODE (len) == INTEGER_CST)
3591 {
3592 rtx len_rtx = expand_normal (len);
3593
3594 if (CONST_INT_P (len_rtx))
3595 {
3596 ret = expand_builtin_strcpy_args (dst, src, target);
3597
3598 if (ret)
3599 {
3600 if (! target)
3601 {
3602 if (mode != VOIDmode)
3603 target = gen_reg_rtx (mode);
3604 else
3605 target = gen_reg_rtx (GET_MODE (ret));
3606 }
3607 if (GET_MODE (target) != GET_MODE (ret))
3608 ret = gen_lowpart (GET_MODE (target), ret);
3609
3610 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3611 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3612 gcc_assert (ret);
3613
3614 return target;
3615 }
3616 }
3617 }
3618
3619 return expand_movstr (dst, src, target, /*endp=*/2);
3620 }
3621 }
3622
3623 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3624 bytes from constant string DATA + OFFSET and return it as target
3625 constant. */
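/* Note that an OFFSET at or past the terminating NUL reads as zero
bytes; this is what supplies the zero padding that strncpy
requires.  */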
3626
3627 rtx
3628 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3629 machine_mode mode)
3630 {
3631 const char *str = (const char *) data;
3632
3633 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3634 return const0_rtx;
3635
3636 return c_readstr (str + offset, mode);
3637 }
3638
3639 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3640 NULL_RTX if we failed; the caller should emit a normal call. */
3641
3642 static rtx
3643 expand_builtin_strncpy (tree exp, rtx target)
3644 {
3645 location_t loc = EXPR_LOCATION (exp);
3646
3647 if (validate_arglist (exp,
3648 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3649 {
3650 tree dest = CALL_EXPR_ARG (exp, 0);
3651 tree src = CALL_EXPR_ARG (exp, 1);
3652 tree len = CALL_EXPR_ARG (exp, 2);
3653 tree slen = c_strlen (src, 1);
3654
3655 /* We must be passed a constant LEN and a SRC of known constant length. */
3656 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3657 return NULL_RTX;
3658
3659 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3660
3661 /* We're required to pad with trailing zeros if the requested
3662 len is greater than strlen(s2)+1. In that case try to
3663 use store_by_pieces; if that fails, punt. */
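/* For example, strncpy (d, "hi", 5) must store the five bytes
'h', 'i', '\0', '\0', '\0'; builtin_strncpy_read_str supplies the
trailing zeros.  */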
3664 if (tree_int_cst_lt (slen, len))
3665 {
3666 unsigned int dest_align = get_pointer_alignment (dest);
3667 const char *p = c_getstr (src);
3668 rtx dest_mem;
3669
3670 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3671 || !can_store_by_pieces (tree_to_uhwi (len),
3672 builtin_strncpy_read_str,
3673 CONST_CAST (char *, p),
3674 dest_align, false))
3675 return NULL_RTX;
3676
3677 dest_mem = get_memory_rtx (dest, len);
3678 store_by_pieces (dest_mem, tree_to_uhwi (len),
3679 builtin_strncpy_read_str,
3680 CONST_CAST (char *, p), dest_align, false, 0);
3681 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3682 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3683 return dest_mem;
3684 }
3685 }
3686 return NULL_RTX;
3687 }
3688
3689 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3690 bytes from constant string DATA + OFFSET and return it as target
3691 constant. */
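/* For example, for a byte value of 0x41 and a 4-byte MODE, the
returned constant is 0x41414141.  */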
3692
3693 rtx
3694 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3695 machine_mode mode)
3696 {
3697 const char *c = (const char *) data;
3698 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3699
3700 memset (p, *c, GET_MODE_SIZE (mode));
3701
3702 return c_readstr (p, mode);
3703 }
3704
3705 /* Callback routine for store_by_pieces. Return the RTL of a register
3706 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3707 char value given in the RTL register data. For example, if mode is
3708 4 bytes wide, return the RTL for 0x01010101*data. */
3709
3710 static rtx
3711 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3712 machine_mode mode)
3713 {
3714 rtx target, coeff;
3715 size_t size;
3716 char *p;
3717
3718 size = GET_MODE_SIZE (mode);
3719 if (size == 1)
3720 return (rtx) data;
3721
3722 p = XALLOCAVEC (char, size);
3723 memset (p, 1, size);
3724 coeff = c_readstr (p, mode);
3725
3726 target = convert_to_mode (mode, (rtx) data, 1);
3727 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3728 return force_reg (mode, target);
3729 }
3730
3731 /* Expand expression EXP, which is a call to the memset builtin. Return
3732 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3733 try to get the result in TARGET, if convenient (and in mode MODE if that's
3734 convenient). */
3735
3736 static rtx
3737 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3738 {
3739 if (!validate_arglist (exp,
3740 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3741 return NULL_RTX;
3742 else
3743 {
3744 tree dest = CALL_EXPR_ARG (exp, 0);
3745 tree val = CALL_EXPR_ARG (exp, 1);
3746 tree len = CALL_EXPR_ARG (exp, 2);
3747 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3748 }
3749 }
3750
3751 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3752 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3753 try to get the result in TARGET, if convenient (and in mode MODE if that's
3754 convenient). */
3755
3756 static rtx
3757 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3758 {
3759 if (!validate_arglist (exp,
3760 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3761 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3762 return NULL_RTX;
3763 else
3764 {
3765 tree dest = CALL_EXPR_ARG (exp, 0);
3766 tree val = CALL_EXPR_ARG (exp, 2);
3767 tree len = CALL_EXPR_ARG (exp, 3);
3768 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3769
3770 /* Return src bounds with the result. */
3771 if (res)
3772 {
3773 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3774 expand_normal (CALL_EXPR_ARG (exp, 1)));
3775 res = chkp_join_splitted_slot (res, bnd);
3776 }
3777 return res;
3778 }
3779 }
3780
3781 /* Helper function to do the actual work for expand_builtin_memset. The
3782 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3783 so that this can also be called without constructing an actual CALL_EXPR.
3784 The other arguments and return value are the same as for
3785 expand_builtin_memset. */
3786
3787 static rtx
3788 expand_builtin_memset_args (tree dest, tree val, tree len,
3789 rtx target, machine_mode mode, tree orig_exp)
3790 {
3791 tree fndecl, fn;
3792 enum built_in_function fcode;
3793 machine_mode val_mode;
3794 char c;
3795 unsigned int dest_align;
3796 rtx dest_mem, dest_addr, len_rtx;
3797 HOST_WIDE_INT expected_size = -1;
3798 unsigned int expected_align = 0;
3799 unsigned HOST_WIDE_INT min_size;
3800 unsigned HOST_WIDE_INT max_size;
3801 unsigned HOST_WIDE_INT probable_max_size;
3802
3803 dest_align = get_pointer_alignment (dest);
3804
3805 /* If DEST is not a pointer type, don't do this operation in-line. */
3806 if (dest_align == 0)
3807 return NULL_RTX;
3808
3809 if (currently_expanding_gimple_stmt)
3810 stringop_block_profile (currently_expanding_gimple_stmt,
3811 &expected_align, &expected_size);
3812
3813 if (expected_align < dest_align)
3814 expected_align = dest_align;
3815
3816 /* If the LEN parameter is zero, return DEST. */
3817 if (integer_zerop (len))
3818 {
3819 /* Evaluate and ignore VAL in case it has side-effects. */
3820 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3821 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3822 }
3823
3824 /* Stabilize the arguments in case we fail. */
3825 dest = builtin_save_expr (dest);
3826 val = builtin_save_expr (val);
3827 len = builtin_save_expr (len);
3828
3829 len_rtx = expand_normal (len);
3830 determine_block_size (len, len_rtx, &min_size, &max_size,
3831 &probable_max_size);
3832 dest_mem = get_memory_rtx (dest, len);
3833 val_mode = TYPE_MODE (unsigned_char_type_node);
3834
3835 if (TREE_CODE (val) != INTEGER_CST)
3836 {
3837 rtx val_rtx;
3838
3839 val_rtx = expand_normal (val);
3840 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3841
3842 /* Assume that we can memset by pieces if we can store
3843 the coefficients by pieces (in the required modes).
3844 We can't pass builtin_memset_gen_str as that emits RTL. */
3845 c = 1;
3846 if (tree_fits_uhwi_p (len)
3847 && can_store_by_pieces (tree_to_uhwi (len),
3848 builtin_memset_read_str, &c, dest_align,
3849 true))
3850 {
3851 val_rtx = force_reg (val_mode, val_rtx);
3852 store_by_pieces (dest_mem, tree_to_uhwi (len),
3853 builtin_memset_gen_str, val_rtx, dest_align,
3854 true, 0);
3855 }
3856 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3857 dest_align, expected_align,
3858 expected_size, min_size, max_size,
3859 probable_max_size))
3860 goto do_libcall;
3861
3862 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3863 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3864 return dest_mem;
3865 }
3866
3867 if (target_char_cast (val, &c))
3868 goto do_libcall;
3869
3870 if (c)
3871 {
3872 if (tree_fits_uhwi_p (len)
3873 && can_store_by_pieces (tree_to_uhwi (len),
3874 builtin_memset_read_str, &c, dest_align,
3875 true))
3876 store_by_pieces (dest_mem, tree_to_uhwi (len),
3877 builtin_memset_read_str, &c, dest_align, true, 0);
3878 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3879 gen_int_mode (c, val_mode),
3880 dest_align, expected_align,
3881 expected_size, min_size, max_size,
3882 probable_max_size))
3883 goto do_libcall;
3884
3885 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3886 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3887 return dest_mem;
3888 }
3889
3890 set_mem_align (dest_mem, dest_align);
3891 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3892 CALL_EXPR_TAILCALL (orig_exp)
3893 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3894 expected_align, expected_size,
3895 min_size, max_size,
3896 probable_max_size);
3897
3898 if (dest_addr == 0)
3899 {
3900 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3901 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3902 }
3903
3904 return dest_addr;
3905
3906 do_libcall:
3907 fndecl = get_callee_fndecl (orig_exp);
3908 fcode = DECL_FUNCTION_CODE (fndecl);
3909 if (fcode == BUILT_IN_MEMSET
3910 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3911 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3912 dest, val, len);
3913 else if (fcode == BUILT_IN_BZERO)
3914 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3915 dest, len);
3916 else
3917 gcc_unreachable ();
3918 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3919 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3920 return expand_call (fn, target, target == const0_rtx);
3921 }
3922
3923 /* Expand expression EXP, which is a call to the bzero builtin. Return
3924 NULL_RTX if we failed; the caller should emit a normal call. */
3925
3926 static rtx
3927 expand_builtin_bzero (tree exp)
3928 {
3929 tree dest, size;
3930 location_t loc = EXPR_LOCATION (exp);
3931
3932 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3933 return NULL_RTX;
3934
3935 dest = CALL_EXPR_ARG (exp, 0);
3936 size = CALL_EXPR_ARG (exp, 1);
3937
3938 /* New argument list transforming bzero (ptr x, int y) to
3939 memset (ptr x, int 0, size_t y). This is done so that
3940 if it isn't expanded inline, we fall back to calling
3941 bzero instead of memset. */
3942
3943 return expand_builtin_memset_args (dest, integer_zero_node,
3944 fold_convert_loc (loc,
3945 size_type_node, size),
3946 const0_rtx, VOIDmode, exp);
3947 }
3948
3949 /* Expand expression EXP, which is a call to the memcmp built-in function.
3950 Return NULL_RTX if we failed and the caller should emit a normal call,
3951 otherwise try to get the result in TARGET, if convenient (and in mode
3952 MODE, if that's convenient). */
3953
3954 static rtx
3955 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3956 ATTRIBUTE_UNUSED machine_mode mode)
3957 {
3958 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3959
3960 if (!validate_arglist (exp,
3961 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3962 return NULL_RTX;
3963
3964 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3965 implementing memcmp because it will stop if it encounters two
3966 zero bytes. */
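/* For example, memcmp ("a\0x", "a\0y", 3) must compare all three
bytes and return nonzero, whereas a string compare would stop at
the embedded NULs and report equality.  */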
3967 #if defined HAVE_cmpmemsi
3968 {
3969 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3970 rtx result;
3971 rtx insn;
3972 tree arg1 = CALL_EXPR_ARG (exp, 0);
3973 tree arg2 = CALL_EXPR_ARG (exp, 1);
3974 tree len = CALL_EXPR_ARG (exp, 2);
3975
3976 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3977 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3978 machine_mode insn_mode;
3979
3980 if (HAVE_cmpmemsi)
3981 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3982 else
3983 return NULL_RTX;
3984
3985 /* If we don't know either pointer's alignment, call the function. */
3986 if (arg1_align == 0 || arg2_align == 0)
3987 return NULL_RTX;
3988
3989 /* Make a place to write the result of the instruction. */
3990 result = target;
3991 if (! (result != 0
3992 && REG_P (result) && GET_MODE (result) == insn_mode
3993 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3994 result = gen_reg_rtx (insn_mode);
3995
3996 arg1_rtx = get_memory_rtx (arg1, len);
3997 arg2_rtx = get_memory_rtx (arg2, len);
3998 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3999
4000 /* Set MEM_SIZE as appropriate. */
4001 if (CONST_INT_P (arg3_rtx))
4002 {
4003 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
4004 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
4005 }
4006
4007 if (HAVE_cmpmemsi)
4008 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4009 GEN_INT (MIN (arg1_align, arg2_align)));
4010 else
4011 gcc_unreachable ();
4012
4013 if (insn)
4014 emit_insn (insn);
4015 else
4016 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4017 TYPE_MODE (integer_type_node), 3,
4018 XEXP (arg1_rtx, 0), Pmode,
4019 XEXP (arg2_rtx, 0), Pmode,
4020 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4021 TYPE_UNSIGNED (sizetype)),
4022 TYPE_MODE (sizetype));
4023
4024 /* Return the value in the proper mode for this function. */
4025 mode = TYPE_MODE (TREE_TYPE (exp));
4026 if (GET_MODE (result) == mode)
4027 return result;
4028 else if (target != 0)
4029 {
4030 convert_move (target, result, 0);
4031 return target;
4032 }
4033 else
4034 return convert_to_mode (mode, result, 0);
4035 }
4036 #endif /* HAVE_cmpmemsi. */
4037
4038 return NULL_RTX;
4039 }
4040
4041 /* Expand expression EXP, which is a call to the strcmp builtin. Return
4042 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4043 try to get the result in TARGET, if convenient. */
4044
4045 static rtx
4046 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4047 {
4048 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4049 return NULL_RTX;
4050
4051 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4052 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4053 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4054 {
4055 rtx arg1_rtx, arg2_rtx;
4056 rtx result, insn = NULL_RTX;
4057 tree fndecl, fn;
4058 tree arg1 = CALL_EXPR_ARG (exp, 0);
4059 tree arg2 = CALL_EXPR_ARG (exp, 1);
4060
4061 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4062 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4063
4064 /* If we don't know either pointer's alignment, call the function. */
4065 if (arg1_align == 0 || arg2_align == 0)
4066 return NULL_RTX;
4067
4068 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4069 arg1 = builtin_save_expr (arg1);
4070 arg2 = builtin_save_expr (arg2);
4071
4072 arg1_rtx = get_memory_rtx (arg1, NULL);
4073 arg2_rtx = get_memory_rtx (arg2, NULL);
4074
4075 #ifdef HAVE_cmpstrsi
4076 /* Try to call cmpstrsi. */
4077 if (HAVE_cmpstrsi)
4078 {
4079 machine_mode insn_mode
4080 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4081
4082 /* Make a place to write the result of the instruction. */
4083 result = target;
4084 if (! (result != 0
4085 && REG_P (result) && GET_MODE (result) == insn_mode
4086 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4087 result = gen_reg_rtx (insn_mode);
4088
4089 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4090 GEN_INT (MIN (arg1_align, arg2_align)));
4091 }
4092 #endif
4093 #ifdef HAVE_cmpstrnsi
4094 /* Try to determine at least one length and call cmpstrnsi. */
4095 if (!insn && HAVE_cmpstrnsi)
4096 {
4097 tree len;
4098 rtx arg3_rtx;
4099
4100 machine_mode insn_mode
4101 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4102 tree len1 = c_strlen (arg1, 1);
4103 tree len2 = c_strlen (arg2, 1);
4104
4105 if (len1)
4106 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4107 if (len2)
4108 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4109
4110 /* If we don't have a constant length for the first, use the length
4111 of the second, if we know it. We don't require a constant for
4112 this case; some cost analysis could be done if both are available
4113 but neither is constant. For now, assume they're equally cheap,
4114 unless one has side effects. If both strings have constant lengths,
4115 use the smaller. */
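/* For example, if ARG2 is "ab", LEN2 is 3 and, LEN1 being unknown,
we compare at most 3 bytes; any difference must appear at or before
the terminating NUL of the shorter string.  */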
4116
4117 if (!len1)
4118 len = len2;
4119 else if (!len2)
4120 len = len1;
4121 else if (TREE_SIDE_EFFECTS (len1))
4122 len = len2;
4123 else if (TREE_SIDE_EFFECTS (len2))
4124 len = len1;
4125 else if (TREE_CODE (len1) != INTEGER_CST)
4126 len = len2;
4127 else if (TREE_CODE (len2) != INTEGER_CST)
4128 len = len1;
4129 else if (tree_int_cst_lt (len1, len2))
4130 len = len1;
4131 else
4132 len = len2;
4133
4134 /* If both arguments have side effects, we cannot optimize. */
4135 if (!len || TREE_SIDE_EFFECTS (len))
4136 goto do_libcall;
4137
4138 arg3_rtx = expand_normal (len);
4139
4140 /* Make a place to write the result of the instruction. */
4141 result = target;
4142 if (! (result != 0
4143 && REG_P (result) && GET_MODE (result) == insn_mode
4144 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4145 result = gen_reg_rtx (insn_mode);
4146
4147 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4148 GEN_INT (MIN (arg1_align, arg2_align)));
4149 }
4150 #endif
4151
4152 if (insn)
4153 {
4154 machine_mode mode;
4155 emit_insn (insn);
4156
4157 /* Return the value in the proper mode for this function. */
4158 mode = TYPE_MODE (TREE_TYPE (exp));
4159 if (GET_MODE (result) == mode)
4160 return result;
4161 if (target == 0)
4162 return convert_to_mode (mode, result, 0);
4163 convert_move (target, result, 0);
4164 return target;
4165 }
4166
4167 /* Expand the library call ourselves using a stabilized argument
4168 list to avoid evaluating the function's arguments twice. */
4169 #ifdef HAVE_cmpstrnsi
4170 do_libcall:
4171 #endif
4172 fndecl = get_callee_fndecl (exp);
4173 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4174 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4175 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4176 return expand_call (fn, target, target == const0_rtx);
4177 }
4178 #endif
4179 return NULL_RTX;
4180 }
4181
4182 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4183 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4184 try to get the result in TARGET, if convenient. */
4185
4186 static rtx
4187 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4188 ATTRIBUTE_UNUSED machine_mode mode)
4189 {
4190 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4191
4192 if (!validate_arglist (exp,
4193 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4194 return NULL_RTX;
4195
4196 /* If c_strlen can determine an expression for one of the string
4197 lengths, and it doesn't have side effects, then emit cmpstrnsi
4198 using length MIN(strlen(string)+1, arg3). */
4199 #ifdef HAVE_cmpstrnsi
4200 if (HAVE_cmpstrnsi)
4201 {
4202 tree len, len1, len2;
4203 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4204 rtx result, insn;
4205 tree fndecl, fn;
4206 tree arg1 = CALL_EXPR_ARG (exp, 0);
4207 tree arg2 = CALL_EXPR_ARG (exp, 1);
4208 tree arg3 = CALL_EXPR_ARG (exp, 2);
4209
4210 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4211 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4212 machine_mode insn_mode
4213 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4214
4215 len1 = c_strlen (arg1, 1);
4216 len2 = c_strlen (arg2, 1);
4217
4218 if (len1)
4219 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4220 if (len2)
4221 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4222
4223 /* If we don't have a constant length for the first, use the length
4224 of the second, if we know it. We don't require a constant for
4225 this case; some cost analysis could be done if both are available
4226 but neither is constant. For now, assume they're equally cheap,
4227 unless one has side effects. If both strings have constant lengths,
4228 use the smaller. */
4229
4230 if (!len1)
4231 len = len2;
4232 else if (!len2)
4233 len = len1;
4234 else if (TREE_SIDE_EFFECTS (len1))
4235 len = len2;
4236 else if (TREE_SIDE_EFFECTS (len2))
4237 len = len1;
4238 else if (TREE_CODE (len1) != INTEGER_CST)
4239 len = len2;
4240 else if (TREE_CODE (len2) != INTEGER_CST)
4241 len = len1;
4242 else if (tree_int_cst_lt (len1, len2))
4243 len = len1;
4244 else
4245 len = len2;
4246
4247 /* If both arguments have side effects, we cannot optimize. */
4248 if (!len || TREE_SIDE_EFFECTS (len))
4249 return NULL_RTX;
4250
4251 /* The actual new length parameter is MIN(len,arg3). */
4252 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4253 fold_convert_loc (loc, TREE_TYPE (len), arg3));
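/* For example, for strncmp (s, "ab", 100) we get LEN == 3 from the
second string and compare at most MIN (3, 100) == 3 bytes; bytes
beyond the NUL of "ab" cannot affect the result.  */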
4254
4255 /* If we don't know either pointer's alignment, call the function. */
4256 if (arg1_align == 0 || arg2_align == 0)
4257 return NULL_RTX;
4258
4259 /* Make a place to write the result of the instruction. */
4260 result = target;
4261 if (! (result != 0
4262 && REG_P (result) && GET_MODE (result) == insn_mode
4263 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4264 result = gen_reg_rtx (insn_mode);
4265
4266 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4267 arg1 = builtin_save_expr (arg1);
4268 arg2 = builtin_save_expr (arg2);
4269 len = builtin_save_expr (len);
4270
4271 arg1_rtx = get_memory_rtx (arg1, len);
4272 arg2_rtx = get_memory_rtx (arg2, len);
4273 arg3_rtx = expand_normal (len);
4274 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4275 GEN_INT (MIN (arg1_align, arg2_align)));
4276 if (insn)
4277 {
4278 emit_insn (insn);
4279
4280 /* Return the value in the proper mode for this function. */
4281 mode = TYPE_MODE (TREE_TYPE (exp));
4282 if (GET_MODE (result) == mode)
4283 return result;
4284 if (target == 0)
4285 return convert_to_mode (mode, result, 0);
4286 convert_move (target, result, 0);
4287 return target;
4288 }
4289
4290 /* Expand the library call ourselves using a stabilized argument
4291 list to avoid evaluating the function's arguments twice. */
4292 fndecl = get_callee_fndecl (exp);
4293 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4294 arg1, arg2, len);
4295 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4296 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4297 return expand_call (fn, target, target == const0_rtx);
4298 }
4299 #endif
4300 return NULL_RTX;
4301 }
4302
4303 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4304 if that's convenient. */
4305
4306 rtx
4307 expand_builtin_saveregs (void)
4308 {
4309 rtx val;
4310 rtx_insn *seq;
4311
4312 /* Don't do __builtin_saveregs more than once in a function.
4313 Save the result of the first call and reuse it. */
4314 if (saveregs_value != 0)
4315 return saveregs_value;
4316
4317 /* When this function is called, it means that registers must be
4318 saved on entry to this function. So we migrate the call to the
4319 first insn of this function. */
4320
4321 start_sequence ();
4322
4323 /* Do whatever the machine needs done in this case. */
4324 val = targetm.calls.expand_builtin_saveregs ();
4325
4326 seq = get_insns ();
4327 end_sequence ();
4328
4329 saveregs_value = val;
4330
4331 /* Put the insns after the NOTE that starts the function. If this
4332 is inside a start_sequence, make the outer-level insn chain current, so
4333 the code is placed at the start of the function. */
4334 push_topmost_sequence ();
4335 emit_insn_after (seq, entry_of_function ());
4336 pop_topmost_sequence ();
4337
4338 return val;
4339 }
4340
4341 /* Expand a call to __builtin_next_arg. */
4342
4343 static rtx
4344 expand_builtin_next_arg (void)
4345 {
4346 /* Checking arguments is already done in fold_builtin_next_arg
4347 that must be called before this function. */
4348 return expand_binop (ptr_mode, add_optab,
4349 crtl->args.internal_arg_pointer,
4350 crtl->args.arg_offset_rtx,
4351 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4352 }
4353
4354 /* Make it easier for the backends by protecting the valist argument
4355 from multiple evaluations. */
4356
4357 static tree
4358 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4359 {
4360 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4361
4362 /* The current way of determining the type of valist is completely
4363 bogus. We should have the information on the va builtin instead. */
4364 if (!vatype)
4365 vatype = targetm.fn_abi_va_list (cfun->decl);
4366
4367 if (TREE_CODE (vatype) == ARRAY_TYPE)
4368 {
4369 if (TREE_SIDE_EFFECTS (valist))
4370 valist = save_expr (valist);
4371
4372 /* For this case, the backends will be expecting a pointer to
4373 vatype, but it's possible we've actually been given an array
4374 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4375 So fix it. */
4376 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4377 {
4378 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4379 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4380 }
4381 }
4382 else
4383 {
4384 tree pt = build_pointer_type (vatype);
4385
4386 if (! needs_lvalue)
4387 {
4388 if (! TREE_SIDE_EFFECTS (valist))
4389 return valist;
4390
4391 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4392 TREE_SIDE_EFFECTS (valist) = 1;
4393 }
4394
4395 if (TREE_SIDE_EFFECTS (valist))
4396 valist = save_expr (valist);
4397 valist = fold_build2_loc (loc, MEM_REF,
4398 vatype, valist, build_int_cst (pt, 0));
4399 }
4400
4401 return valist;
4402 }
4403
4404 /* The "standard" definition of va_list is void*. */
4405
4406 tree
4407 std_build_builtin_va_list (void)
4408 {
4409 return ptr_type_node;
4410 }
4411
4412 /* The "standard" abi va_list is va_list_type_node. */
4413
4414 tree
4415 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4416 {
4417 return va_list_type_node;
4418 }
4419
4420 /* The "standard" type of va_list is va_list_type_node. */
4421
4422 tree
4423 std_canonical_va_list_type (tree type)
4424 {
4425 tree wtype, htype;
4426
4427 if (INDIRECT_REF_P (type))
4428 type = TREE_TYPE (type);
4429 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4430 type = TREE_TYPE (type);
4431 wtype = va_list_type_node;
4432 htype = type;
4433 /* Treat structure va_list types. */
4434 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4435 htype = TREE_TYPE (htype);
4436 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4437 {
4438 /* If va_list is an array type, the argument may have decayed
4439 to a pointer type, e.g. by being passed to another function.
4440 In that case, unwrap both types so that we can compare the
4441 underlying records. */
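/* E.g. on x86_64, va_list is an array of one structure, so a
va_list parameter decays to a pointer to that structure; stripping
one level from both types lets the TYPE_MAIN_VARIANT comparison
below match them.  */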
4442 if (TREE_CODE (htype) == ARRAY_TYPE
4443 || POINTER_TYPE_P (htype))
4444 {
4445 wtype = TREE_TYPE (wtype);
4446 htype = TREE_TYPE (htype);
4447 }
4448 }
4449 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4450 return va_list_type_node;
4451
4452 return NULL_TREE;
4453 }
4454
4455 /* The "standard" implementation of va_start: just assign `nextarg' to
4456 the variable. */
4457
4458 void
4459 std_expand_builtin_va_start (tree valist, rtx nextarg)
4460 {
4461 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4462 convert_move (va_r, nextarg, 0);
4463
4464 /* We do not have any valid bounds for the pointer, so
4465 just store zero bounds for it. */
4466 if (chkp_function_instrumented_p (current_function_decl))
4467 chkp_expand_bounds_reset_for_mem (valist,
4468 make_tree (TREE_TYPE (valist),
4469 nextarg));
4470 }
4471
4472 /* Expand EXP, a call to __builtin_va_start. */
4473
4474 static rtx
4475 expand_builtin_va_start (tree exp)
4476 {
4477 rtx nextarg;
4478 tree valist;
4479 location_t loc = EXPR_LOCATION (exp);
4480
4481 if (call_expr_nargs (exp) < 2)
4482 {
4483 error_at (loc, "too few arguments to function %<va_start%>");
4484 return const0_rtx;
4485 }
4486
4487 if (fold_builtin_next_arg (exp, true))
4488 return const0_rtx;
4489
4490 nextarg = expand_builtin_next_arg ();
4491 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4492
4493 if (targetm.expand_builtin_va_start)
4494 targetm.expand_builtin_va_start (valist, nextarg);
4495 else
4496 std_expand_builtin_va_start (valist, nextarg);
4497
4498 return const0_rtx;
4499 }
4500
4501 /* Expand EXP, a call to __builtin_va_end. */
4502
4503 static rtx
4504 expand_builtin_va_end (tree exp)
4505 {
4506 tree valist = CALL_EXPR_ARG (exp, 0);
4507
4508 /* Evaluate for side effects, if needed. I hate macros that don't
4509 do that. */
4510 if (TREE_SIDE_EFFECTS (valist))
4511 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4512
4513 return const0_rtx;
4514 }
4515
4516 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4517 builtin rather than just as an assignment in stdarg.h because of the
4518 nastiness of array-type va_list types. */
4519
4520 static rtx
4521 expand_builtin_va_copy (tree exp)
4522 {
4523 tree dst, src, t;
4524 location_t loc = EXPR_LOCATION (exp);
4525
4526 dst = CALL_EXPR_ARG (exp, 0);
4527 src = CALL_EXPR_ARG (exp, 1);
4528
4529 dst = stabilize_va_list_loc (loc, dst, 1);
4530 src = stabilize_va_list_loc (loc, src, 0);
4531
4532 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4533
4534 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4535 {
4536 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4537 TREE_SIDE_EFFECTS (t) = 1;
4538 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4539 }
4540 else
4541 {
4542 rtx dstb, srcb, size;
4543
4544 /* Evaluate to pointers. */
4545 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4546 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4547 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4548 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4549
4550 dstb = convert_memory_address (Pmode, dstb);
4551 srcb = convert_memory_address (Pmode, srcb);
4552
4553 /* "Dereference" to BLKmode memories. */
4554 dstb = gen_rtx_MEM (BLKmode, dstb);
4555 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4556 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4557 srcb = gen_rtx_MEM (BLKmode, srcb);
4558 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4559 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4560
4561 /* Copy. */
4562 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4563 }
4564
4565 return const0_rtx;
4566 }
4567
4568 /* Expand a call to one of the builtin functions __builtin_frame_address or
4569 __builtin_return_address. */
4570
4571 static rtx
4572 expand_builtin_frame_address (tree fndecl, tree exp)
4573 {
4574 /* The argument must be a nonnegative integer constant.
4575 It counts the number of frames to scan up the stack.
4576 The value is the frame or return address saved in that frame. */
4577 if (call_expr_nargs (exp) == 0)
4578 /* Warning about missing arg was already issued. */
4579 return const0_rtx;
4580 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4581 {
4582 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4583 error ("invalid argument to %<__builtin_frame_address%>");
4584 else
4585 error ("invalid argument to %<__builtin_return_address%>");
4586 return const0_rtx;
4587 }
4588 else
4589 {
4590 rtx tem
4591 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4592 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4593
4594 /* Some ports cannot access arbitrary stack frames. */
4595 if (tem == NULL)
4596 {
4597 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4598 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4599 else
4600 warning (0, "unsupported argument to %<__builtin_return_address%>");
4601 return const0_rtx;
4602 }
4603
4604 /* For __builtin_frame_address, return what we've got. */
4605 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4606 return tem;
4607
4608 if (!REG_P (tem)
4609 && ! CONSTANT_P (tem))
4610 tem = copy_addr_to_reg (tem);
4611 return tem;
4612 }
4613 }
4614
4615 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4616 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4617 is the same as for allocate_dynamic_stack_space. */
4618
4619 static rtx
4620 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4621 {
4622 rtx op0;
4623 rtx result;
4624 bool valid_arglist;
4625 unsigned int align;
4626 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4627 == BUILT_IN_ALLOCA_WITH_ALIGN);
4628
4629 valid_arglist
4630 = (alloca_with_align
4631 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4632 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4633
4634 if (!valid_arglist)
4635 return NULL_RTX;
4636
4637 /* Compute the argument. */
4638 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4639
4640 /* Compute the alignment. */
4641 align = (alloca_with_align
4642 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4643 : BIGGEST_ALIGNMENT);
4644
4645 /* Allocate the desired space. */
4646 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4647 result = convert_memory_address (ptr_mode, result);
4648
4649 return result;
4650 }
4651
4652 /* Expand a call to the bswap builtin in EXP.
4653 Return NULL_RTX if a normal call should be emitted rather than expanding the
4654 function in-line. If convenient, the result should be placed in TARGET.
4655 SUBTARGET may be used as the target for computing one of EXP's operands. */
4656
4657 static rtx
4658 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4659 rtx subtarget)
4660 {
4661 tree arg;
4662 rtx op0;
4663
4664 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4665 return NULL_RTX;
4666
4667 arg = CALL_EXPR_ARG (exp, 0);
4668 op0 = expand_expr (arg,
4669 subtarget && GET_MODE (subtarget) == target_mode
4670 ? subtarget : NULL_RTX,
4671 target_mode, EXPAND_NORMAL);
4672 if (GET_MODE (op0) != target_mode)
4673 op0 = convert_to_mode (target_mode, op0, 1);
4674
4675 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4676
4677 gcc_assert (target);
4678
4679 return convert_to_mode (target_mode, target, 1);
4680 }
4681
4682 /* Expand a call to a unary builtin in EXP.
4683 Return NULL_RTX if a normal call should be emitted rather than expanding the
4684 function in-line. If convenient, the result should be placed in TARGET.
4685 SUBTARGET may be used as the target for computing one of EXP's operands. */
4686
4687 static rtx
4688 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4689 rtx subtarget, optab op_optab)
4690 {
4691 rtx op0;
4692
4693 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4694 return NULL_RTX;
4695
4696 /* Compute the argument. */
4697 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4698 (subtarget
4699 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4700 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4701 VOIDmode, EXPAND_NORMAL);
4702 /* Compute op, into TARGET if possible.
4703 Set TARGET to wherever the result comes back. */
4704 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4705 op_optab, op0, target, op_optab != clrsb_optab);
4706 gcc_assert (target);
4707
4708 return convert_to_mode (target_mode, target, 0);
4709 }
4710
4711 /* Expand a call to __builtin_expect. We just return our argument
4712 as the builtin_expect semantics should already have been applied
4713 by the tree branch prediction pass. */
4714
4715 static rtx
4716 expand_builtin_expect (tree exp, rtx target)
4717 {
4718 tree arg;
4719
4720 if (call_expr_nargs (exp) < 2)
4721 return const0_rtx;
4722 arg = CALL_EXPR_ARG (exp, 0);
4723
4724 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4725 /* When guessing was done, the hints should already have been stripped away. */
4726 gcc_assert (!flag_guess_branch_prob
4727 || optimize == 0 || seen_error ());
4728 return target;
4729 }
4730
4731 /* Expand a call to __builtin_assume_aligned. We just return our first
4732 argument as the builtin_assume_aligned semantics should already have
4733 been applied by CCP. */
4734
4735 static rtx
4736 expand_builtin_assume_aligned (tree exp, rtx target)
4737 {
4738 if (call_expr_nargs (exp) < 2)
4739 return const0_rtx;
4740 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4741 EXPAND_NORMAL);
4742 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4743 && (call_expr_nargs (exp) < 3
4744 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4745 return target;
4746 }
4747
4748 void
4749 expand_builtin_trap (void)
4750 {
4751 #ifdef HAVE_trap
4752 if (HAVE_trap)
4753 {
4754 rtx insn = emit_insn (gen_trap ());
4755 /* For trap insns when not accumulating outgoing args force
4756 REG_ARGS_SIZE note to prevent crossjumping of calls with
4757 different args sizes. */
4758 if (!ACCUMULATE_OUTGOING_ARGS)
4759 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4760 }
4761 else
4762 #endif
4763 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4764 emit_barrier ();
4765 }
4766
4767 /* Expand a call to __builtin_unreachable. We do nothing except emit
4768 a barrier saying that control flow will not pass here.
4769
4770 It is the responsibility of the program being compiled to ensure
4771 that control flow never reaches __builtin_unreachable. */
4772 static void
4773 expand_builtin_unreachable (void)
4774 {
4775 emit_barrier ();
4776 }
4777
4778 /* Expand EXP, a call to fabs, fabsf or fabsl.
4779 Return NULL_RTX if a normal call should be emitted rather than expanding
4780 the function inline. If convenient, the result should be placed
4781 in TARGET. SUBTARGET may be used as the target for computing
4782 the operand. */
4783
4784 static rtx
4785 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4786 {
4787 machine_mode mode;
4788 tree arg;
4789 rtx op0;
4790
4791 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4792 return NULL_RTX;
4793
4794 arg = CALL_EXPR_ARG (exp, 0);
4795 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4796 mode = TYPE_MODE (TREE_TYPE (arg));
4797 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4798 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4799 }
4800
4801 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4802 Return NULL if a normal call should be emitted rather than expanding the
4803 function inline. If convenient, the result should be placed in TARGET.
4804 SUBTARGET may be used as the target for computing the operand. */
4805
4806 static rtx
4807 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4808 {
4809 rtx op0, op1;
4810 tree arg;
4811
4812 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4813 return NULL_RTX;
4814
4815 arg = CALL_EXPR_ARG (exp, 0);
4816 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4817
4818 arg = CALL_EXPR_ARG (exp, 1);
4819 op1 = expand_normal (arg);
4820
4821 return expand_copysign (op0, op1, target);
4822 }
4823
4824 /* Expand a call to __builtin___clear_cache. */
4825
4826 static rtx
4827 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4828 {
4829 #ifndef HAVE_clear_cache
4830 #ifdef CLEAR_INSN_CACHE
4831 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4832 does something. Just do the default expansion to a call to
4833 __clear_cache(). */
4834 return NULL_RTX;
4835 #else
4836 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4837 does nothing. There is no need to call it. Do nothing. */
4838 return const0_rtx;
4839 #endif /* CLEAR_INSN_CACHE */
4840 #else
4841 /* We have a "clear_cache" insn, and it will handle everything. */
4842 tree begin, end;
4843 rtx begin_rtx, end_rtx;
4844
4845 /* We must not expand to a library call. If we did, any
4846 fallback library function in libgcc that might contain a call to
4847 __builtin___clear_cache() would recurse infinitely. */
4848 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4849 {
4850 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4851 return const0_rtx;
4852 }
4853
4854 if (HAVE_clear_cache)
4855 {
4856 struct expand_operand ops[2];
4857
4858 begin = CALL_EXPR_ARG (exp, 0);
4859 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4860
4861 end = CALL_EXPR_ARG (exp, 1);
4862 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4863
4864 create_address_operand (&ops[0], begin_rtx);
4865 create_address_operand (&ops[1], end_rtx);
4866 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4867 return const0_rtx;
4868 }
4869 return const0_rtx;
4870 #endif /* HAVE_clear_cache */
4871 }
4872
4873 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4874
4875 static rtx
4876 round_trampoline_addr (rtx tramp)
4877 {
4878 rtx temp, addend, mask;
4879
4880 /* If we don't need too much alignment, we'll have been guaranteed
4881 proper alignment by get_trampoline_type. */
4882 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4883 return tramp;
4884
4885 /* Round address up to desired boundary. */
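/* This is the usual round-up-and-mask sequence: e.g. for an 8-byte
boundary, ADDEND is 7 and MASK is -8, so an address of 0x1005
becomes (0x1005 + 7) & -8 == 0x1008.  */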
4886 temp = gen_reg_rtx (Pmode);
4887 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4888 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4889
4890 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4891 temp, 0, OPTAB_LIB_WIDEN);
4892 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4893 temp, 0, OPTAB_LIB_WIDEN);
4894
4895 return tramp;
4896 }
4897
4898 static rtx
4899 expand_builtin_init_trampoline (tree exp, bool onstack)
4900 {
4901 tree t_tramp, t_func, t_chain;
4902 rtx m_tramp, r_tramp, r_chain, tmp;
4903
4904 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4905 POINTER_TYPE, VOID_TYPE))
4906 return NULL_RTX;
4907
4908 t_tramp = CALL_EXPR_ARG (exp, 0);
4909 t_func = CALL_EXPR_ARG (exp, 1);
4910 t_chain = CALL_EXPR_ARG (exp, 2);
4911
4912 r_tramp = expand_normal (t_tramp);
4913 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4914 MEM_NOTRAP_P (m_tramp) = 1;
4915
4916 /* If ONSTACK, the TRAMP argument should be the address of a field
4917 within the local function's FRAME decl. Either way, let's see if
4918 we can fill in the MEM_ATTRs for this memory. */
4919 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4920 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4921
4922 /* The creator of a heap trampoline is responsible for making sure the
4923 address is aligned to at least STACK_BOUNDARY. Normally malloc
4924 will ensure this anyhow. */
4925 tmp = round_trampoline_addr (r_tramp);
4926 if (tmp != r_tramp)
4927 {
4928 m_tramp = change_address (m_tramp, BLKmode, tmp);
4929 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4930 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4931 }
4932
4933 /* The FUNC argument should be the address of the nested function.
4934 Extract the actual function decl to pass to the hook. */
4935 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4936 t_func = TREE_OPERAND (t_func, 0);
4937 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4938
4939 r_chain = expand_normal (t_chain);
4940
4941 /* Generate insns to initialize the trampoline. */
4942 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4943
4944 if (onstack)
4945 {
4946 trampolines_created = 1;
4947
4948 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4949 "trampoline generated for nested function %qD", t_func);
4950 }
4951
4952 return const0_rtx;
4953 }
4954
4955 static rtx
4956 expand_builtin_adjust_trampoline (tree exp)
4957 {
4958 rtx tramp;
4959
4960 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4961 return NULL_RTX;
4962
4963 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4964 tramp = round_trampoline_addr (tramp);
4965 if (targetm.calls.trampoline_adjust_address)
4966 tramp = targetm.calls.trampoline_adjust_address (tramp);
4967
4968 return tramp;
4969 }
4970
4971 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4972 function. The function first checks whether the back end provides
4973 an insn to implement signbit for the respective mode. If not, it
4974 checks whether the floating point format of the value is such that
4975 the sign bit can be extracted. If that is not the case, the
4976 function returns NULL_RTX to indicate that a normal call should be
4977 emitted rather than expanding the function in-line. EXP is the
4978 expression that is a call to the builtin function; if convenient,
4979 the result should be placed in TARGET. */
4980 static rtx
4981 expand_builtin_signbit (tree exp, rtx target)
4982 {
4983 const struct real_format *fmt;
4984 machine_mode fmode, imode, rmode;
4985 tree arg;
4986 int word, bitpos;
4987 enum insn_code icode;
4988 rtx temp;
4989 location_t loc = EXPR_LOCATION (exp);
4990
4991 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4992 return NULL_RTX;
4993
4994 arg = CALL_EXPR_ARG (exp, 0);
4995 fmode = TYPE_MODE (TREE_TYPE (arg));
4996 rmode = TYPE_MODE (TREE_TYPE (exp));
4997 fmt = REAL_MODE_FORMAT (fmode);
4998
4999 arg = builtin_save_expr (arg);
5000
5001 /* Expand the argument yielding a RTX expression. */
5002 temp = expand_normal (arg);
5003
5004 /* Check if the back end provides an insn that handles signbit for the
5005 argument's mode. */
5006 icode = optab_handler (signbit_optab, fmode);
5007 if (icode != CODE_FOR_nothing)
5008 {
5009 rtx_insn *last = get_last_insn ();
5010 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5011 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5012 return target;
5013 delete_insns_since (last);
5014 }
5015
5016 /* For floating point formats without a sign bit, implement signbit
5017 as "ARG < 0.0". */
5018 bitpos = fmt->signbit_ro;
5019 if (bitpos < 0)
5020 {
5021 /* But we can't do this if the format supports signed zero. */
5022 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5023 return NULL_RTX;
5024
5025 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5026 build_real (TREE_TYPE (arg), dconst0));
5027 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5028 }
5029
5030 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5031 {
5032 imode = int_mode_for_mode (fmode);
5033 if (imode == BLKmode)
5034 return NULL_RTX;
5035 temp = gen_lowpart (imode, temp);
5036 }
5037 else
5038 {
5039 imode = word_mode;
5040 /* Handle targets with different FP word orders. */
5041 if (FLOAT_WORDS_BIG_ENDIAN)
5042 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5043 else
5044 word = bitpos / BITS_PER_WORD;
5045 temp = operand_subword_force (temp, word, fmode);
5046 bitpos = bitpos % BITS_PER_WORD;
5047 }
5048
5049 /* Force the intermediate word_mode (or narrower) result into a
5050 register. This avoids attempting to create paradoxical SUBREGs
5051 of floating point modes below. */
5052 temp = force_reg (imode, temp);
5053
5054 /* If the bitpos is within the "result mode" lowpart, the operation
5055 can be implemented with a single bitwise AND. Otherwise, we need
5056 a right shift and an AND. */
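/* For example, for IEEE single precision with a 32-bit result mode,
BITPOS is 31 and the AND below masks with 1 << 31.  */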
5057
5058 if (bitpos < GET_MODE_BITSIZE (rmode))
5059 {
5060 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5061
5062 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5063 temp = gen_lowpart (rmode, temp);
5064 temp = expand_binop (rmode, and_optab, temp,
5065 immed_wide_int_const (mask, rmode),
5066 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5067 }
5068 else
5069 {
5070 /* Perform a logical right shift to place the signbit in the least
5071 significant bit, then truncate the result to the desired mode
5072 and mask just this bit. */
5073 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5074 temp = gen_lowpart (rmode, temp);
5075 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5076 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5077 }
5078
5079 return temp;
5080 }
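
/* Editorial illustration (hypothetical values): for IEEE double on a
   64-bit little-endian target, fmt->signbit_ro is 63 while the result
   mode of signbit is 32-bit SImode, so the expansion above reduces to

       temp   = (the bits of ARG viewed in DImode);
       result = (int) (temp >> 63) & 1;

   i.e. the shift-then-AND path in the else branch.  */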
5081
5082 /* Expand fork or exec calls.  TARGET is the desired target of the
5083    call.  EXP is the call.  FN is the
5084    identifier of the actual function.  IGNORE is nonzero if the
5085    value is to be ignored.  */
5086
5087 static rtx
5088 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5089 {
5090 tree id, decl;
5091 tree call;
5092
5093 /* If we are not profiling, just call the function. */
5094 if (!profile_arc_flag)
5095 return NULL_RTX;
5096
5097   /* Otherwise call the wrapper.  This should be equivalent for the rest of the
5098 compiler, so the code does not diverge, and the wrapper may run the
5099 code necessary for keeping the profiling sane. */
5100
5101 switch (DECL_FUNCTION_CODE (fn))
5102 {
5103 case BUILT_IN_FORK:
5104 id = get_identifier ("__gcov_fork");
5105 break;
5106
5107 case BUILT_IN_EXECL:
5108 id = get_identifier ("__gcov_execl");
5109 break;
5110
5111 case BUILT_IN_EXECV:
5112 id = get_identifier ("__gcov_execv");
5113 break;
5114
5115 case BUILT_IN_EXECLP:
5116 id = get_identifier ("__gcov_execlp");
5117 break;
5118
5119 case BUILT_IN_EXECLE:
5120 id = get_identifier ("__gcov_execle");
5121 break;
5122
5123 case BUILT_IN_EXECVP:
5124 id = get_identifier ("__gcov_execvp");
5125 break;
5126
5127 case BUILT_IN_EXECVE:
5128 id = get_identifier ("__gcov_execve");
5129 break;
5130
5131 default:
5132 gcc_unreachable ();
5133 }
5134
5135 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5136 FUNCTION_DECL, id, TREE_TYPE (fn));
5137 DECL_EXTERNAL (decl) = 1;
5138 TREE_PUBLIC (decl) = 1;
5139 DECL_ARTIFICIAL (decl) = 1;
5140 TREE_NOTHROW (decl) = 1;
5141 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5142 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5143 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5144 return expand_call (call, target, ignore);
5145 }
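
/* Editorial illustration: with -fprofile-arcs in effect, a user call

       pid_t pid = fork ();

   is redirected here to __gcov_fork, the libgcov wrapper named above,
   which is expected to keep the profile counters consistent across
   the fork before performing the real system call.  */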
5146
5147
5148 \f
5149 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5150 the pointer in these functions is void*, the tree optimizers may remove
5151 casts. The mode computed in expand_builtin isn't reliable either, due
5152 to __sync_bool_compare_and_swap.
5153
5154 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5155 group of builtins. This gives us log2 of the mode size. */
5156
5157 static inline machine_mode
5158 get_builtin_sync_mode (int fcode_diff)
5159 {
5160 /* The size is not negotiable, so ask not to get BLKmode in return
5161 if the target indicates that a smaller size would be better. */
5162 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5163 }
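
/* Worked example (editorial): for, say, __sync_fetch_and_add_4 the
   caller passes fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2, so the
   requested size is BITS_PER_UNIT << 2 == 32 bits, and mode_for_size
   returns SImode on a typical target with 8-bit bytes.  */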
5164
5165 /* Expand the memory expression LOC and return the appropriate memory operand
5166 for the builtin_sync operations. */
5167
5168 static rtx
5169 get_builtin_sync_mem (tree loc, machine_mode mode)
5170 {
5171 rtx addr, mem;
5172
5173 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5174 addr = convert_memory_address (Pmode, addr);
5175
5176 /* Note that we explicitly do not want any alias information for this
5177 memory, so that we kill all other live memories. Otherwise we don't
5178 satisfy the full barrier semantics of the intrinsic. */
5179 mem = validize_mem (gen_rtx_MEM (mode, addr));
5180
5181   /* The alignment needs to be at least that of the mode.  */
5182 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5183 get_pointer_alignment (loc)));
5184 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5185 MEM_VOLATILE_P (mem) = 1;
5186
5187 return mem;
5188 }
5189
5190 /* Make sure an argument is in the right mode.
5191 EXP is the tree argument.
5192 MODE is the mode it should be in. */
5193
5194 static rtx
5195 expand_expr_force_mode (tree exp, machine_mode mode)
5196 {
5197 rtx val;
5198 machine_mode old_mode;
5199
5200 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5201 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5202 of CONST_INTs, where we know the old_mode only from the call argument. */
5203
5204 old_mode = GET_MODE (val);
5205 if (old_mode == VOIDmode)
5206 old_mode = TYPE_MODE (TREE_TYPE (exp));
5207 val = convert_modes (mode, old_mode, val, 1);
5208 return val;
5209 }
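
/* Worked example (editorial): a constant such as the 1 in
   __sync_fetch_and_add_2 (&x, 1) expands to a VOIDmode CONST_INT, so
   OLD_MODE cannot be read from the rtx itself and is instead recovered
   from the tree type of EXP before convert_modes produces the value in
   the requested HImode.  */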
5210
5211
5212 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5213 EXP is the CALL_EXPR. CODE is the rtx code
5214 that corresponds to the arithmetic or logical operation from the name;
5215 an exception here is that NOT actually means NAND. TARGET is an optional
5216 place for us to store the results; AFTER is true if this is the
5217 fetch_and_xxx form. */
5218
5219 static rtx
5220 expand_builtin_sync_operation (machine_mode mode, tree exp,
5221 enum rtx_code code, bool after,
5222 rtx target)
5223 {
5224 rtx val, mem;
5225 location_t loc = EXPR_LOCATION (exp);
5226
5227 if (code == NOT && warn_sync_nand)
5228 {
5229 tree fndecl = get_callee_fndecl (exp);
5230 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5231
5232 static bool warned_f_a_n, warned_n_a_f;
5233
5234 switch (fcode)
5235 {
5236 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5237 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5238 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5239 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5240 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5241 if (warned_f_a_n)
5242 break;
5243
5244 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5245 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5246 warned_f_a_n = true;
5247 break;
5248
5249 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5250 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5251 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5252 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5253 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5254 if (warned_n_a_f)
5255 break;
5256
5257 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5258 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5259 warned_n_a_f = true;
5260 break;
5261
5262 default:
5263 gcc_unreachable ();
5264 }
5265 }
5266
5267 /* Expand the operands. */
5268 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5269 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5270
5271 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5272 after);
5273 }
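
/* Editorial note on the semantics behind the warning above: since
   GCC 4.4, __sync_fetch_and_nand (&x, v) behaves as the atomic
   equivalent of

       tmp = x;  x = ~(tmp & v);  return tmp;

   whereas releases before 4.4 computed x = ~tmp & v.  This is also why
   the CODE value NOT is interpreted as NAND by these expanders.  */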
5274
5275 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5276 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5277 true if this is the boolean form. TARGET is a place for us to store the
5278 results; this is NOT optional if IS_BOOL is true. */
5279
5280 static rtx
5281 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5282 bool is_bool, rtx target)
5283 {
5284 rtx old_val, new_val, mem;
5285 rtx *pbool, *poval;
5286
5287 /* Expand the operands. */
5288 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5289 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5290 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5291
5292 pbool = poval = NULL;
5293 if (target != const0_rtx)
5294 {
5295 if (is_bool)
5296 pbool = &target;
5297 else
5298 poval = &target;
5299 }
5300 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5301 false, MEMMODEL_SEQ_CST,
5302 MEMMODEL_SEQ_CST))
5303 return NULL_RTX;
5304
5305 return target;
5306 }
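
/* Usage illustration (hypothetical user code, editorial): the two entry
   points expanded here are

       bool ok = __sync_bool_compare_and_swap (&x, oldv, newv);
       T old   = __sync_val_compare_and_swap (&x, oldv, newv);

   For the boolean form PBOOL receives the comparison result, which is
   why TARGET is documented above as mandatory when IS_BOOL is true.  */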
5307
5308 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5309 general form is actually an atomic exchange, and some targets only
5310 support a reduced form with the second argument being a constant 1.
5311 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5312 the results. */
5313
5314 static rtx
5315 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5316 rtx target)
5317 {
5318 rtx val, mem;
5319
5320 /* Expand the operands. */
5321 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5322 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5323
5324 return expand_sync_lock_test_and_set (target, mem, val);
5325 }
5326
5327 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5328
5329 static void
5330 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5331 {
5332 rtx mem;
5333
5334 /* Expand the operands. */
5335 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5336
5337 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5338 }
5339
5340 /* Given an integer representing an ``enum memmodel'', verify its
5341 correctness and return the memory model enum. */
5342
5343 static enum memmodel
5344 get_memmodel (tree exp)
5345 {
5346 rtx op;
5347 unsigned HOST_WIDE_INT val;
5348
5349   /* If the parameter is not a constant, it's a run-time value, so we'll just
5350      convert it to MEMMODEL_SEQ_CST to avoid annoying run-time checking.  */
5351 if (TREE_CODE (exp) != INTEGER_CST)
5352 return MEMMODEL_SEQ_CST;
5353
5354 op = expand_normal (exp);
5355
5356 val = INTVAL (op);
5357 if (targetm.memmodel_check)
5358 val = targetm.memmodel_check (val);
5359 else if (val & ~MEMMODEL_MASK)
5360 {
5361       warning (OPT_Winvalid_memory_model,
5362 	       "unknown architecture specifier in memory model to builtin");
5363 return MEMMODEL_SEQ_CST;
5364 }
5365
5366 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5367 {
5368 warning (OPT_Winvalid_memory_model,
5369 "invalid memory model argument to builtin");
5370 return MEMMODEL_SEQ_CST;
5371 }
5372
5373 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5374 be conservative and promote consume to acquire. */
5375 if (val == MEMMODEL_CONSUME)
5376 val = MEMMODEL_ACQUIRE;
5377
5378 return (enum memmodel) val;
5379 }
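
/* Worked example (editorial): a call such as

       __atomic_load_n (&p, __ATOMIC_CONSUME);

   arrives here with an INTEGER_CST equal to MEMMODEL_CONSUME, which the
   PR 59448 workaround above promotes to MEMMODEL_ACQUIRE; a
   non-constant model argument is conservatively treated as
   MEMMODEL_SEQ_CST instead.  */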
5380
5381 /* Expand the __atomic_exchange intrinsic:
5382 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5383 EXP is the CALL_EXPR.
5384 TARGET is an optional place for us to store the results. */
5385
5386 static rtx
5387 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5388 {
5389 rtx val, mem;
5390 enum memmodel model;
5391
5392 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5393
5394 if (!flag_inline_atomics)
5395 return NULL_RTX;
5396
5397 /* Expand the operands. */
5398 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5399 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5400
5401 return expand_atomic_exchange (target, mem, val, model);
5402 }
5403
5404 /* Expand the __atomic_compare_exchange intrinsic:
5405 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5406 TYPE desired, BOOL weak,
5407 enum memmodel success,
5408 enum memmodel failure)
5409 EXP is the CALL_EXPR.
5410 TARGET is an optional place for us to store the results. */
5411
5412 static rtx
5413 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5414 rtx target)
5415 {
5416 rtx expect, desired, mem, oldval;
5417 rtx_code_label *label;
5418 enum memmodel success, failure;
5419 tree weak;
5420 bool is_weak;
5421
5422 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5423 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5424
5425 if (failure > success)
5426 {
5427 warning (OPT_Winvalid_memory_model,
5428 "failure memory model cannot be stronger than success memory "
5429 "model for %<__atomic_compare_exchange%>");
5430 success = MEMMODEL_SEQ_CST;
5431 }
5432
5433 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5434 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5435 {
5436 warning (OPT_Winvalid_memory_model,
5437 "invalid failure memory model for "
5438 "%<__atomic_compare_exchange%>");
5439 failure = MEMMODEL_SEQ_CST;
5440 success = MEMMODEL_SEQ_CST;
5441 }
5442
5443
5444 if (!flag_inline_atomics)
5445 return NULL_RTX;
5446
5447 /* Expand the operands. */
5448 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5449
5450 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5451 expect = convert_memory_address (Pmode, expect);
5452 expect = gen_rtx_MEM (mode, expect);
5453 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5454
5455 weak = CALL_EXPR_ARG (exp, 3);
5456 is_weak = false;
5457 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5458 is_weak = true;
5459
5460 if (target == const0_rtx)
5461 target = NULL;
5462
5463   /* Lest the rtl backend create a race condition with an improper store
5464      to memory, always create a new pseudo for OLDVAL.  */
5465 oldval = NULL;
5466
5467 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5468 is_weak, success, failure))
5469 return NULL_RTX;
5470
5471 /* Conditionally store back to EXPECT, lest we create a race condition
5472 with an improper store to memory. */
5473 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5474 the normal case where EXPECT is totally private, i.e. a register. At
5475 which point the store can be unconditional. */
5476 label = gen_label_rtx ();
5477 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5478 emit_move_insn (expect, oldval);
5479 emit_label (label);
5480
5481 return target;
5482 }
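
/* Sketch of the emitted control flow (editorial paraphrase):

       target = CAS (mem, expect, desired);   success flag
       if (!target)
         *expect = oldval;                    conditional store-back

   matching the documented behaviour of __atomic_compare_exchange,
   which writes the value actually observed back to *EXPECT when the
   exchange fails.  */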
5483
5484 /* Expand the __atomic_load intrinsic:
5485 TYPE __atomic_load (TYPE *object, enum memmodel)
5486 EXP is the CALL_EXPR.
5487 TARGET is an optional place for us to store the results. */
5488
5489 static rtx
5490 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5491 {
5492 rtx mem;
5493 enum memmodel model;
5494
5495 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5496 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5497 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5498 {
5499 warning (OPT_Winvalid_memory_model,
5500 "invalid memory model for %<__atomic_load%>");
5501 model = MEMMODEL_SEQ_CST;
5502 }
5503
5504 if (!flag_inline_atomics)
5505 return NULL_RTX;
5506
5507 /* Expand the operand. */
5508 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5509
5510 return expand_atomic_load (target, mem, model);
5511 }
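
/* Editorial illustration (hypothetical user code): a call such as

       __atomic_load_n (&x, __ATOMIC_RELEASE);

   trips the check above, since release and acquire-release orderings
   are meaningless for a pure load; the model is forced to sequential
   consistency instead of rejecting the call outright.  */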
5512
5513
5514 /* Expand the __atomic_store intrinsic:
5515 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5516 EXP is the CALL_EXPR.
5517 TARGET is an optional place for us to store the results. */
5518
5519 static rtx
5520 expand_builtin_atomic_store (machine_mode mode, tree exp)
5521 {
5522 rtx mem, val;
5523 enum memmodel model;
5524
5525 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5526 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5527 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5528 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5529 {
5530 warning (OPT_Winvalid_memory_model,
5531 "invalid memory model for %<__atomic_store%>");
5532 model = MEMMODEL_SEQ_CST;
5533 }
5534
5535 if (!flag_inline_atomics)
5536 return NULL_RTX;
5537
5538 /* Expand the operands. */
5539 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5540 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5541
5542 return expand_atomic_store (mem, val, model, false);
5543 }
5544
5545 /* Expand the __atomic_fetch_XXX intrinsic:
5546 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5547 EXP is the CALL_EXPR.
5548 TARGET is an optional place for us to store the results.
5549    CODE is the operation, PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5550    FETCH_AFTER is true if the result of the operation is returned;
5551    FETCH_AFTER is false if the value before the operation is returned.
5552 IGNORE is true if the result is not used.
5553 EXT_CALL is the correct builtin for an external call if this cannot be
5554 resolved to an instruction sequence. */
5555
5556 static rtx
5557 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5558 enum rtx_code code, bool fetch_after,
5559 bool ignore, enum built_in_function ext_call)
5560 {
5561 rtx val, mem, ret;
5562 enum memmodel model;
5563 tree fndecl;
5564 tree addr;
5565
5566 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5567
5568 /* Expand the operands. */
5569 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5570 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5571
5572 /* Only try generating instructions if inlining is turned on. */
5573 if (flag_inline_atomics)
5574 {
5575 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5576 if (ret)
5577 return ret;
5578 }
5579
5580   /* If there is no alternate library routine, return NULL_RTX so a normal call is emitted.  */
5581 if (ext_call == BUILT_IN_NONE)
5582 return NULL_RTX;
5583
5584 /* Change the call to the specified function. */
5585 fndecl = get_callee_fndecl (exp);
5586 addr = CALL_EXPR_FN (exp);
5587 STRIP_NOPS (addr);
5588
5589 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5590 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5591
5592 /* Expand the call here so we can emit trailing code. */
5593 ret = expand_call (exp, target, ignore);
5594
5595 /* Replace the original function just in case it matters. */
5596 TREE_OPERAND (addr, 0) = fndecl;
5597
5598 /* Then issue the arithmetic correction to return the right result. */
5599 if (!ignore)
5600 {
5601 if (code == NOT)
5602 {
5603 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5604 OPTAB_LIB_WIDEN);
5605 ret = expand_simple_unop (mode, NOT, ret, target, true);
5606 }
5607 else
5608 ret = expand_simple_binop (mode, code, ret, val, target, true,
5609 OPTAB_LIB_WIDEN);
5610 }
5611 return ret;
5612 }
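
/* Worked example of the arithmetic correction above (editorial): if
   __atomic_add_fetch_4 cannot be expanded inline, it is re-routed to
   the __atomic_fetch_add_4 library routine, which returns the old
   value; the add-fetch result is then recovered as ret + val.  For
   NAND the correction is ~(ret & val), which is why the NOT case
   needs both an AND and a NOT.  */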
5613
5614
5615 #ifndef HAVE_atomic_clear
5616 # define HAVE_atomic_clear 0
5617 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5618 #endif
5619
5620 /* Expand an atomic clear operation.
5621    void __atomic_clear (BOOL *obj, enum memmodel)
5622 EXP is the call expression. */
5623
5624 static rtx
5625 expand_builtin_atomic_clear (tree exp)
5626 {
5627 machine_mode mode;
5628 rtx mem, ret;
5629 enum memmodel model;
5630
5631 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5632 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5633 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5634
5635 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME
5636 || (model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5637 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5638 {
5639 warning (OPT_Winvalid_memory_model,
5640 "invalid memory model for %<__atomic_store%>");
5641 model = MEMMODEL_SEQ_CST;
5642 }
5643
5644 if (HAVE_atomic_clear)
5645 {
5646 emit_insn (gen_atomic_clear (mem, model));
5647 return const0_rtx;
5648 }
5649
5650   /* Try issuing an atomic store, allowing expand_atomic_store to fall back
5651      internally to a __sync_lock_release sequence.  The only way this can
5652      fail is if the bool type is larger than a word size.  Unlikely, but
5653      handle it anyway for completeness.  Assume a single-threaded model,
5654      since there is no atomic support in that case and no barriers are required.  */
5655 ret = expand_atomic_store (mem, const0_rtx, model, true);
5656 if (!ret)
5657 emit_move_insn (mem, const0_rtx);
5658 return const0_rtx;
5659 }
5660
5661 /* Expand an atomic test_and_set operation.
5662    bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5663 EXP is the call expression. */
5664
5665 static rtx
5666 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5667 {
5668 rtx mem;
5669 enum memmodel model;
5670 machine_mode mode;
5671
5672 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5673 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5674 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5675
5676 return expand_atomic_test_and_set (target, mem, model);
5677 }
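
/* Usage illustration (hypothetical user code, editorial):
   __atomic_test_and_set and __atomic_clear are commonly paired to
   build a simple spin lock:

       while (__atomic_test_and_set (&lock, __ATOMIC_ACQUIRE))
         ;
       ... critical section ...
       __atomic_clear (&lock, __ATOMIC_RELEASE);

   The test-and-set returns the previous contents, so the loop exits
   once this thread is the one that set the flag.  */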
5678
5679
5680 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5681 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5682
5683 static tree
5684 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5685 {
5686 int size;
5687 machine_mode mode;
5688 unsigned int mode_align, type_align;
5689
5690 if (TREE_CODE (arg0) != INTEGER_CST)
5691 return NULL_TREE;
5692
5693 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5694 mode = mode_for_size (size, MODE_INT, 0);
5695 mode_align = GET_MODE_ALIGNMENT (mode);
5696
5697 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5698 type_align = mode_align;
5699 else
5700 {
5701 tree ttype = TREE_TYPE (arg1);
5702
5703 /* This function is usually invoked and folded immediately by the front
5704 end before anything else has a chance to look at it. The pointer
5705 parameter at this point is usually cast to a void *, so check for that
5706 and look past the cast. */
5707 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5708 && VOID_TYPE_P (TREE_TYPE (ttype)))
5709 arg1 = TREE_OPERAND (arg1, 0);
5710
5711 ttype = TREE_TYPE (arg1);
5712 gcc_assert (POINTER_TYPE_P (ttype));
5713
5714 /* Get the underlying type of the object. */
5715 ttype = TREE_TYPE (ttype);
5716 type_align = TYPE_ALIGN (ttype);
5717 }
5718
5719   /* If the object has smaller alignment, the lock free routines cannot
5720      be used.  */
5721 if (type_align < mode_align)
5722 return boolean_false_node;
5723
5724 /* Check if a compare_and_swap pattern exists for the mode which represents
5725 the required size. The pattern is not allowed to fail, so the existence
5726 of the pattern indicates support is present. */
5727 if (can_compare_and_swap_p (mode, true))
5728 return boolean_true_node;
5729 else
5730 return boolean_false_node;
5731 }
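
/* Worked example (editorial, target-dependent): on a typical x86_64
   target, __atomic_always_lock_free (4, 0) folds here to
   boolean_true_node: the NULL object defaults to the 32-bit mode's own
   alignment, and a compare-and-swap pattern exists for that mode.  A
   4-byte object known to be only 1-byte aligned would instead fold to
   boolean_false_node.  */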
5732
5733 /* Return true if the parameters to call EXP represent an object which will
5734 always generate lock free instructions. The first argument represents the
5735 size of the object, and the second parameter is a pointer to the object
5736 itself. If NULL is passed for the object, then the result is based on
5737 typical alignment for an object of the specified size. Otherwise return
5738 false. */
5739
5740 static rtx
5741 expand_builtin_atomic_always_lock_free (tree exp)
5742 {
5743 tree size;
5744 tree arg0 = CALL_EXPR_ARG (exp, 0);
5745 tree arg1 = CALL_EXPR_ARG (exp, 1);
5746
5747 if (TREE_CODE (arg0) != INTEGER_CST)
5748 {
5749 error ("non-constant argument 1 to __atomic_always_lock_free");
5750 return const0_rtx;
5751 }
5752
5753 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5754 if (size == boolean_true_node)
5755 return const1_rtx;
5756 return const0_rtx;
5757 }
5758
5759 /* Return boolean_true_node if it can be determined that object ARG1 of size
5760    ARG0 is lock free on this architecture; otherwise return NULL_TREE.  */
5761
5762 static tree
5763 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5764 {
5765 if (!flag_inline_atomics)
5766 return NULL_TREE;
5767
5768 /* If it isn't always lock free, don't generate a result. */
5769 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5770 return boolean_true_node;
5771
5772 return NULL_TREE;
5773 }
5774
5775 /* Return true if it can be determined at compile time that the object
5776    referenced by call EXP is lock free.  The first argument represents the
5777    size of the object, and the second parameter is a pointer to the object
5778    itself.  If NULL is passed for the object, then the result is based on
5779    typical alignment for an object of the specified size.  Otherwise
5780    return NULL_RTX.  */
5781
5782 static rtx
5783 expand_builtin_atomic_is_lock_free (tree exp)
5784 {
5785 tree size;
5786 tree arg0 = CALL_EXPR_ARG (exp, 0);
5787 tree arg1 = CALL_EXPR_ARG (exp, 1);
5788
5789 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5790 {
5791 error ("non-integer argument 1 to __atomic_is_lock_free");
5792 return NULL_RTX;
5793 }
5794
5795 if (!flag_inline_atomics)
5796 return NULL_RTX;
5797
5798 /* If the value is known at compile time, return the RTX for it. */
5799 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5800 if (size == boolean_true_node)
5801 return const1_rtx;
5802
5803 return NULL_RTX;
5804 }
5805
5806 /* Expand the __atomic_thread_fence intrinsic:
5807 void __atomic_thread_fence (enum memmodel)
5808 EXP is the CALL_EXPR. */
5809
5810 static void
5811 expand_builtin_atomic_thread_fence (tree exp)
5812 {
5813 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5814 expand_mem_thread_fence (model);
5815 }
5816
5817 /* Expand the __atomic_signal_fence intrinsic:
5818 void __atomic_signal_fence (enum memmodel)
5819 EXP is the CALL_EXPR. */
5820
5821 static void
5822 expand_builtin_atomic_signal_fence (tree exp)
5823 {
5824 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5825 expand_mem_signal_fence (model);
5826 }
5827
5828 /* Expand the __sync_synchronize intrinsic. */
5829
5830 static void
5831 expand_builtin_sync_synchronize (void)
5832 {
5833 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5834 }
5835
5836 static rtx
5837 expand_builtin_thread_pointer (tree exp, rtx target)
5838 {
5839 enum insn_code icode;
5840 if (!validate_arglist (exp, VOID_TYPE))
5841 return const0_rtx;
5842 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5843 if (icode != CODE_FOR_nothing)
5844 {
5845 struct expand_operand op;
5846       /* If the target is not suitable, create a new target. */
5847 if (target == NULL_RTX
5848 || !REG_P (target)
5849 || GET_MODE (target) != Pmode)
5850 target = gen_reg_rtx (Pmode);
5851 create_output_operand (&op, target, Pmode);
5852 expand_insn (icode, 1, &op);
5853 return target;
5854 }
5855 error ("__builtin_thread_pointer is not supported on this target");
5856 return const0_rtx;
5857 }
5858
5859 static void
5860 expand_builtin_set_thread_pointer (tree exp)
5861 {
5862 enum insn_code icode;
5863 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5864 return;
5865 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5866 if (icode != CODE_FOR_nothing)
5867 {
5868 struct expand_operand op;
5869 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5870 Pmode, EXPAND_NORMAL);
5871 create_input_operand (&op, val, Pmode);
5872 expand_insn (icode, 1, &op);
5873 return;
5874 }
5875 error ("__builtin_set_thread_pointer is not supported on this target");
5876 }
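
/* Editorial usage note (hypothetical user code): the pair above expands

       void *tp = __builtin_thread_pointer ();
       __builtin_set_thread_pointer (tp);

   Both depend entirely on the target providing the corresponding
   get_thread_pointer / set_thread_pointer optab patterns; there is no
   generic fallback, hence the hard errors.  */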
5877
5878 \f
5879 /* Emit code to restore the current value of stack. */
5880
5881 static void
5882 expand_stack_restore (tree var)
5883 {
5884 rtx_insn *prev;
5885 rtx sa = expand_normal (var);
5886
5887 sa = convert_memory_address (Pmode, sa);
5888
5889 prev = get_last_insn ();
5890 emit_stack_restore (SAVE_BLOCK, sa);
5891 fixup_args_size_notes (prev, get_last_insn (), 0);
5892 }
5893
5894
5895 /* Emit code to save the current value of stack. */
5896
5897 static rtx
5898 expand_stack_save (void)
5899 {
5900 rtx ret = NULL_RTX;
5901
5902 do_pending_stack_adjust ();
5903 emit_stack_save (SAVE_BLOCK, &ret);
5904 return ret;
5905 }
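
/* Editorial illustration: these two expanders back the internal
   __builtin_stack_save / __builtin_stack_restore pair that the
   gimplifier wraps around blocks declaring variable-length arrays,
   roughly

       sp = __builtin_stack_save ();
       { char vla[n]; ... }
       __builtin_stack_restore (sp);

   so that the VLA's stack space is reclaimed on scope exit.  */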
5906
5907
5908 /* Expand OpenACC acc_on_device.
5909
5910 This has to happen late (that is, not in early folding; expand_builtin_*,
5911 rather than fold_builtin_*), as we have to act differently for host and
5912 acceleration device (ACCEL_COMPILER conditional). */
5913
5914 static rtx
5915 expand_builtin_acc_on_device (tree exp, rtx target)
5916 {
5917 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5918 return NULL_RTX;
5919
5920 tree arg = CALL_EXPR_ARG (exp, 0);
5921
5922 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5923 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5924 rtx v = expand_normal (arg), v1, v2;
5925 #ifdef ACCEL_COMPILER
5926 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5927 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
5928 #else
5929 v1 = GEN_INT (GOMP_DEVICE_NONE);
5930 v2 = GEN_INT (GOMP_DEVICE_HOST);
5931 #endif
5932 machine_mode target_mode = TYPE_MODE (integer_type_node);
5933 if (!target || !register_operand (target, target_mode))
5934 target = gen_reg_rtx (target_mode);
5935 emit_move_insn (target, const1_rtx);
5936 rtx_code_label *done_label = gen_label_rtx ();
5937 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
5938 NULL_RTX, done_label, PROB_EVEN);
5939 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
5940 NULL_RTX, done_label, PROB_EVEN);
5941 emit_move_insn (target, const0_rtx);
5942 emit_label (done_label);
5943
5944 return target;
5945 }
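
/* Editorial illustration: when this file is compiled for the host (no
   ACCEL_COMPILER), the comparison sequence above effectively computes

       target = (arg == GOMP_DEVICE_NONE || arg == GOMP_DEVICE_HOST);

   whereas an offload compiler instead tests GOMP_DEVICE_NOT_HOST and
   its own ACCEL_COMPILER_acc_device value.  */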
5946
5947
5948 /* Expand an expression EXP that calls a built-in function,
5949 with result going to TARGET if that's convenient
5950 (and in mode MODE if that's convenient).
5951 SUBTARGET may be used as the target for computing one of EXP's operands.
5952 IGNORE is nonzero if the value is to be ignored. */
5953
5954 rtx
5955 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5956 int ignore)
5957 {
5958 tree fndecl = get_callee_fndecl (exp);
5959 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5960 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5961 int flags;
5962
5963 /* When ASan is enabled, we don't want to expand some memory/string
5964 builtins and rely on libsanitizer's hooks. This allows us to avoid
5965 redundant checks and be sure, that possible overflow will be detected
5966 by ASan. */
5967
5968 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5969 return expand_call (exp, target, ignore);
5970
5971 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5972 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5973
5974 /* When not optimizing, generate calls to library functions for a certain
5975 set of builtins. */
5976 if (!optimize
5977 && !called_as_built_in (fndecl)
5978 && fcode != BUILT_IN_FORK
5979 && fcode != BUILT_IN_EXECL
5980 && fcode != BUILT_IN_EXECV
5981 && fcode != BUILT_IN_EXECLP
5982 && fcode != BUILT_IN_EXECLE
5983 && fcode != BUILT_IN_EXECVP
5984 && fcode != BUILT_IN_EXECVE
5985 && fcode != BUILT_IN_ALLOCA
5986 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5987 && fcode != BUILT_IN_FREE
5988 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5989 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5990 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5991 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5992 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5993 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5994 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5995 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5996 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5997 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5998 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5999 && fcode != BUILT_IN_CHKP_BNDRET)
6000 return expand_call (exp, target, ignore);
6001
6002 /* The built-in function expanders test for target == const0_rtx
6003 to determine whether the function's result will be ignored. */
6004 if (ignore)
6005 target = const0_rtx;
6006
6007 /* If the result of a pure or const built-in function is ignored, and
6008 none of its arguments are volatile, we can avoid expanding the
6009 built-in call and just evaluate the arguments for side-effects. */
6010 if (target == const0_rtx
6011 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6012 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6013 {
6014 bool volatilep = false;
6015 tree arg;
6016 call_expr_arg_iterator iter;
6017
6018 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6019 if (TREE_THIS_VOLATILE (arg))
6020 {
6021 volatilep = true;
6022 break;
6023 }
6024
6025 if (! volatilep)
6026 {
6027 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6028 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6029 return const0_rtx;
6030 }
6031 }
6032
6033 /* expand_builtin_with_bounds is supposed to be used for
6034 instrumented builtin calls. */
6035 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6036
6037 switch (fcode)
6038 {
6039 CASE_FLT_FN (BUILT_IN_FABS):
6040 case BUILT_IN_FABSD32:
6041 case BUILT_IN_FABSD64:
6042 case BUILT_IN_FABSD128:
6043 target = expand_builtin_fabs (exp, target, subtarget);
6044 if (target)
6045 return target;
6046 break;
6047
6048 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6049 target = expand_builtin_copysign (exp, target, subtarget);
6050 if (target)
6051 return target;
6052 break;
6053
6054 /* Just do a normal library call if we were unable to fold
6055 the values. */
6056 CASE_FLT_FN (BUILT_IN_CABS):
6057 break;
6058
6059 CASE_FLT_FN (BUILT_IN_EXP):
6060 CASE_FLT_FN (BUILT_IN_EXP10):
6061 CASE_FLT_FN (BUILT_IN_POW10):
6062 CASE_FLT_FN (BUILT_IN_EXP2):
6063 CASE_FLT_FN (BUILT_IN_EXPM1):
6064 CASE_FLT_FN (BUILT_IN_LOGB):
6065 CASE_FLT_FN (BUILT_IN_LOG):
6066 CASE_FLT_FN (BUILT_IN_LOG10):
6067 CASE_FLT_FN (BUILT_IN_LOG2):
6068 CASE_FLT_FN (BUILT_IN_LOG1P):
6069 CASE_FLT_FN (BUILT_IN_TAN):
6070 CASE_FLT_FN (BUILT_IN_ASIN):
6071 CASE_FLT_FN (BUILT_IN_ACOS):
6072 CASE_FLT_FN (BUILT_IN_ATAN):
6073 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6074 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6075 because of possible accuracy problems. */
6076 if (! flag_unsafe_math_optimizations)
6077 break;
6078 CASE_FLT_FN (BUILT_IN_SQRT):
6079 CASE_FLT_FN (BUILT_IN_FLOOR):
6080 CASE_FLT_FN (BUILT_IN_CEIL):
6081 CASE_FLT_FN (BUILT_IN_TRUNC):
6082 CASE_FLT_FN (BUILT_IN_ROUND):
6083 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6084 CASE_FLT_FN (BUILT_IN_RINT):
6085 target = expand_builtin_mathfn (exp, target, subtarget);
6086 if (target)
6087 return target;
6088 break;
6089
6090 CASE_FLT_FN (BUILT_IN_FMA):
6091 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6092 if (target)
6093 return target;
6094 break;
6095
6096 CASE_FLT_FN (BUILT_IN_ILOGB):
6097 if (! flag_unsafe_math_optimizations)
6098 break;
6099 CASE_FLT_FN (BUILT_IN_ISINF):
6100 CASE_FLT_FN (BUILT_IN_FINITE):
6101 case BUILT_IN_ISFINITE:
6102 case BUILT_IN_ISNORMAL:
6103 target = expand_builtin_interclass_mathfn (exp, target);
6104 if (target)
6105 return target;
6106 break;
6107
6108 CASE_FLT_FN (BUILT_IN_ICEIL):
6109 CASE_FLT_FN (BUILT_IN_LCEIL):
6110 CASE_FLT_FN (BUILT_IN_LLCEIL):
6111 CASE_FLT_FN (BUILT_IN_LFLOOR):
6112 CASE_FLT_FN (BUILT_IN_IFLOOR):
6113 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6114 target = expand_builtin_int_roundingfn (exp, target);
6115 if (target)
6116 return target;
6117 break;
6118
6119 CASE_FLT_FN (BUILT_IN_IRINT):
6120 CASE_FLT_FN (BUILT_IN_LRINT):
6121 CASE_FLT_FN (BUILT_IN_LLRINT):
6122 CASE_FLT_FN (BUILT_IN_IROUND):
6123 CASE_FLT_FN (BUILT_IN_LROUND):
6124 CASE_FLT_FN (BUILT_IN_LLROUND):
6125 target = expand_builtin_int_roundingfn_2 (exp, target);
6126 if (target)
6127 return target;
6128 break;
6129
6130 CASE_FLT_FN (BUILT_IN_POWI):
6131 target = expand_builtin_powi (exp, target);
6132 if (target)
6133 return target;
6134 break;
6135
6136 CASE_FLT_FN (BUILT_IN_ATAN2):
6137 CASE_FLT_FN (BUILT_IN_LDEXP):
6138 CASE_FLT_FN (BUILT_IN_SCALB):
6139 CASE_FLT_FN (BUILT_IN_SCALBN):
6140 CASE_FLT_FN (BUILT_IN_SCALBLN):
6141 if (! flag_unsafe_math_optimizations)
6142 break;
6143
6144 CASE_FLT_FN (BUILT_IN_FMOD):
6145 CASE_FLT_FN (BUILT_IN_REMAINDER):
6146 CASE_FLT_FN (BUILT_IN_DREM):
6147 CASE_FLT_FN (BUILT_IN_POW):
6148 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6149 if (target)
6150 return target;
6151 break;
6152
6153 CASE_FLT_FN (BUILT_IN_CEXPI):
6154 target = expand_builtin_cexpi (exp, target);
6155 gcc_assert (target);
6156 return target;
6157
6158 CASE_FLT_FN (BUILT_IN_SIN):
6159 CASE_FLT_FN (BUILT_IN_COS):
6160 if (! flag_unsafe_math_optimizations)
6161 break;
6162 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6163 if (target)
6164 return target;
6165 break;
6166
6167 CASE_FLT_FN (BUILT_IN_SINCOS):
6168 if (! flag_unsafe_math_optimizations)
6169 break;
6170 target = expand_builtin_sincos (exp);
6171 if (target)
6172 return target;
6173 break;
6174
6175 case BUILT_IN_APPLY_ARGS:
6176 return expand_builtin_apply_args ();
6177
6178 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6179 FUNCTION with a copy of the parameters described by
6180 ARGUMENTS, and ARGSIZE. It returns a block of memory
6181 allocated on the stack into which is stored all the registers
6182 that might possibly be used for returning the result of a
6183 function. ARGUMENTS is the value returned by
6184 __builtin_apply_args. ARGSIZE is the number of bytes of
6185 arguments that must be copied. ??? How should this value be
6186 computed? We'll also need a safe worst case value for varargs
6187 functions. */
6188 case BUILT_IN_APPLY:
6189 if (!validate_arglist (exp, POINTER_TYPE,
6190 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6191 && !validate_arglist (exp, REFERENCE_TYPE,
6192 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6193 return const0_rtx;
6194 else
6195 {
6196 rtx ops[3];
6197
6198 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6199 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6200 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6201
6202 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6203 }
6204
6205 /* __builtin_return (RESULT) causes the function to return the
6206 value described by RESULT. RESULT is address of the block of
6207 memory returned by __builtin_apply. */
6208 case BUILT_IN_RETURN:
6209 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6210 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6211 return const0_rtx;
6212
6213 case BUILT_IN_SAVEREGS:
6214 return expand_builtin_saveregs ();
6215
6216 case BUILT_IN_VA_ARG_PACK:
6217 /* All valid uses of __builtin_va_arg_pack () are removed during
6218 inlining. */
6219 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6220 return const0_rtx;
6221
6222 case BUILT_IN_VA_ARG_PACK_LEN:
6223 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6224 inlining. */
6225 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6226 return const0_rtx;
6227
6228 /* Return the address of the first anonymous stack arg. */
6229 case BUILT_IN_NEXT_ARG:
6230 if (fold_builtin_next_arg (exp, false))
6231 return const0_rtx;
6232 return expand_builtin_next_arg ();
6233
6234 case BUILT_IN_CLEAR_CACHE:
6235 target = expand_builtin___clear_cache (exp);
6236 if (target)
6237 return target;
6238 break;
6239
6240 case BUILT_IN_CLASSIFY_TYPE:
6241 return expand_builtin_classify_type (exp);
6242
6243 case BUILT_IN_CONSTANT_P:
6244 return const0_rtx;
6245
6246 case BUILT_IN_FRAME_ADDRESS:
6247 case BUILT_IN_RETURN_ADDRESS:
6248 return expand_builtin_frame_address (fndecl, exp);
6249
6250 /* Returns the address of the area where the structure is returned.
6251 0 otherwise. */
6252 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6253 if (call_expr_nargs (exp) != 0
6254 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6255 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6256 return const0_rtx;
6257 else
6258 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6259
6260 case BUILT_IN_ALLOCA:
6261 case BUILT_IN_ALLOCA_WITH_ALIGN:
6262 /* If the allocation stems from the declaration of a variable-sized
6263 object, it cannot accumulate. */
6264 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6265 if (target)
6266 return target;
6267 break;
6268
6269 case BUILT_IN_STACK_SAVE:
6270 return expand_stack_save ();
6271
6272 case BUILT_IN_STACK_RESTORE:
6273 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6274 return const0_rtx;
6275
6276 case BUILT_IN_BSWAP16:
6277 case BUILT_IN_BSWAP32:
6278 case BUILT_IN_BSWAP64:
6279 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6280 if (target)
6281 return target;
6282 break;
6283
6284 CASE_INT_FN (BUILT_IN_FFS):
6285 target = expand_builtin_unop (target_mode, exp, target,
6286 subtarget, ffs_optab);
6287 if (target)
6288 return target;
6289 break;
6290
6291 CASE_INT_FN (BUILT_IN_CLZ):
6292 target = expand_builtin_unop (target_mode, exp, target,
6293 subtarget, clz_optab);
6294 if (target)
6295 return target;
6296 break;
6297
6298 CASE_INT_FN (BUILT_IN_CTZ):
6299 target = expand_builtin_unop (target_mode, exp, target,
6300 subtarget, ctz_optab);
6301 if (target)
6302 return target;
6303 break;
6304
6305 CASE_INT_FN (BUILT_IN_CLRSB):
6306 target = expand_builtin_unop (target_mode, exp, target,
6307 subtarget, clrsb_optab);
6308 if (target)
6309 return target;
6310 break;
6311
6312 CASE_INT_FN (BUILT_IN_POPCOUNT):
6313 target = expand_builtin_unop (target_mode, exp, target,
6314 subtarget, popcount_optab);
6315 if (target)
6316 return target;
6317 break;
6318
6319 CASE_INT_FN (BUILT_IN_PARITY):
6320 target = expand_builtin_unop (target_mode, exp, target,
6321 subtarget, parity_optab);
6322 if (target)
6323 return target;
6324 break;
6325
6326 case BUILT_IN_STRLEN:
6327 target = expand_builtin_strlen (exp, target, target_mode);
6328 if (target)
6329 return target;
6330 break;
6331
6332 case BUILT_IN_STRCPY:
6333 target = expand_builtin_strcpy (exp, target);
6334 if (target)
6335 return target;
6336 break;
6337
6338 case BUILT_IN_STRNCPY:
6339 target = expand_builtin_strncpy (exp, target);
6340 if (target)
6341 return target;
6342 break;
6343
6344 case BUILT_IN_STPCPY:
6345 target = expand_builtin_stpcpy (exp, target, mode);
6346 if (target)
6347 return target;
6348 break;
6349
6350 case BUILT_IN_MEMCPY:
6351 target = expand_builtin_memcpy (exp, target);
6352 if (target)
6353 return target;
6354 break;
6355
6356 case BUILT_IN_MEMPCPY:
6357 target = expand_builtin_mempcpy (exp, target, mode);
6358 if (target)
6359 return target;
6360 break;
6361
6362 case BUILT_IN_MEMSET:
6363 target = expand_builtin_memset (exp, target, mode);
6364 if (target)
6365 return target;
6366 break;
6367
6368 case BUILT_IN_BZERO:
6369 target = expand_builtin_bzero (exp);
6370 if (target)
6371 return target;
6372 break;
6373
6374 case BUILT_IN_STRCMP:
6375 target = expand_builtin_strcmp (exp, target);
6376 if (target)
6377 return target;
6378 break;
6379
6380 case BUILT_IN_STRNCMP:
6381 target = expand_builtin_strncmp (exp, target, mode);
6382 if (target)
6383 return target;
6384 break;
6385
6386 case BUILT_IN_BCMP:
6387 case BUILT_IN_MEMCMP:
6388 target = expand_builtin_memcmp (exp, target, mode);
6389 if (target)
6390 return target;
6391 break;
6392
6393 case BUILT_IN_SETJMP:
6394 /* This should have been lowered to the builtins below. */
6395 gcc_unreachable ();
6396
6397 case BUILT_IN_SETJMP_SETUP:
6398 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6399 and the receiver label. */
6400 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6401 {
6402 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6403 VOIDmode, EXPAND_NORMAL);
6404 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6405 rtx label_r = label_rtx (label);
6406
6407 /* This is copied from the handling of non-local gotos. */
6408 expand_builtin_setjmp_setup (buf_addr, label_r);
6409 nonlocal_goto_handler_labels
6410 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6411 nonlocal_goto_handler_labels);
6412 /* ??? Do not let expand_label treat us as such since we would
6413 not want to be both on the list of non-local labels and on
6414 the list of forced labels. */
6415 FORCED_LABEL (label) = 0;
6416 return const0_rtx;
6417 }
6418 break;
6419
6420 case BUILT_IN_SETJMP_RECEIVER:
6421 /* __builtin_setjmp_receiver is passed the receiver label. */
6422 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6423 {
6424 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6425 rtx label_r = label_rtx (label);
6426
6427 expand_builtin_setjmp_receiver (label_r);
6428 return const0_rtx;
6429 }
6430 break;
6431
6432 /* __builtin_longjmp is passed a pointer to an array of five words.
6433 It's similar to the C library longjmp function but works with
6434 __builtin_setjmp above. */
6435 case BUILT_IN_LONGJMP:
6436 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6437 {
6438 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6439 VOIDmode, EXPAND_NORMAL);
6440 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6441
6442 if (value != const1_rtx)
6443 {
6444 error ("%<__builtin_longjmp%> second argument must be 1");
6445 return const0_rtx;
6446 }
6447
6448 expand_builtin_longjmp (buf_addr, value);
6449 return const0_rtx;
6450 }
6451 break;
6452
6453 case BUILT_IN_NONLOCAL_GOTO:
6454 target = expand_builtin_nonlocal_goto (exp);
6455 if (target)
6456 return target;
6457 break;
6458
6459 /* This updates the setjmp buffer that is its argument with the value
6460 of the current stack pointer. */
6461 case BUILT_IN_UPDATE_SETJMP_BUF:
6462 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6463 {
6464 rtx buf_addr
6465 = expand_normal (CALL_EXPR_ARG (exp, 0));
6466
6467 expand_builtin_update_setjmp_buf (buf_addr);
6468 return const0_rtx;
6469 }
6470 break;
6471
6472 case BUILT_IN_TRAP:
6473 expand_builtin_trap ();
6474 return const0_rtx;
6475
6476 case BUILT_IN_UNREACHABLE:
6477 expand_builtin_unreachable ();
6478 return const0_rtx;
6479
6480 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6481 case BUILT_IN_SIGNBITD32:
6482 case BUILT_IN_SIGNBITD64:
6483 case BUILT_IN_SIGNBITD128:
6484 target = expand_builtin_signbit (exp, target);
6485 if (target)
6486 return target;
6487 break;
6488
6489 /* Various hooks for the DWARF 2 __throw routine. */
6490 case BUILT_IN_UNWIND_INIT:
6491 expand_builtin_unwind_init ();
6492 return const0_rtx;
6493 case BUILT_IN_DWARF_CFA:
6494 return virtual_cfa_rtx;
6495 #ifdef DWARF2_UNWIND_INFO
6496 case BUILT_IN_DWARF_SP_COLUMN:
6497 return expand_builtin_dwarf_sp_column ();
6498 case BUILT_IN_INIT_DWARF_REG_SIZES:
6499 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6500 return const0_rtx;
6501 #endif
6502 case BUILT_IN_FROB_RETURN_ADDR:
6503 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6504 case BUILT_IN_EXTRACT_RETURN_ADDR:
6505 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6506 case BUILT_IN_EH_RETURN:
6507 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6508 CALL_EXPR_ARG (exp, 1));
6509 return const0_rtx;
6510 #ifdef EH_RETURN_DATA_REGNO
6511 case BUILT_IN_EH_RETURN_DATA_REGNO:
6512 return expand_builtin_eh_return_data_regno (exp);
6513 #endif
6514 case BUILT_IN_EXTEND_POINTER:
6515 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6516 case BUILT_IN_EH_POINTER:
6517 return expand_builtin_eh_pointer (exp);
6518 case BUILT_IN_EH_FILTER:
6519 return expand_builtin_eh_filter (exp);
6520 case BUILT_IN_EH_COPY_VALUES:
6521 return expand_builtin_eh_copy_values (exp);
6522
6523 case BUILT_IN_VA_START:
6524 return expand_builtin_va_start (exp);
6525 case BUILT_IN_VA_END:
6526 return expand_builtin_va_end (exp);
6527 case BUILT_IN_VA_COPY:
6528 return expand_builtin_va_copy (exp);
6529 case BUILT_IN_EXPECT:
6530 return expand_builtin_expect (exp, target);
6531 case BUILT_IN_ASSUME_ALIGNED:
6532 return expand_builtin_assume_aligned (exp, target);
6533 case BUILT_IN_PREFETCH:
6534 expand_builtin_prefetch (exp);
6535 return const0_rtx;
6536
6537 case BUILT_IN_INIT_TRAMPOLINE:
6538 return expand_builtin_init_trampoline (exp, true);
6539 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6540 return expand_builtin_init_trampoline (exp, false);
6541 case BUILT_IN_ADJUST_TRAMPOLINE:
6542 return expand_builtin_adjust_trampoline (exp);
6543
6544 case BUILT_IN_FORK:
6545 case BUILT_IN_EXECL:
6546 case BUILT_IN_EXECV:
6547 case BUILT_IN_EXECLP:
6548 case BUILT_IN_EXECLE:
6549 case BUILT_IN_EXECVP:
6550 case BUILT_IN_EXECVE:
6551 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6552 if (target)
6553 return target;
6554 break;
6555
6556 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6557 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6558 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6559 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6560 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6561 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6562 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6563 if (target)
6564 return target;
6565 break;
6566
6567 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6568 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6569 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6570 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6571 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6572 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6573 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6574 if (target)
6575 return target;
6576 break;
6577
6578 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6579 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6580 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6581 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6582 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6583 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6584 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6585 if (target)
6586 return target;
6587 break;
6588
6589 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6590 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6591 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6592 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6593 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6594 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6595 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6596 if (target)
6597 return target;
6598 break;
6599
6600 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6601 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6602 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6603 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6604 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6605 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6606 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6607 if (target)
6608 return target;
6609 break;
6610
6611 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6612 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6613 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6614 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6615 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6616 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6617 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6618 if (target)
6619 return target;
6620 break;
6621
6622 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6623 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6624 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6625 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6626 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6627 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6628 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6629 if (target)
6630 return target;
6631 break;
6632
6633 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6634 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6635 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6636 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6637 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6638 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6639 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6640 if (target)
6641 return target;
6642 break;
6643
6644 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6645 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6646 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6647 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6648 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6649 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6650 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6651 if (target)
6652 return target;
6653 break;
6654
6655 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6656 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6657 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6658 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6659 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6660 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6661 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6662 if (target)
6663 return target;
6664 break;
6665
6666 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6667 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6668 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6669 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6670 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6671 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6672 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6673 if (target)
6674 return target;
6675 break;
6676
6677 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6678 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6679 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6680 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6681 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6682 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6683 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6684 if (target)
6685 return target;
6686 break;
6687
6688 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6689 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6690 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6691 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6692 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6693 if (mode == VOIDmode)
6694 mode = TYPE_MODE (boolean_type_node);
6695 if (!target || !register_operand (target, mode))
6696 target = gen_reg_rtx (mode);
6697
6698 mode = get_builtin_sync_mode
6699 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6700 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6701 if (target)
6702 return target;
6703 break;
6704
6705 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6706 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6707 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6708 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6709 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6710 mode = get_builtin_sync_mode
6711 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6712 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6713 if (target)
6714 return target;
6715 break;
6716
6717 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6718 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6719 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6720 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6721 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6722 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6723 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6724 if (target)
6725 return target;
6726 break;
6727
6728 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6729 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6730 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6731 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6732 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6733 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6734 expand_builtin_sync_lock_release (mode, exp);
6735 return const0_rtx;
6736
6737 case BUILT_IN_SYNC_SYNCHRONIZE:
6738 expand_builtin_sync_synchronize ();
6739 return const0_rtx;
6740
6741 case BUILT_IN_ATOMIC_EXCHANGE_1:
6742 case BUILT_IN_ATOMIC_EXCHANGE_2:
6743 case BUILT_IN_ATOMIC_EXCHANGE_4:
6744 case BUILT_IN_ATOMIC_EXCHANGE_8:
6745 case BUILT_IN_ATOMIC_EXCHANGE_16:
6746 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6747 target = expand_builtin_atomic_exchange (mode, exp, target);
6748 if (target)
6749 return target;
6750 break;
6751
6752 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6753 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6754 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6755 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6756 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6757 {
6758 unsigned int nargs, z;
6759 vec<tree, va_gc> *vec;
6760
6761 mode =
6762 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6763 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6764 if (target)
6765 return target;
6766
6767 /* If this is turned into an external library call, the weak parameter
6768 must be dropped to match the expected parameter list. */
6769 nargs = call_expr_nargs (exp);
6770 vec_alloc (vec, nargs - 1);
6771 for (z = 0; z < 3; z++)
6772 vec->quick_push (CALL_EXPR_ARG (exp, z));
6773 /* Skip the boolean weak parameter. */
6774 for (z = 4; z < 6; z++)
6775 vec->quick_push (CALL_EXPR_ARG (exp, z));
6776 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6777 break;
6778 }
6779
6780 case BUILT_IN_ATOMIC_LOAD_1:
6781 case BUILT_IN_ATOMIC_LOAD_2:
6782 case BUILT_IN_ATOMIC_LOAD_4:
6783 case BUILT_IN_ATOMIC_LOAD_8:
6784 case BUILT_IN_ATOMIC_LOAD_16:
6785 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6786 target = expand_builtin_atomic_load (mode, exp, target);
6787 if (target)
6788 return target;
6789 break;
6790
6791 case BUILT_IN_ATOMIC_STORE_1:
6792 case BUILT_IN_ATOMIC_STORE_2:
6793 case BUILT_IN_ATOMIC_STORE_4:
6794 case BUILT_IN_ATOMIC_STORE_8:
6795 case BUILT_IN_ATOMIC_STORE_16:
6796 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6797 target = expand_builtin_atomic_store (mode, exp);
6798 if (target)
6799 return const0_rtx;
6800 break;
6801
6802 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6803 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6804 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6805 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6806 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6807 {
6808 enum built_in_function lib;
6809 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6810 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6811 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6812 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6813 ignore, lib);
6814 if (target)
6815 return target;
6816 break;
6817 }
6818 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6819 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6820 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6821 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6822 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6823 {
6824 enum built_in_function lib;
6825 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6826 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6827 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6828 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6829 ignore, lib);
6830 if (target)
6831 return target;
6832 break;
6833 }
6834 case BUILT_IN_ATOMIC_AND_FETCH_1:
6835 case BUILT_IN_ATOMIC_AND_FETCH_2:
6836 case BUILT_IN_ATOMIC_AND_FETCH_4:
6837 case BUILT_IN_ATOMIC_AND_FETCH_8:
6838 case BUILT_IN_ATOMIC_AND_FETCH_16:
6839 {
6840 enum built_in_function lib;
6841 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6842 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6843 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6844 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6845 ignore, lib);
6846 if (target)
6847 return target;
6848 break;
6849 }
6850 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6851 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6852 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6853 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6854 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6855 {
6856 enum built_in_function lib;
6857 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6858 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6859 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6860 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6861 ignore, lib);
6862 if (target)
6863 return target;
6864 break;
6865 }
6866 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6867 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6868 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6869 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6870 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6871 {
6872 enum built_in_function lib;
6873 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6874 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6875 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6876 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6877 ignore, lib);
6878 if (target)
6879 return target;
6880 break;
6881 }
6882 case BUILT_IN_ATOMIC_OR_FETCH_1:
6883 case BUILT_IN_ATOMIC_OR_FETCH_2:
6884 case BUILT_IN_ATOMIC_OR_FETCH_4:
6885 case BUILT_IN_ATOMIC_OR_FETCH_8:
6886 case BUILT_IN_ATOMIC_OR_FETCH_16:
6887 {
6888 enum built_in_function lib;
6889 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6890 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6891 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6892 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6893 ignore, lib);
6894 if (target)
6895 return target;
6896 break;
6897 }
6898 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6899 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6900 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6901 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6902 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6903 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6904 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6905 ignore, BUILT_IN_NONE);
6906 if (target)
6907 return target;
6908 break;
6909
6910 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6911 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6912 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6913 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6914 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6915 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6916 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6917 ignore, BUILT_IN_NONE);
6918 if (target)
6919 return target;
6920 break;
6921
6922 case BUILT_IN_ATOMIC_FETCH_AND_1:
6923 case BUILT_IN_ATOMIC_FETCH_AND_2:
6924 case BUILT_IN_ATOMIC_FETCH_AND_4:
6925 case BUILT_IN_ATOMIC_FETCH_AND_8:
6926 case BUILT_IN_ATOMIC_FETCH_AND_16:
6927 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6928 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6929 ignore, BUILT_IN_NONE);
6930 if (target)
6931 return target;
6932 break;
6933
6934 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6935 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6936 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6937 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6938 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6939 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6940 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6941 ignore, BUILT_IN_NONE);
6942 if (target)
6943 return target;
6944 break;
6945
6946 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6947 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6948 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6949 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6950 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6951 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6952 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6953 ignore, BUILT_IN_NONE);
6954 if (target)
6955 return target;
6956 break;
6957
6958 case BUILT_IN_ATOMIC_FETCH_OR_1:
6959 case BUILT_IN_ATOMIC_FETCH_OR_2:
6960 case BUILT_IN_ATOMIC_FETCH_OR_4:
6961 case BUILT_IN_ATOMIC_FETCH_OR_8:
6962 case BUILT_IN_ATOMIC_FETCH_OR_16:
6963 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6964 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6965 ignore, BUILT_IN_NONE);
6966 if (target)
6967 return target;
6968 break;
6969
6970 case BUILT_IN_ATOMIC_TEST_AND_SET:
6971 return expand_builtin_atomic_test_and_set (exp, target);
6972
6973 case BUILT_IN_ATOMIC_CLEAR:
6974 return expand_builtin_atomic_clear (exp);
6975
6976 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6977 return expand_builtin_atomic_always_lock_free (exp);
6978
6979 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6980 target = expand_builtin_atomic_is_lock_free (exp);
6981 if (target)
6982 return target;
6983 break;
6984
6985 case BUILT_IN_ATOMIC_THREAD_FENCE:
6986 expand_builtin_atomic_thread_fence (exp);
6987 return const0_rtx;
6988
6989 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6990 expand_builtin_atomic_signal_fence (exp);
6991 return const0_rtx;
6992
6993 case BUILT_IN_OBJECT_SIZE:
6994 return expand_builtin_object_size (exp);
6995
6996 case BUILT_IN_MEMCPY_CHK:
6997 case BUILT_IN_MEMPCPY_CHK:
6998 case BUILT_IN_MEMMOVE_CHK:
6999 case BUILT_IN_MEMSET_CHK:
7000 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7001 if (target)
7002 return target;
7003 break;
7004
7005 case BUILT_IN_STRCPY_CHK:
7006 case BUILT_IN_STPCPY_CHK:
7007 case BUILT_IN_STRNCPY_CHK:
7008 case BUILT_IN_STPNCPY_CHK:
7009 case BUILT_IN_STRCAT_CHK:
7010 case BUILT_IN_STRNCAT_CHK:
7011 case BUILT_IN_SNPRINTF_CHK:
7012 case BUILT_IN_VSNPRINTF_CHK:
7013 maybe_emit_chk_warning (exp, fcode);
7014 break;
7015
7016 case BUILT_IN_SPRINTF_CHK:
7017 case BUILT_IN_VSPRINTF_CHK:
7018 maybe_emit_sprintf_chk_warning (exp, fcode);
7019 break;
7020
7021 case BUILT_IN_FREE:
7022 if (warn_free_nonheap_object)
7023 maybe_emit_free_warning (exp);
7024 break;
7025
7026 case BUILT_IN_THREAD_POINTER:
7027 return expand_builtin_thread_pointer (exp, target);
7028
7029 case BUILT_IN_SET_THREAD_POINTER:
7030 expand_builtin_set_thread_pointer (exp);
7031 return const0_rtx;
7032
7033 case BUILT_IN_CILK_DETACH:
7034 expand_builtin_cilk_detach (exp);
7035 return const0_rtx;
7036
7037 case BUILT_IN_CILK_POP_FRAME:
7038 expand_builtin_cilk_pop_frame (exp);
7039 return const0_rtx;
7040
7041 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7042 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7043 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7044 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7045 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7046 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7047 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7048 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7049 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7050 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7051 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7052 /* We allow the user CHKP builtins even when the Pointer Bounds
7053 Checker is off. */
7054 if (!chkp_function_instrumented_p (current_function_decl))
7055 {
7056 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7057 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7058 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7059 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7060 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7061 return expand_normal (CALL_EXPR_ARG (exp, 0));
7062 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7063 return expand_normal (size_zero_node);
7064 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7065 return expand_normal (size_int (-1));
7066 else
7067 return const0_rtx;
7068 }
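
      /* For instance, in a function that is not instrumented the user-level
	 entry points (spelled __bnd_* in source) fold to trivial values,
	 roughly:

	   __bnd_set_ptr_bounds (p, n)  -> p
	   __bnd_get_ptr_lbound (p)     -> 0
	   __bnd_get_ptr_ubound (p)     -> (size_t) -1

	 i.e. every pointer behaves as if it had infinite bounds. */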
7069 /* FALLTHROUGH */
7070
7071 case BUILT_IN_CHKP_BNDMK:
7072 case BUILT_IN_CHKP_BNDSTX:
7073 case BUILT_IN_CHKP_BNDCL:
7074 case BUILT_IN_CHKP_BNDCU:
7075 case BUILT_IN_CHKP_BNDLDX:
7076 case BUILT_IN_CHKP_BNDRET:
7077 case BUILT_IN_CHKP_INTERSECT:
7078 case BUILT_IN_CHKP_NARROW:
7079 case BUILT_IN_CHKP_EXTRACT_LOWER:
7080 case BUILT_IN_CHKP_EXTRACT_UPPER:
7081 /* A software implementation of the Pointer Bounds Checker is not
7082 yet available; target support is required. */
7083 error ("Your target platform does not support -fcheck-pointer-bounds");
7084 break;
7085
7086 case BUILT_IN_ACC_ON_DEVICE:
7087 target = expand_builtin_acc_on_device (exp, target);
7088 if (target)
7089 return target;
7090 break;
7091
7092 default: /* Just emit a library call for an unknown builtin. */
7093 break;
7094 }
7095
7096 /* The switch statement above can drop through to cause the function
7097 to be called normally. */
7098 return expand_call (exp, target, ignore);
7099 }
7100
7101 /* Similar to expand_builtin, but used for instrumented calls. */
7102
7103 rtx
7104 expand_builtin_with_bounds (tree exp, rtx target,
7105 rtx subtarget ATTRIBUTE_UNUSED,
7106 machine_mode mode, int ignore)
7107 {
7108 tree fndecl = get_callee_fndecl (exp);
7109 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7110
7111 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7112
7113 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7114 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7115
7116 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7117 && fcode < END_CHKP_BUILTINS);
7118
7119 switch (fcode)
7120 {
7121 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7122 target = expand_builtin_memcpy_with_bounds (exp, target);
7123 if (target)
7124 return target;
7125 break;
7126
7127 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7128 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7129 if (target)
7130 return target;
7131 break;
7132
7133 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7134 target = expand_builtin_memset_with_bounds (exp, target, mode);
7135 if (target)
7136 return target;
7137 break;
7138
7139 default:
7140 break;
7141 }
7142
7143 /* The switch statement above can drop through to cause the function
7144 to be called normally. */
7145 return expand_call (exp, target, ignore);
7146 }
7147
7148 /* Determine whether a tree node represents a call to a built-in
7149 function. If the tree T is a call to a built-in function with
7150 the right number of arguments of the appropriate types, return
7151 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7152 Otherwise the return value is END_BUILTINS. */
7153
7154 enum built_in_function
7155 builtin_mathfn_code (const_tree t)
7156 {
7157 const_tree fndecl, arg, parmlist;
7158 const_tree argtype, parmtype;
7159 const_call_expr_arg_iterator iter;
7160
7161 if (TREE_CODE (t) != CALL_EXPR
7162 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7163 return END_BUILTINS;
7164
7165 fndecl = get_callee_fndecl (t);
7166 if (fndecl == NULL_TREE
7167 || TREE_CODE (fndecl) != FUNCTION_DECL
7168 || ! DECL_BUILT_IN (fndecl)
7169 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7170 return END_BUILTINS;
7171
7172 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7173 init_const_call_expr_arg_iterator (t, &iter);
7174 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7175 {
7176 /* If a function doesn't take a variable number of arguments,
7177 the last element in the list will have type `void'. */
7178 parmtype = TREE_VALUE (parmlist);
7179 if (VOID_TYPE_P (parmtype))
7180 {
7181 if (more_const_call_expr_args_p (&iter))
7182 return END_BUILTINS;
7183 return DECL_FUNCTION_CODE (fndecl);
7184 }
7185
7186 if (! more_const_call_expr_args_p (&iter))
7187 return END_BUILTINS;
7188
7189 arg = next_const_call_expr_arg (&iter);
7190 argtype = TREE_TYPE (arg);
7191
7192 if (SCALAR_FLOAT_TYPE_P (parmtype))
7193 {
7194 if (! SCALAR_FLOAT_TYPE_P (argtype))
7195 return END_BUILTINS;
7196 }
7197 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7198 {
7199 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7200 return END_BUILTINS;
7201 }
7202 else if (POINTER_TYPE_P (parmtype))
7203 {
7204 if (! POINTER_TYPE_P (argtype))
7205 return END_BUILTINS;
7206 }
7207 else if (INTEGRAL_TYPE_P (parmtype))
7208 {
7209 if (! INTEGRAL_TYPE_P (argtype))
7210 return END_BUILTINS;
7211 }
7212 else
7213 return END_BUILTINS;
7214 }
7215
7216 /* Variable-length argument list. */
7217 return DECL_FUNCTION_CODE (fndecl);
7218 }
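
/* For example, a call to pow with two double arguments matches the
   (double, double) parameter list and yields BUILT_IN_POW; if the call
   instead passed an integer where a double is expected (as can happen
   with a mismatched declaration), the type walk above fails and
   END_BUILTINS is returned. */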
7219
7220 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7221 evaluate to a constant. */
7222
7223 static tree
7224 fold_builtin_constant_p (tree arg)
7225 {
7226 /* We return 1 for a numeric type that's known to be a constant
7227 value at compile-time or for an aggregate type that's a
7228 literal constant. */
7229 STRIP_NOPS (arg);
7230
7231 /* If we know this is a constant, return the constant one. */
7232 if (CONSTANT_CLASS_P (arg)
7233 || (TREE_CODE (arg) == CONSTRUCTOR
7234 && TREE_CONSTANT (arg)))
7235 return integer_one_node;
7236 if (TREE_CODE (arg) == ADDR_EXPR)
7237 {
7238 tree op = TREE_OPERAND (arg, 0);
7239 if (TREE_CODE (op) == STRING_CST
7240 || (TREE_CODE (op) == ARRAY_REF
7241 && integer_zerop (TREE_OPERAND (op, 1))
7242 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7243 return integer_one_node;
7244 }
7245
7246 /* If this expression has side effects, show we don't know it to be a
7247 constant. Likewise if it's a pointer or aggregate type, since in
7248 those cases we only want literals; those are only optimized
7249 when generating RTL, not later.
7250 And finally, if we are compiling an initializer, not code, we
7251 need to return a definite result now; there's not going to be any
7252 more optimization done. */
7253 if (TREE_SIDE_EFFECTS (arg)
7254 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7255 || POINTER_TYPE_P (TREE_TYPE (arg))
7256 || cfun == 0
7257 || folding_initializer
7258 || force_folding_builtin_constant_p)
7259 return integer_zero_node;
7260
7261 return NULL_TREE;
7262 }
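
/* Illustrative behavior of the fold above at the source level:

     __builtin_constant_p (42)    -> 1 (CONSTANT_CLASS_P)
     __builtin_constant_p ("ab")  -> 1 (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (x++)   -> 0 (side effects)
     __builtin_constant_p (x)     -> NULL_TREE, i.e. deferred, since X
				     may still become constant later
				     (e.g. after inlining). */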
7263
7264 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7265 return it as a truthvalue. */
7266
7267 static tree
7268 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7269 tree predictor)
7270 {
7271 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7272
7273 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7274 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7275 ret_type = TREE_TYPE (TREE_TYPE (fn));
7276 pred_type = TREE_VALUE (arg_types);
7277 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7278
7279 pred = fold_convert_loc (loc, pred_type, pred);
7280 expected = fold_convert_loc (loc, expected_type, expected);
7281 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7282 predictor);
7283
7284 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7285 build_int_cst (ret_type, 0));
7286 }
7287
7288 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7289 NULL_TREE if no simplification is possible. */
7290
7291 tree
7292 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7293 {
7294 tree inner, fndecl, inner_arg0;
7295 enum tree_code code;
7296
7297 /* Distribute the expected value over short-circuiting operators.
7298 See through the cast from truthvalue_type_node to long. */
7299 inner_arg0 = arg0;
7300 while (CONVERT_EXPR_P (inner_arg0)
7301 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7302 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7303 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7304
7305 /* If this is a builtin_expect within a builtin_expect keep the
7306 inner one. See through a comparison against a constant. It
7307 might have been added to create a truthvalue. */
7308 inner = inner_arg0;
7309
7310 if (COMPARISON_CLASS_P (inner)
7311 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7312 inner = TREE_OPERAND (inner, 0);
7313
7314 if (TREE_CODE (inner) == CALL_EXPR
7315 && (fndecl = get_callee_fndecl (inner))
7316 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7317 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7318 return arg0;
7319
7320 inner = inner_arg0;
7321 code = TREE_CODE (inner);
7322 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7323 {
7324 tree op0 = TREE_OPERAND (inner, 0);
7325 tree op1 = TREE_OPERAND (inner, 1);
7326
7327 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7328 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7329 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7330
7331 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7332 }
7333
7334 /* If the argument isn't invariant then there's nothing else we can do. */
7335 if (!TREE_CONSTANT (inner_arg0))
7336 return NULL_TREE;
7337
7338 /* If we expect that a comparison against the argument will fold to
7339 a constant return the constant. In practice, this means a true
7340 constant or the address of a non-weak symbol. */
7341 inner = inner_arg0;
7342 STRIP_NOPS (inner);
7343 if (TREE_CODE (inner) == ADDR_EXPR)
7344 {
7345 do
7346 {
7347 inner = TREE_OPERAND (inner, 0);
7348 }
7349 while (TREE_CODE (inner) == COMPONENT_REF
7350 || TREE_CODE (inner) == ARRAY_REF);
7351 if ((TREE_CODE (inner) == VAR_DECL
7352 || TREE_CODE (inner) == FUNCTION_DECL)
7353 && DECL_WEAK (inner))
7354 return NULL_TREE;
7355 }
7356
7357 /* Otherwise, ARG0 already has the proper type for the return value. */
7358 return arg0;
7359 }
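
/* A sketch of the short-circuit distribution above:

     __builtin_expect (a && b, 1)

   becomes roughly

     __builtin_expect (a, 1) && __builtin_expect (b, 1)

   with each operand rebuilt by build_builtin_expect_predicate as a call
   compared against zero, so the result is again a truthvalue. */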
7360
7361 /* Fold a call to __builtin_classify_type with argument ARG. */
7362
7363 static tree
7364 fold_builtin_classify_type (tree arg)
7365 {
7366 if (arg == 0)
7367 return build_int_cst (integer_type_node, no_type_class);
7368
7369 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7370 }
7371
7372 /* Fold a call to __builtin_strlen with argument ARG. */
7373
7374 static tree
7375 fold_builtin_strlen (location_t loc, tree type, tree arg)
7376 {
7377 if (!validate_arg (arg, POINTER_TYPE))
7378 return NULL_TREE;
7379 else
7380 {
7381 tree len = c_strlen (arg, 0);
7382
7383 if (len)
7384 return fold_convert_loc (loc, type, len);
7385
7386 return NULL_TREE;
7387 }
7388 }
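
/* E.g. strlen ("abc") folds to the constant 3 via c_strlen, which also
   handles forms such as &"abc"[1] (length 2); a non-constant argument
   returns NULL_TREE and is expanded as a normal call. */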
7389
7390 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7391
7392 static tree
7393 fold_builtin_inf (location_t loc, tree type, int warn)
7394 {
7395 REAL_VALUE_TYPE real;
7396
7397 /* __builtin_inff is intended to be usable to define INFINITY on all
7398 targets. If an infinity is not available, INFINITY expands "to a
7399 positive constant of type float that overflows at translation
7400 time", footnote "In this case, using INFINITY will violate the
7401 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7402 Thus we pedwarn to ensure this constraint violation is
7403 diagnosed. */
7404 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7405 pedwarn (loc, 0, "target format does not support infinity");
7406
7407 real_inf (&real);
7408 return build_real (type, real);
7409 }
7410
7411 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7412
7413 static tree
7414 fold_builtin_nan (tree arg, tree type, int quiet)
7415 {
7416 REAL_VALUE_TYPE real;
7417 const char *str;
7418
7419 if (!validate_arg (arg, POINTER_TYPE))
7420 return NULL_TREE;
7421 str = c_getstr (arg);
7422 if (!str)
7423 return NULL_TREE;
7424
7425 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7426 return NULL_TREE;
7427
7428 return build_real (type, real);
7429 }
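
/* Roughly, __builtin_nan ("") folds to a default quiet NaN REAL_CST and
   __builtin_nans ("0x7") to a signaling NaN whose payload real_nan
   parses from the string; a string real_nan cannot parse leaves the
   call unfolded. */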
7430
7431 /* Return true if the floating point expression T has an integer value.
7432 We also allow +Inf, -Inf and NaN to be considered integer values. */
7433
7434 static bool
7435 integer_valued_real_p (tree t)
7436 {
7437 switch (TREE_CODE (t))
7438 {
7439 case FLOAT_EXPR:
7440 return true;
7441
7442 case ABS_EXPR:
7443 case SAVE_EXPR:
7444 return integer_valued_real_p (TREE_OPERAND (t, 0));
7445
7446 case COMPOUND_EXPR:
7447 case MODIFY_EXPR:
7448 case BIND_EXPR:
7449 return integer_valued_real_p (TREE_OPERAND (t, 1));
7450
7451 case PLUS_EXPR:
7452 case MINUS_EXPR:
7453 case MULT_EXPR:
7454 case MIN_EXPR:
7455 case MAX_EXPR:
7456 return integer_valued_real_p (TREE_OPERAND (t, 0))
7457 && integer_valued_real_p (TREE_OPERAND (t, 1));
7458
7459 case COND_EXPR:
7460 return integer_valued_real_p (TREE_OPERAND (t, 1))
7461 && integer_valued_real_p (TREE_OPERAND (t, 2));
7462
7463 case REAL_CST:
7464 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7465
7466 CASE_CONVERT:
7467 {
7468 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7469 if (TREE_CODE (type) == INTEGER_TYPE)
7470 return true;
7471 if (TREE_CODE (type) == REAL_TYPE)
7472 return integer_valued_real_p (TREE_OPERAND (t, 0));
7473 break;
7474 }
7475
7476 case CALL_EXPR:
7477 switch (builtin_mathfn_code (t))
7478 {
7479 CASE_FLT_FN (BUILT_IN_CEIL):
7480 CASE_FLT_FN (BUILT_IN_FLOOR):
7481 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7482 CASE_FLT_FN (BUILT_IN_RINT):
7483 CASE_FLT_FN (BUILT_IN_ROUND):
7484 CASE_FLT_FN (BUILT_IN_TRUNC):
7485 return true;
7486
7487 CASE_FLT_FN (BUILT_IN_FMIN):
7488 CASE_FLT_FN (BUILT_IN_FMAX):
7489 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7490 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7491
7492 default:
7493 break;
7494 }
7495 break;
7496
7497 default:
7498 break;
7499 }
7500 return false;
7501 }
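
/* For instance, (double) i for integral I, fabs (floor (x)) and
   fmin (trunc (a), trunc (b)) are all recognized as integer valued,
   while x + 0.5 is not. */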
7502
7503 /* FNDECL is assumed to be a builtin where truncation can be propagated
7504 across (for instance floor((double)f) == (double)floorf (f)).
7505 Do the transformation for a call with argument ARG. */
7506
7507 static tree
7508 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7509 {
7510 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7511
7512 if (!validate_arg (arg, REAL_TYPE))
7513 return NULL_TREE;
7514
7515 /* Integer rounding functions are idempotent. */
7516 if (fcode == builtin_mathfn_code (arg))
7517 return arg;
7518
7519 /* If argument is already integer valued, and we don't need to worry
7520 about setting errno, there's no need to perform rounding. */
7521 if (! flag_errno_math && integer_valued_real_p (arg))
7522 return arg;
7523
7524 if (optimize)
7525 {
7526 tree arg0 = strip_float_extensions (arg);
7527 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7528 tree newtype = TREE_TYPE (arg0);
7529 tree decl;
7530
7531 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7532 && (decl = mathfn_built_in (newtype, fcode)))
7533 return fold_convert_loc (loc, ftype,
7534 build_call_expr_loc (loc, decl, 1,
7535 fold_convert_loc (loc,
7536 newtype,
7537 arg0)));
7538 }
7539 return NULL_TREE;
7540 }
7541
7542 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7543 the argument, for instance lround((double)f) -> lroundf (f).
7544 Do the transformation for a call with argument ARG. */
7545
7546 static tree
7547 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7548 {
7549 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7550
7551 if (!validate_arg (arg, REAL_TYPE))
7552 return NULL_TREE;
7553
7554 /* If argument is already integer valued, and we don't need to worry
7555 about setting errno, there's no need to perform rounding. */
7556 if (! flag_errno_math && integer_valued_real_p (arg))
7557 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7558 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7559
7560 if (optimize)
7561 {
7562 tree ftype = TREE_TYPE (arg);
7563 tree arg0 = strip_float_extensions (arg);
7564 tree newtype = TREE_TYPE (arg0);
7565 tree decl;
7566
7567 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7568 && (decl = mathfn_built_in (newtype, fcode)))
7569 return build_call_expr_loc (loc, decl, 1,
7570 fold_convert_loc (loc, newtype, arg0));
7571 }
7572
7573 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7574 sizeof (int) == sizeof (long). */
7575 if (TYPE_PRECISION (integer_type_node)
7576 == TYPE_PRECISION (long_integer_type_node))
7577 {
7578 tree newfn = NULL_TREE;
7579 switch (fcode)
7580 {
7581 CASE_FLT_FN (BUILT_IN_ICEIL):
7582 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7583 break;
7584
7585 CASE_FLT_FN (BUILT_IN_IFLOOR):
7586 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7587 break;
7588
7589 CASE_FLT_FN (BUILT_IN_IROUND):
7590 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7591 break;
7592
7593 CASE_FLT_FN (BUILT_IN_IRINT):
7594 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7595 break;
7596
7597 default:
7598 break;
7599 }
7600
7601 if (newfn)
7602 {
7603 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7604 return fold_convert_loc (loc,
7605 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7606 }
7607 }
7608
7609 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7610 sizeof (long long) == sizeof (long). */
7611 if (TYPE_PRECISION (long_long_integer_type_node)
7612 == TYPE_PRECISION (long_integer_type_node))
7613 {
7614 tree newfn = NULL_TREE;
7615 switch (fcode)
7616 {
7617 CASE_FLT_FN (BUILT_IN_LLCEIL):
7618 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7619 break;
7620
7621 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7622 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7623 break;
7624
7625 CASE_FLT_FN (BUILT_IN_LLROUND):
7626 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7627 break;
7628
7629 CASE_FLT_FN (BUILT_IN_LLRINT):
7630 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7631 break;
7632
7633 default:
7634 break;
7635 }
7636
7637 if (newfn)
7638 {
7639 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7640 return fold_convert_loc (loc,
7641 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7642 }
7643 }
7644
7645 return NULL_TREE;
7646 }
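
/* A sketch of the canonicalizations above, under the stated precision
   assumptions: on an ILP32 target,

     iround (x)   ->  (int) lround (x)

   and on an LP64 target,

     llround (x)  ->  (long long) lround (x)

   so that later code only has to handle the plain 'l' variants. */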
7647
7648 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7649 return type. Return NULL_TREE if no simplification can be made. */
7650
7651 static tree
7652 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7653 {
7654 tree res;
7655
7656 if (!validate_arg (arg, COMPLEX_TYPE)
7657 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7658 return NULL_TREE;
7659
7660 /* Calculate the result when the argument is a constant. */
7661 if (TREE_CODE (arg) == COMPLEX_CST
7662 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7663 type, mpfr_hypot)))
7664 return res;
7665
7666 if (TREE_CODE (arg) == COMPLEX_EXPR)
7667 {
7668 tree real = TREE_OPERAND (arg, 0);
7669 tree imag = TREE_OPERAND (arg, 1);
7670
7671 /* If either part is zero, cabs is fabs of the other. */
7672 if (real_zerop (real))
7673 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7674 if (real_zerop (imag))
7675 return fold_build1_loc (loc, ABS_EXPR, type, real);
7676
7677 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7678 if (flag_unsafe_math_optimizations
7679 && operand_equal_p (real, imag, OEP_PURE_SAME))
7680 {
7681 const REAL_VALUE_TYPE sqrt2_trunc
7682 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7683 STRIP_NOPS (real);
7684 return fold_build2_loc (loc, MULT_EXPR, type,
7685 fold_build1_loc (loc, ABS_EXPR, type, real),
7686 build_real (type, sqrt2_trunc));
7687 }
7688 }
7689
7690 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7691 if (TREE_CODE (arg) == NEGATE_EXPR
7692 || TREE_CODE (arg) == CONJ_EXPR)
7693 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7694
7695 /* Don't do this when optimizing for size. */
7696 if (flag_unsafe_math_optimizations
7697 && optimize && optimize_function_for_speed_p (cfun))
7698 {
7699 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7700
7701 if (sqrtfn != NULL_TREE)
7702 {
7703 tree rpart, ipart, result;
7704
7705 arg = builtin_save_expr (arg);
7706
7707 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7708 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7709
7710 rpart = builtin_save_expr (rpart);
7711 ipart = builtin_save_expr (ipart);
7712
7713 result = fold_build2_loc (loc, PLUS_EXPR, type,
7714 fold_build2_loc (loc, MULT_EXPR, type,
7715 rpart, rpart),
7716 fold_build2_loc (loc, MULT_EXPR, type,
7717 ipart, ipart));
7718
7719 return build_call_expr_loc (loc, sqrtfn, 1, result);
7720 }
7721 }
7722
7723 return NULL_TREE;
7724 }
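
/* Summary of the cabs folds above:

     cabs (x + 0i)    -> fabs (x)
     cabs (0 + yi)    -> fabs (y)
     cabs (x + xi)    -> fabs (x) * sqrt (2)   (unsafe math only)
     cabs (-z)        -> cabs (z)
     cabs (conj (z))  -> cabs (z)

   and, when optimizing for speed under unsafe math, cabs (z) is
   expanded inline as sqrt (r*r + i*i). */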
7725
7726 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7727 complex tree type of the result. If NEG is true, the imaginary
7728 zero is negative. */
7729
7730 static tree
7731 build_complex_cproj (tree type, bool neg)
7732 {
7733 REAL_VALUE_TYPE rinf, rzero = dconst0;
7734
7735 real_inf (&rinf);
7736 rzero.sign = neg;
7737 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7738 build_real (TREE_TYPE (type), rzero));
7739 }
7740
7741 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7742 return type. Return NULL_TREE if no simplification can be made. */
7743
7744 static tree
7745 fold_builtin_cproj (location_t loc, tree arg, tree type)
7746 {
7747 if (!validate_arg (arg, COMPLEX_TYPE)
7748 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7749 return NULL_TREE;
7750
7751 /* If there are no infinities, return arg. */
7752 if (! HONOR_INFINITIES (type))
7753 return non_lvalue_loc (loc, arg);
7754
7755 /* Calculate the result when the argument is a constant. */
7756 if (TREE_CODE (arg) == COMPLEX_CST)
7757 {
7758 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7759 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7760
7761 if (real_isinf (real) || real_isinf (imag))
7762 return build_complex_cproj (type, imag->sign);
7763 else
7764 return arg;
7765 }
7766 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7767 {
7768 tree real = TREE_OPERAND (arg, 0);
7769 tree imag = TREE_OPERAND (arg, 1);
7770
7771 STRIP_NOPS (real);
7772 STRIP_NOPS (imag);
7773
7774 /* If the real part is inf and the imag part is known to be
7775 nonnegative, return (inf + 0i). Remember side-effects are
7776 possible in the imag part. */
7777 if (TREE_CODE (real) == REAL_CST
7778 && real_isinf (TREE_REAL_CST_PTR (real))
7779 && tree_expr_nonnegative_p (imag))
7780 return omit_one_operand_loc (loc, type,
7781 build_complex_cproj (type, false),
7782 arg);
7783
7784 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7785 Remember side-effects are possible in the real part. */
7786 if (TREE_CODE (imag) == REAL_CST
7787 && real_isinf (TREE_REAL_CST_PTR (imag)))
7788 return
7789 omit_one_operand_loc (loc, type,
7790 build_complex_cproj (type, TREE_REAL_CST_PTR
7791 (imag)->sign), arg);
7792 }
7793
7794 return NULL_TREE;
7795 }
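
/* A sketch of the C99 cproj semantics implemented above: cproj (z) is
   z itself unless either part of Z is an infinity, in which case the
   result is

     INFINITY + copysign (0.0, cimag (z)) * I

   i.e. a real infinity whose imaginary zero carries the sign of the
   imaginary part. */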
7796
7797 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7798 Return NULL_TREE if no simplification can be made. */
7799
7800 static tree
7801 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7802 {
7804 enum built_in_function fcode;
7805 tree res;
7806
7807 if (!validate_arg (arg, REAL_TYPE))
7808 return NULL_TREE;
7809
7810 /* Calculate the result when the argument is a constant. */
7811 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7812 return res;
7813
7814 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7815 fcode = builtin_mathfn_code (arg);
7816 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7817 {
7818 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7819 arg = fold_build2_loc (loc, MULT_EXPR, type,
7820 CALL_EXPR_ARG (arg, 0),
7821 build_real (type, dconsthalf));
7822 return build_call_expr_loc (loc, expfn, 1, arg);
7823 }
7824
7825 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7826 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7827 {
7828 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7829
7830 if (powfn)
7831 {
7832 tree arg0 = CALL_EXPR_ARG (arg, 0);
7833 tree tree_root;
7834 /* The inner root was either sqrt or cbrt. This was written as
7835 an if/else rather than a conditional expression because the
7836 latter triggered a bug in Sun C 5.5. */
7837 REAL_VALUE_TYPE dconstroot;
7838 if (BUILTIN_SQRT_P (fcode))
7839 dconstroot = dconsthalf;
7840 else
7841 dconstroot = dconst_third ();
7842
7843 /* Adjust for the outer root. */
7844 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7845 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7846 tree_root = build_real (type, dconstroot);
7847 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7848 }
7849 }
7850
7851 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7852 if (flag_unsafe_math_optimizations
7853 && (fcode == BUILT_IN_POW
7854 || fcode == BUILT_IN_POWF
7855 || fcode == BUILT_IN_POWL))
7856 {
7857 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7858 tree arg0 = CALL_EXPR_ARG (arg, 0);
7859 tree arg1 = CALL_EXPR_ARG (arg, 1);
7860 tree narg1;
7861 if (!tree_expr_nonnegative_p (arg0))
7862 arg0 = build1 (ABS_EXPR, type, arg0);
7863 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7864 build_real (type, dconsthalf));
7865 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7866 }
7867
7868 return NULL_TREE;
7869 }
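
/* Derivation sketch for the unsafe-math folds above:

     sqrt (expN (x))    = expN (x)**0.5      = expN (x * 0.5)
     sqrt (cbrt (x))    = (x**(1/3))**(1/2)  = pow (x, 1/6)
     sqrt (pow (x, y))  = (x**y)**0.5        = pow (fabs (x), y * 0.5)

   The fabs in the last form is needed because a negative X is only
   valid in the original when Y is an even integer, and Y*0.5 need not
   be one; pow (x, y) equals pow (fabs (x), y) in that case. */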
7870
7871 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7872 Return NULL_TREE if no simplification can be made. */
7873
7874 static tree
7875 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7876 {
7877 const enum built_in_function fcode = builtin_mathfn_code (arg);
7878 tree res;
7879
7880 if (!validate_arg (arg, REAL_TYPE))
7881 return NULL_TREE;
7882
7883 /* Calculate the result when the argument is a constant. */
7884 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7885 return res;
7886
7887 if (flag_unsafe_math_optimizations)
7888 {
7889 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7890 if (BUILTIN_EXPONENT_P (fcode))
7891 {
7892 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7893 const REAL_VALUE_TYPE third_trunc =
7894 real_value_truncate (TYPE_MODE (type), dconst_third ());
7895 arg = fold_build2_loc (loc, MULT_EXPR, type,
7896 CALL_EXPR_ARG (arg, 0),
7897 build_real (type, third_trunc));
7898 return build_call_expr_loc (loc, expfn, 1, arg);
7899 }
7900
7901 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7902 if (BUILTIN_SQRT_P (fcode))
7903 {
7904 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7905
7906 if (powfn)
7907 {
7908 tree arg0 = CALL_EXPR_ARG (arg, 0);
7909 tree tree_root;
7910 REAL_VALUE_TYPE dconstroot = dconst_third ();
7911
7912 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7913 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7914 tree_root = build_real (type, dconstroot);
7915 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7916 }
7917 }
7918
7919 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7920 if (BUILTIN_CBRT_P (fcode))
7921 {
7922 tree arg0 = CALL_EXPR_ARG (arg, 0);
7923 if (tree_expr_nonnegative_p (arg0))
7924 {
7925 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7926
7927 if (powfn)
7928 {
7929 tree tree_root;
7930 REAL_VALUE_TYPE dconstroot;
7931
7932 real_arithmetic (&dconstroot, MULT_EXPR,
7933 dconst_third_ptr (), dconst_third_ptr ());
7934 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7935 tree_root = build_real (type, dconstroot);
7936 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7937 }
7938 }
7939 }
7940
7941 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7942 if (fcode == BUILT_IN_POW
7943 || fcode == BUILT_IN_POWF
7944 || fcode == BUILT_IN_POWL)
7945 {
7946 tree arg00 = CALL_EXPR_ARG (arg, 0);
7947 tree arg01 = CALL_EXPR_ARG (arg, 1);
7948 if (tree_expr_nonnegative_p (arg00))
7949 {
7950 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7951 const REAL_VALUE_TYPE dconstroot
7952 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7953 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7954 build_real (type, dconstroot));
7955 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7956 }
7957 }
7958 }
7959 return NULL_TREE;
7960 }
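
/* Derivation sketch for the cbrt folds above:

     cbrt (expN (x))    = expN (x / 3)
     cbrt (sqrt (x))    = pow (x, 1/6)
     cbrt (cbrt (x))    = pow (x, 1/9)     (x nonnegative)
     cbrt (pow (x, y))  = pow (x, y / 3)   (x nonnegative)

   The nonnegativity checks matter because cbrt is defined for negative
   arguments while pow with a fractional exponent is not. */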
7961
7962 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7963 TYPE is the type of the return value. Return NULL_TREE if no
7964 simplification can be made. */
7965
7966 static tree
7967 fold_builtin_cos (location_t loc,
7968 tree arg, tree type, tree fndecl)
7969 {
7970 tree res, narg;
7971
7972 if (!validate_arg (arg, REAL_TYPE))
7973 return NULL_TREE;
7974
7975 /* Calculate the result when the argument is a constant. */
7976 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7977 return res;
7978
7979 /* Optimize cos(-x) into cos (x). */
7980 if ((narg = fold_strip_sign_ops (arg)))
7981 return build_call_expr_loc (loc, fndecl, 1, narg);
7982
7983 return NULL_TREE;
7984 }
7985
7986 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7987 Return NULL_TREE if no simplification can be made. */
7988
7989 static tree
7990 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7991 {
7992 if (validate_arg (arg, REAL_TYPE))
7993 {
7994 tree res, narg;
7995
7996 /* Calculate the result when the argument is a constant. */
7997 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7998 return res;
7999
8000 /* Optimize cosh(-x) into cosh (x). */
8001 if ((narg = fold_strip_sign_ops (arg)))
8002 return build_call_expr_loc (loc, fndecl, 1, narg);
8003 }
8004
8005 return NULL_TREE;
8006 }
8007
8008 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
8009 argument ARG. TYPE is the type of the return value. Return
8010 NULL_TREE if no simplification can be made. */
8011
8012 static tree
8013 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
8014 bool hyper)
8015 {
8016 if (validate_arg (arg, COMPLEX_TYPE)
8017 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8018 {
8019 tree tmp;
8020
8021 /* Calculate the result when the argument is a constant. */
8022 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
8023 return tmp;
8024
8025 /* Optimize fn(-x) into fn(x). */
8026 if ((tmp = fold_strip_sign_ops (arg)))
8027 return build_call_expr_loc (loc, fndecl, 1, tmp);
8028 }
8029
8030 return NULL_TREE;
8031 }
8032
8033 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
8034 Return NULL_TREE if no simplification can be made. */
8035
8036 static tree
8037 fold_builtin_tan (tree arg, tree type)
8038 {
8039 enum built_in_function fcode;
8040 tree res;
8041
8042 if (!validate_arg (arg, REAL_TYPE))
8043 return NULL_TREE;
8044
8045 /* Calculate the result when the argument is a constant. */
8046 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8047 return res;
8048
8049 /* Optimize tan(atan(x)) = x. */
8050 fcode = builtin_mathfn_code (arg);
8051 if (flag_unsafe_math_optimizations
8052 && (fcode == BUILT_IN_ATAN
8053 || fcode == BUILT_IN_ATANF
8054 || fcode == BUILT_IN_ATANL))
8055 return CALL_EXPR_ARG (arg, 0);
8056
8057 return NULL_TREE;
8058 }
8059
8060 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8061 NULL_TREE if no simplification can be made. */
8062
8063 static tree
8064 fold_builtin_sincos (location_t loc,
8065 tree arg0, tree arg1, tree arg2)
8066 {
8067 tree type;
8068 tree res, fn, call;
8069
8070 if (!validate_arg (arg0, REAL_TYPE)
8071 || !validate_arg (arg1, POINTER_TYPE)
8072 || !validate_arg (arg2, POINTER_TYPE))
8073 return NULL_TREE;
8074
8075 type = TREE_TYPE (arg0);
8076
8077 /* Calculate the result when the argument is a constant. */
8078 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8079 return res;
8080
8081 /* Canonicalize sincos to cexpi. */
8082 if (!targetm.libc_has_function (function_c99_math_complex))
8083 return NULL_TREE;
8084 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8085 if (!fn)
8086 return NULL_TREE;
8087
8088 call = build_call_expr_loc (loc, fn, 1, arg0);
8089 call = builtin_save_expr (call);
8090
8091 return build2 (COMPOUND_EXPR, void_type_node,
8092 build2 (MODIFY_EXPR, void_type_node,
8093 build_fold_indirect_ref_loc (loc, arg1),
8094 build1 (IMAGPART_EXPR, type, call)),
8095 build2 (MODIFY_EXPR, void_type_node,
8096 build_fold_indirect_ref_loc (loc, arg2),
8097 build1 (REALPART_EXPR, type, call)));
8098 }
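
/* A sketch of the canonicalization above: on targets with the C99
   complex math functions,

     sincos (x, &s, &c);

   becomes, using cexpi (x) == cos (x) + I * sin (x),

     tmp = cexpi (x); s = __imag__ tmp; c = __real__ tmp;

   which exposes a single call to further folding and CSE. */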
8099
8100 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8101 NULL_TREE if no simplification can be made. */
8102
8103 static tree
8104 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8105 {
8106 tree rtype;
8107 tree realp, imagp, ifn;
8108 tree res;
8109
8110 if (!validate_arg (arg0, COMPLEX_TYPE)
8111 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8112 return NULL_TREE;
8113
8114 /* Calculate the result when the argument is a constant. */
8115 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8116 return res;
8117
8118 rtype = TREE_TYPE (TREE_TYPE (arg0));
8119
8120 /* If we can extract the real part of ARG0 and it is constant zero,
8121 fold to cexpi. */
8122 if (!targetm.libc_has_function (function_c99_math_complex))
8123 return NULL_TREE;
8124 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8125 if (!ifn)
8126 return NULL_TREE;
8127
8128 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8129 && real_zerop (realp))
8130 {
8131 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8132 return build_call_expr_loc (loc, ifn, 1, narg);
8133 }
8134
8135 /* If we can easily decompose the real and imaginary parts, split
8136 cexp into exp (r) * cexpi (i). */
8137 if (flag_unsafe_math_optimizations
8138 && realp)
8139 {
8140 tree rfn, rcall, icall;
8141
8142 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8143 if (!rfn)
8144 return NULL_TREE;
8145
8146 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8147 if (!imagp)
8148 return NULL_TREE;
8149
8150 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8151 icall = builtin_save_expr (icall);
8152 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8153 rcall = builtin_save_expr (rcall);
8154 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8155 fold_build2_loc (loc, MULT_EXPR, rtype,
8156 rcall,
8157 fold_build1_loc (loc, REALPART_EXPR,
8158 rtype, icall)),
8159 fold_build2_loc (loc, MULT_EXPR, rtype,
8160 rcall,
8161 fold_build1_loc (loc, IMAGPART_EXPR,
8162 rtype, icall)));
8163 }
8164
8165 return NULL_TREE;
8166 }
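
/* Derivation sketch for the folds above: writing the argument as
   r + i*I,

     cexp (r + i*I) = exp (r) * cexpi (i)

   so when R is known to be zero the whole call collapses to cexpi (i),
   and under unsafe math the general case is split into one real exp
   call multiplied into the two parts of a cexpi call. */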
8167
8168 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8169 Return NULL_TREE if no simplification can be made. */
8170
8171 static tree
8172 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8173 {
8174 if (!validate_arg (arg, REAL_TYPE))
8175 return NULL_TREE;
8176
8177 /* Optimize trunc of constant value. */
8178 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8179 {
8180 REAL_VALUE_TYPE r, x;
8181 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8182
8183 x = TREE_REAL_CST (arg);
8184 real_trunc (&r, TYPE_MODE (type), &x);
8185 return build_real (type, r);
8186 }
8187
8188 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8189 }
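
/* Worked example of the constant fold above: trunc (1.9) becomes the
   REAL_CST 1.0 and trunc (-1.9) becomes -1.0 (rounding toward zero). */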
8190
8191 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8192 Return NULL_TREE if no simplification can be made. */
8193
8194 static tree
8195 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8196 {
8197 if (!validate_arg (arg, REAL_TYPE))
8198 return NULL_TREE;
8199
8200 /* Optimize floor of constant value. */
8201 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8202 {
8203 REAL_VALUE_TYPE x;
8204
8205 x = TREE_REAL_CST (arg);
8206 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8207 {
8208 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8209 REAL_VALUE_TYPE r;
8210
8211 real_floor (&r, TYPE_MODE (type), &x);
8212 return build_real (type, r);
8213 }
8214 }
8215
8216 /* Fold floor (x) where x is nonnegative to trunc (x). */
8217 if (tree_expr_nonnegative_p (arg))
8218 {
8219 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8220 if (truncfn)
8221 return build_call_expr_loc (loc, truncfn, 1, arg);
8222 }
8223
8224 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8225 }
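
/* Worked examples of the folds above: floor (2.7) becomes 2.0 and
   floor (-2.7) becomes -3.0, while a NaN argument is left alone under
   -fmath-errno; and since e.g. fabs (y) is known nonnegative,
   floor (fabs (y)) becomes trunc (fabs (y)). */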
8226
8227 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8228 Return NULL_TREE if no simplification can be made. */
8229
8230 static tree
8231 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8232 {
8233 if (!validate_arg (arg, REAL_TYPE))
8234 return NULL_TREE;
8235
8236 /* Optimize ceil of constant value. */
8237 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8238 {
8239 REAL_VALUE_TYPE x;
8240
8241 x = TREE_REAL_CST (arg);
8242 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8243 {
8244 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8245 REAL_VALUE_TYPE r;
8246
8247 real_ceil (&r, TYPE_MODE (type), &x);
8248 return build_real (type, r);
8249 }
8250 }
8251
8252 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8253 }
8254
8255 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8256 Return NULL_TREE if no simplification can be made. */
8257
8258 static tree
8259 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8260 {
8261 if (!validate_arg (arg, REAL_TYPE))
8262 return NULL_TREE;
8263
8264 /* Optimize round of constant value. */
8265 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8266 {
8267 REAL_VALUE_TYPE x;
8268
8269 x = TREE_REAL_CST (arg);
8270 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8271 {
8272 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8273 REAL_VALUE_TYPE r;
8274
8275 real_round (&r, TYPE_MODE (type), &x);
8276 return build_real (type, r);
8277 }
8278 }
8279
8280 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8281 }
8282
8283 /* Fold function call to builtin lround, lroundf or lroundl (or the
8284 corresponding long long versions) and other rounding functions. ARG
8285 is the argument to the call. Return NULL_TREE if no simplification
8286 can be made. */
8287
8288 static tree
8289 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8290 {
8291 if (!validate_arg (arg, REAL_TYPE))
8292 return NULL_TREE;
8293
8294 /* Optimize lround of constant value. */
8295 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8296 {
8297 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8298
8299 if (real_isfinite (&x))
8300 {
8301 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8302 tree ftype = TREE_TYPE (arg);
8303 REAL_VALUE_TYPE r;
8304 bool fail = false;
8305
8306 switch (DECL_FUNCTION_CODE (fndecl))
8307 {
8308 CASE_FLT_FN (BUILT_IN_IFLOOR):
8309 CASE_FLT_FN (BUILT_IN_LFLOOR):
8310 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8311 real_floor (&r, TYPE_MODE (ftype), &x);
8312 break;
8313
8314 CASE_FLT_FN (BUILT_IN_ICEIL):
8315 CASE_FLT_FN (BUILT_IN_LCEIL):
8316 CASE_FLT_FN (BUILT_IN_LLCEIL):
8317 real_ceil (&r, TYPE_MODE (ftype), &x);
8318 break;
8319
8320 CASE_FLT_FN (BUILT_IN_IROUND):
8321 CASE_FLT_FN (BUILT_IN_LROUND):
8322 CASE_FLT_FN (BUILT_IN_LLROUND):
8323 real_round (&r, TYPE_MODE (ftype), &x);
8324 break;
8325
8326 default:
8327 gcc_unreachable ();
8328 }
8329
8330 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8331 if (!fail)
8332 return wide_int_to_tree (itype, val);
8333 }
8334 }
8335
8336 switch (DECL_FUNCTION_CODE (fndecl))
8337 {
8338 CASE_FLT_FN (BUILT_IN_LFLOOR):
8339 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8340 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8341 if (tree_expr_nonnegative_p (arg))
8342 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8343 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8344 break;
8345 default:;
8346 }
8347
8348 return fold_fixed_mathfn (loc, fndecl, arg);
8349 }
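
/* Worked example of the constant fold above: for lround (2.5),
   real_round produces 3.0 and real_to_integer converts that to the
   integer 3 without setting FAIL, so the call folds to an INTEGER_CST.
   If the value does not fit in ITYPE, FAIL is set and the call is kept
   so the runtime behavior is preserved. */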
8350
8351 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8352 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8353 the argument to the call. Return NULL_TREE if no simplification can
8354 be made. */
8355
8356 static tree
8357 fold_builtin_bitop (tree fndecl, tree arg)
8358 {
8359 if (!validate_arg (arg, INTEGER_TYPE))
8360 return NULL_TREE;
8361
8362 /* Optimize for constant argument. */
8363 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8364 {
8365 tree type = TREE_TYPE (arg);
8366 int result;
8367
8368 switch (DECL_FUNCTION_CODE (fndecl))
8369 {
8370 CASE_INT_FN (BUILT_IN_FFS):
8371 result = wi::ffs (arg);
8372 break;
8373
8374 CASE_INT_FN (BUILT_IN_CLZ):
8375 if (wi::ne_p (arg, 0))
8376 result = wi::clz (arg);
8377 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8378 result = TYPE_PRECISION (type);
8379 break;
8380
8381 CASE_INT_FN (BUILT_IN_CTZ):
8382 if (wi::ne_p (arg, 0))
8383 result = wi::ctz (arg);
8384 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8385 result = TYPE_PRECISION (type);
8386 break;
8387
8388 CASE_INT_FN (BUILT_IN_CLRSB):
8389 result = wi::clrsb (arg);
8390 break;
8391
8392 CASE_INT_FN (BUILT_IN_POPCOUNT):
8393 result = wi::popcount (arg);
8394 break;
8395
8396 CASE_INT_FN (BUILT_IN_PARITY):
8397 result = wi::parity (arg);
8398 break;
8399
8400 default:
8401 gcc_unreachable ();
8402 }
8403
8404 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8405 }
8406
8407 return NULL_TREE;
8408 }
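
/* Worked examples of the constant folds above, for a 32-bit int:

     ffs (0x10)       -> 5    (1-based index of the lowest set bit)
     clz (1)          -> 31   ctz (8)      -> 3
     popcount (0xff)  -> 8    parity (7)   -> 1

   clz (0) and ctz (0) use the target's defined-at-zero value when
   CLZ/CTZ_DEFINED_VALUE_AT_ZERO provides one, else the precision. */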
8409
8410 /* Fold function call to builtin_bswap and the short, long and long long
8411 variants. Return NULL_TREE if no simplification can be made. */

8412 static tree
8413 fold_builtin_bswap (tree fndecl, tree arg)
8414 {
8415 if (! validate_arg (arg, INTEGER_TYPE))
8416 return NULL_TREE;
8417
8418 /* Optimize constant value. */
8419 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8420 {
8421 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8422
8423 switch (DECL_FUNCTION_CODE (fndecl))
8424 {
8425 case BUILT_IN_BSWAP16:
8426 case BUILT_IN_BSWAP32:
8427 case BUILT_IN_BSWAP64:
8428 {
8429 signop sgn = TYPE_SIGN (type);
8430 tree result =
8431 wide_int_to_tree (type,
8432 wide_int::from (arg, TYPE_PRECISION (type),
8433 sgn).bswap ());
8434 return result;
8435 }
8436 default:
8437 gcc_unreachable ();
8438 }
8439 }
8440
8441 return NULL_TREE;
8442 }
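
/* Worked example of the constant fold above:

     __builtin_bswap32 (0x12345678) -> 0x78563412

   computed by widening the argument to the return type's precision and
   byte-swapping the resulting wide_int. */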
8443
8444 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8445 NULL_TREE if no simplification can be made. */
8446
8447 static tree
8448 fold_builtin_hypot (location_t loc, tree fndecl,
8449 tree arg0, tree arg1, tree type)
8450 {
8451 tree res, narg0, narg1;
8452
8453 if (!validate_arg (arg0, REAL_TYPE)
8454 || !validate_arg (arg1, REAL_TYPE))
8455 return NULL_TREE;
8456
8457 /* Calculate the result when the argument is a constant. */
8458 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8459 return res;
8460
8461 /* If either argument to hypot has a negate or abs, strip that off.
8462 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8463 narg0 = fold_strip_sign_ops (arg0);
8464 narg1 = fold_strip_sign_ops (arg1);
8465 if (narg0 || narg1)
8466 {
8467 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8468 narg1 ? narg1 : arg1);
8469 }
8470
8471 /* If either argument is zero, hypot is fabs of the other. */
8472 if (real_zerop (arg0))
8473 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8474 else if (real_zerop (arg1))
8475 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8476
8477 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8478 if (flag_unsafe_math_optimizations
8479 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8480 {
8481 const REAL_VALUE_TYPE sqrt2_trunc
8482 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8483 return fold_build2_loc (loc, MULT_EXPR, type,
8484 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8485 build_real (type, sqrt2_trunc));
8486 }
8487
8488 return NULL_TREE;
8489 }
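
/* Summary of the hypot folds above:

     hypot (-x, fabs (y))  -> hypot (x, y)
     hypot (x, 0)          -> fabs (x)
     hypot (x, x)          -> fabs (x) * sqrt (2)   (unsafe math only)

   the last because sqrt (x*x + x*x) == fabs (x) * sqrt (2). */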
8490
8492 /* Fold a builtin function call to pow, powf, or powl. Return
8493 NULL_TREE if no simplification can be made. */

8494 static tree
8495 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8496 {
8497 tree res;
8498
8499 if (!validate_arg (arg0, REAL_TYPE)
8500 || !validate_arg (arg1, REAL_TYPE))
8501 return NULL_TREE;
8502
8503 /* Calculate the result when the argument is a constant. */
8504 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8505 return res;
8506
8507 /* Optimize pow(1.0,y) = 1.0. */
8508 if (real_onep (arg0))
8509 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8510
8511 if (TREE_CODE (arg1) == REAL_CST
8512 && !TREE_OVERFLOW (arg1))
8513 {
8514 REAL_VALUE_TYPE cint;
8515 REAL_VALUE_TYPE c;
8516 HOST_WIDE_INT n;
8517
8518 c = TREE_REAL_CST (arg1);
8519
8520 /* Optimize pow(x,0.0) = 1.0. */
8521 if (REAL_VALUES_EQUAL (c, dconst0))
8522 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8523 arg0);
8524
8525 /* Optimize pow(x,1.0) = x. */
8526 if (REAL_VALUES_EQUAL (c, dconst1))
8527 return arg0;
8528
8529 /* Optimize pow(x,-1.0) = 1.0/x. */
8530 if (REAL_VALUES_EQUAL (c, dconstm1))
8531 return fold_build2_loc (loc, RDIV_EXPR, type,
8532 build_real (type, dconst1), arg0);
8533
8534 /* Optimize pow(x,0.5) = sqrt(x). */
8535 if (flag_unsafe_math_optimizations
8536 && REAL_VALUES_EQUAL (c, dconsthalf))
8537 {
8538 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8539
8540 if (sqrtfn != NULL_TREE)
8541 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8542 }
8543
8544 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8545 if (flag_unsafe_math_optimizations)
8546 {
8547 const REAL_VALUE_TYPE dconstroot
8548 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8549
8550 if (REAL_VALUES_EQUAL (c, dconstroot))
8551 {
8552 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8553 if (cbrtfn != NULL_TREE)
8554 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8555 }
8556 }
8557
8558 /* Check for an integer exponent. */
8559 n = real_to_integer (&c);
8560 real_from_integer (&cint, VOIDmode, n, SIGNED);
8561 if (real_identical (&c, &cint))
8562 {
8563 /* Attempt to evaluate pow at compile-time, unless this should
8564 raise an exception. */
8565 if (TREE_CODE (arg0) == REAL_CST
8566 && !TREE_OVERFLOW (arg0)
8567 && (n > 0
8568 || (!flag_trapping_math && !flag_errno_math)
8569 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8570 {
8571 REAL_VALUE_TYPE x;
8572 bool inexact;
8573
8574 x = TREE_REAL_CST (arg0);
8575 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8576 if (flag_unsafe_math_optimizations || !inexact)
8577 return build_real (type, x);
8578 }
8579
8580 /* Strip sign ops from even integer powers. */
8581 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8582 {
8583 tree narg0 = fold_strip_sign_ops (arg0);
8584 if (narg0)
8585 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8586 }
8587 }
8588 }
8589
8590 if (flag_unsafe_math_optimizations)
8591 {
8592 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8593
8594 /* Optimize pow(expN(x),y) = expN(x*y). */
8595 if (BUILTIN_EXPONENT_P (fcode))
8596 {
8597 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8598 tree arg = CALL_EXPR_ARG (arg0, 0);
8599 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8600 return build_call_expr_loc (loc, expfn, 1, arg);
8601 }
8602
8603 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8604 if (BUILTIN_SQRT_P (fcode))
8605 {
8606 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8607 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8608 build_real (type, dconsthalf));
8609 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8610 }
8611
8612 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8613 if (BUILTIN_CBRT_P (fcode))
8614 {
8615 tree arg = CALL_EXPR_ARG (arg0, 0);
8616 if (tree_expr_nonnegative_p (arg))
8617 {
8618 const REAL_VALUE_TYPE dconstroot
8619 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8620 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8621 build_real (type, dconstroot));
8622 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8623 }
8624 }
8625
8626 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8627 if (fcode == BUILT_IN_POW
8628 || fcode == BUILT_IN_POWF
8629 || fcode == BUILT_IN_POWL)
8630 {
8631 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8632 if (tree_expr_nonnegative_p (arg00))
8633 {
8634 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8635 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8636 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8637 }
8638 }
8639 }
8640
8641 return NULL_TREE;
8642 }
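
/* Worked example of the compile-time evaluation above: for
   pow (2.0, 3.0) the exponent is the exact integer 3, real_powi
   computes 8.0 with INEXACT false, and the call folds to the REAL_CST
   8.0 even without unsafe math. The N > 0 condition avoids folding
   cases such as pow (0.0, -1.0) that may trap or set errno. */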
8643
8644 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8645 Return NULL_TREE if no simplification can be made. */

8646 static tree
8647 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8648 tree arg0, tree arg1, tree type)
8649 {
8650 if (!validate_arg (arg0, REAL_TYPE)
8651 || !validate_arg (arg1, INTEGER_TYPE))
8652 return NULL_TREE;
8653
8654 /* Optimize powi(1.0,y) = 1.0.  */
8655 if (real_onep (arg0))
8656 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8657
8658 if (tree_fits_shwi_p (arg1))
8659 {
8660 HOST_WIDE_INT c = tree_to_shwi (arg1);
8661
8662 /* Evaluate powi at compile-time. */
8663 if (TREE_CODE (arg0) == REAL_CST
8664 && !TREE_OVERFLOW (arg0))
8665 {
8666 REAL_VALUE_TYPE x;
8667 x = TREE_REAL_CST (arg0);
8668 real_powi (&x, TYPE_MODE (type), &x, c);
8669 return build_real (type, x);
8670 }
8671
8672 /* Optimize powi(x,0) = 1.0.  */
8673 if (c == 0)
8674 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8675 arg0);
8676
8677 /* Optimize powi(x,1) = x.  */
8678 if (c == 1)
8679 return arg0;
8680
8681 /* Optimize powi(x,-1) = 1.0/x.  */
8682 if (c == -1)
8683 return fold_build2_loc (loc, RDIV_EXPR, type,
8684 build_real (type, dconst1), arg0);
8685 }
8686
8687 return NULL_TREE;
8688 }
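
/* A sketch of the effect of the powi foldings above, assuming the
   arguments validate and the exponent fits in a HOST_WIDE_INT:

     __builtin_powi (x, 0)    -> 1.0   (x still evaluated for effects)
     __builtin_powi (x, 1)    -> x
     __builtin_powi (x, -1)   -> 1.0 / x
     __builtin_powi (2.0, 10) -> 1024.0  (computed at compile time)  */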
8689
8690 /* A subroutine of fold_builtin to fold the various exponent
8691 functions. Return NULL_TREE if no simplification can be made.
8692 FUNC is the corresponding MPFR exponent function. */
8693
8694 static tree
8695 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8696 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8697 {
8698 if (validate_arg (arg, REAL_TYPE))
8699 {
8700 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8701 tree res;
8702
8703 /* Calculate the result when the argument is a constant. */
8704 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8705 return res;
8706
8707 /* Optimize expN(logN(x)) = x. */
8708 if (flag_unsafe_math_optimizations)
8709 {
8710 const enum built_in_function fcode = builtin_mathfn_code (arg);
8711
8712 if ((func == mpfr_exp
8713 && (fcode == BUILT_IN_LOG
8714 || fcode == BUILT_IN_LOGF
8715 || fcode == BUILT_IN_LOGL))
8716 || (func == mpfr_exp2
8717 && (fcode == BUILT_IN_LOG2
8718 || fcode == BUILT_IN_LOG2F
8719 || fcode == BUILT_IN_LOG2L))
8720 || (func == mpfr_exp10
8721 && (fcode == BUILT_IN_LOG10
8722 || fcode == BUILT_IN_LOG10F
8723 || fcode == BUILT_IN_LOG10L)))
8724 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8725 }
8726 }
8727
8728 return NULL_TREE;
8729 }
8730
8731 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8732 arguments to the call, and TYPE is its return type.
8733 Return NULL_TREE if no simplification can be made. */
8734
8735 static tree
8736 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8737 {
8738 if (!validate_arg (arg1, POINTER_TYPE)
8739 || !validate_arg (arg2, INTEGER_TYPE)
8740 || !validate_arg (len, INTEGER_TYPE))
8741 return NULL_TREE;
8742 else
8743 {
8744 const char *p1;
8745
8746 if (TREE_CODE (arg2) != INTEGER_CST
8747 || !tree_fits_uhwi_p (len))
8748 return NULL_TREE;
8749
8750 p1 = c_getstr (arg1);
8751 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8752 {
8753 char c;
8754 const char *r;
8755 tree tem;
8756
8757 if (target_char_cast (arg2, &c))
8758 return NULL_TREE;
8759
8760 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8761
8762 if (r == NULL)
8763 return build_int_cst (TREE_TYPE (arg1), 0);
8764
8765 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8766 return fold_convert_loc (loc, type, tem);
8767 }
8768 return NULL_TREE;
8769 }
8770 }
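
/* For illustration: when the haystack is a string literal and LEN does
   not run past it, the search happens at compile time, e.g.

     __builtin_memchr ("hello", 'l', 5) -> "hello" + 2
     __builtin_memchr ("hello", 'z', 5) -> (void *) 0

   (modulo the target character set mapping done by target_char_cast).  */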
8771
8772 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8773 Return NULL_TREE if no simplification can be made. */
8774
8775 static tree
8776 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8777 {
8778 const char *p1, *p2;
8779
8780 if (!validate_arg (arg1, POINTER_TYPE)
8781 || !validate_arg (arg2, POINTER_TYPE)
8782 || !validate_arg (len, INTEGER_TYPE))
8783 return NULL_TREE;
8784
8785 /* If the LEN parameter is zero, return zero. */
8786 if (integer_zerop (len))
8787 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8788 arg1, arg2);
8789
8790 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8791 if (operand_equal_p (arg1, arg2, 0))
8792 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8793
8794 p1 = c_getstr (arg1);
8795 p2 = c_getstr (arg2);
8796
8797 /* If all arguments are constant, and the value of len is not greater
8798 than the lengths of arg1 and arg2, evaluate at compile-time. */
8799 if (tree_fits_uhwi_p (len) && p1 && p2
8800 && compare_tree_int (len, strlen (p1) + 1) <= 0
8801 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8802 {
8803 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8804
8805 if (r > 0)
8806 return integer_one_node;
8807 else if (r < 0)
8808 return integer_minus_one_node;
8809 else
8810 return integer_zero_node;
8811 }
8812
8813 /* If the LEN parameter is one, return an expression corresponding to
8814 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
8815 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8816 {
8817 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8818 tree cst_uchar_ptr_node
8819 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8820
8821 tree ind1
8822 = fold_convert_loc (loc, integer_type_node,
8823 build1 (INDIRECT_REF, cst_uchar_node,
8824 fold_convert_loc (loc,
8825 cst_uchar_ptr_node,
8826 arg1)));
8827 tree ind2
8828 = fold_convert_loc (loc, integer_type_node,
8829 build1 (INDIRECT_REF, cst_uchar_node,
8830 fold_convert_loc (loc,
8831 cst_uchar_ptr_node,
8832 arg2)));
8833 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8834 }
8835
8836 return NULL_TREE;
8837 }
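
/* For illustration, the LEN == 1 rewrite above turns

     __builtin_memcmp (a, b, 1)

   into the branch-free byte difference

     (int) *(const unsigned char *) a - (int) *(const unsigned char *) b

   whose sign agrees with what the library call would return.  */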
8838
8839 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8840 Return NULL_TREE if no simplification can be made. */
8841
8842 static tree
8843 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8844 {
8845 const char *p1, *p2;
8846
8847 if (!validate_arg (arg1, POINTER_TYPE)
8848 || !validate_arg (arg2, POINTER_TYPE))
8849 return NULL_TREE;
8850
8851 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8852 if (operand_equal_p (arg1, arg2, 0))
8853 return integer_zero_node;
8854
8855 p1 = c_getstr (arg1);
8856 p2 = c_getstr (arg2);
8857
8858 if (p1 && p2)
8859 {
8860 const int i = strcmp (p1, p2);
8861 if (i < 0)
8862 return integer_minus_one_node;
8863 else if (i > 0)
8864 return integer_one_node;
8865 else
8866 return integer_zero_node;
8867 }
8868
8869 /* If the second arg is "", return *(const unsigned char*)arg1. */
8870 if (p2 && *p2 == '\0')
8871 {
8872 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8873 tree cst_uchar_ptr_node
8874 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8875
8876 return fold_convert_loc (loc, integer_type_node,
8877 build1 (INDIRECT_REF, cst_uchar_node,
8878 fold_convert_loc (loc,
8879 cst_uchar_ptr_node,
8880 arg1)));
8881 }
8882
8883 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8884 if (p1 && *p1 == '\0')
8885 {
8886 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8887 tree cst_uchar_ptr_node
8888 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8889
8890 tree temp
8891 = fold_convert_loc (loc, integer_type_node,
8892 build1 (INDIRECT_REF, cst_uchar_node,
8893 fold_convert_loc (loc,
8894 cst_uchar_ptr_node,
8895 arg2)));
8896 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8897 }
8898
8899 return NULL_TREE;
8900 }
8901
8902 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8903 Return NULL_TREE if no simplification can be made. */
8904
8905 static tree
8906 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8907 {
8908 const char *p1, *p2;
8909
8910 if (!validate_arg (arg1, POINTER_TYPE)
8911 || !validate_arg (arg2, POINTER_TYPE)
8912 || !validate_arg (len, INTEGER_TYPE))
8913 return NULL_TREE;
8914
8915 /* If the LEN parameter is zero, return zero. */
8916 if (integer_zerop (len))
8917 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8918 arg1, arg2);
8919
8920 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8921 if (operand_equal_p (arg1, arg2, 0))
8922 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8923
8924 p1 = c_getstr (arg1);
8925 p2 = c_getstr (arg2);
8926
8927 if (tree_fits_uhwi_p (len) && p1 && p2)
8928 {
8929 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8930 if (i > 0)
8931 return integer_one_node;
8932 else if (i < 0)
8933 return integer_minus_one_node;
8934 else
8935 return integer_zero_node;
8936 }
8937
8938 /* If the second arg is "", and the length is greater than zero,
8939 return *(const unsigned char*)arg1. */
8940 if (p2 && *p2 == '\0'
8941 && TREE_CODE (len) == INTEGER_CST
8942 && tree_int_cst_sgn (len) == 1)
8943 {
8944 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8945 tree cst_uchar_ptr_node
8946 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8947
8948 return fold_convert_loc (loc, integer_type_node,
8949 build1 (INDIRECT_REF, cst_uchar_node,
8950 fold_convert_loc (loc,
8951 cst_uchar_ptr_node,
8952 arg1)));
8953 }
8954
8955 /* If the first arg is "", and the length is greater than zero,
8956 return -*(const unsigned char*)arg2. */
8957 if (p1 && *p1 == '\0'
8958 && TREE_CODE (len) == INTEGER_CST
8959 && tree_int_cst_sgn (len) == 1)
8960 {
8961 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8962 tree cst_uchar_ptr_node
8963 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8964
8965 tree temp = fold_convert_loc (loc, integer_type_node,
8966 build1 (INDIRECT_REF, cst_uchar_node,
8967 fold_convert_loc (loc,
8968 cst_uchar_ptr_node,
8969 arg2)));
8970 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8971 }
8972
8973 /* If the LEN parameter is one, return an expression corresponding to
8974 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
8975 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8976 {
8977 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8978 tree cst_uchar_ptr_node
8979 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8980
8981 tree ind1 = fold_convert_loc (loc, integer_type_node,
8982 build1 (INDIRECT_REF, cst_uchar_node,
8983 fold_convert_loc (loc,
8984 cst_uchar_ptr_node,
8985 arg1)));
8986 tree ind2 = fold_convert_loc (loc, integer_type_node,
8987 build1 (INDIRECT_REF, cst_uchar_node,
8988 fold_convert_loc (loc,
8989 cst_uchar_ptr_node,
8990 arg2)));
8991 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8992 }
8993
8994 return NULL_TREE;
8995 }
8996
8997 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8998 ARG. Return NULL_TREE if no simplification can be made. */
8999
9000 static tree
9001 fold_builtin_signbit (location_t loc, tree arg, tree type)
9002 {
9003 if (!validate_arg (arg, REAL_TYPE))
9004 return NULL_TREE;
9005
9006 /* If ARG is a compile-time constant, determine the result. */
9007 if (TREE_CODE (arg) == REAL_CST
9008 && !TREE_OVERFLOW (arg))
9009 {
9010 REAL_VALUE_TYPE c;
9011
9012 c = TREE_REAL_CST (arg);
9013 return (REAL_VALUE_NEGATIVE (c)
9014 ? build_one_cst (type)
9015 : build_zero_cst (type));
9016 }
9017
9018 /* If ARG is non-negative, the result is always zero. */
9019 if (tree_expr_nonnegative_p (arg))
9020 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9021
9022 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9023 if (!HONOR_SIGNED_ZEROS (arg))
9024 return fold_convert (type,
9025 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9026 build_real (TREE_TYPE (arg), dconst0)));
9027
9028 return NULL_TREE;
9029 }
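
/* A sketch of the signbit cases above:

     __builtin_signbit (-3.0)     -> 1    (negative constant)
     __builtin_signbit (fabs (x)) -> 0    (known nonnegative)

   and when the format has no signed zeros, signbit (x) degenerates to
   the plain comparison x < 0.0.  */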
9030
9031 /* Fold function call to builtin copysign, copysignf or copysignl with
9032 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9033 be made. */
9034
9035 static tree
9036 fold_builtin_copysign (location_t loc, tree fndecl,
9037 tree arg1, tree arg2, tree type)
9038 {
9039 tree tem;
9040
9041 if (!validate_arg (arg1, REAL_TYPE)
9042 || !validate_arg (arg2, REAL_TYPE))
9043 return NULL_TREE;
9044
9045 /* copysign(X,X) is X. */
9046 if (operand_equal_p (arg1, arg2, 0))
9047 return fold_convert_loc (loc, type, arg1);
9048
9049 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9050 if (TREE_CODE (arg1) == REAL_CST
9051 && TREE_CODE (arg2) == REAL_CST
9052 && !TREE_OVERFLOW (arg1)
9053 && !TREE_OVERFLOW (arg2))
9054 {
9055 REAL_VALUE_TYPE c1, c2;
9056
9057 c1 = TREE_REAL_CST (arg1);
9058 c2 = TREE_REAL_CST (arg2);
9059 /* c1.sign := c2.sign. */
9060 real_copysign (&c1, &c2);
9061 return build_real (type, c1);
9062 }
9063
9064 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9065 Remember to evaluate Y for side-effects. */
9066 if (tree_expr_nonnegative_p (arg2))
9067 return omit_one_operand_loc (loc, type,
9068 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9069 arg2);
9070
9071 /* Strip sign changing operations for the first argument. */
9072 tem = fold_strip_sign_ops (arg1);
9073 if (tem)
9074 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9075
9076 return NULL_TREE;
9077 }
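
/* For illustration, the copysign cases above read, in source terms:

     copysign (x, x)        -> x
     copysign (-3.0, 2.0)   -> 3.0             (constant operands)
     copysign (x, fabs (y)) -> fabs (x)        (sign bit known clear)
     copysign (-x, y)       -> copysign (x, y) (arg1's sign is ignored)  */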
9078
9079 /* Fold a call to builtin isascii with argument ARG. */
9080
9081 static tree
9082 fold_builtin_isascii (location_t loc, tree arg)
9083 {
9084 if (!validate_arg (arg, INTEGER_TYPE))
9085 return NULL_TREE;
9086 else
9087 {
9088 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9089 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9090 build_int_cst (integer_type_node,
9091 ~ (unsigned HOST_WIDE_INT) 0x7f));
9092 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9093 arg, integer_zero_node);
9094 }
9095 }
9096
9097 /* Fold a call to builtin toascii with argument ARG. */
9098
9099 static tree
9100 fold_builtin_toascii (location_t loc, tree arg)
9101 {
9102 if (!validate_arg (arg, INTEGER_TYPE))
9103 return NULL_TREE;
9104
9105 /* Transform toascii(c) -> (c & 0x7f). */
9106 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9107 build_int_cst (integer_type_node, 0x7f));
9108 }
9109
9110 /* Fold a call to builtin isdigit with argument ARG. */
9111
9112 static tree
9113 fold_builtin_isdigit (location_t loc, tree arg)
9114 {
9115 if (!validate_arg (arg, INTEGER_TYPE))
9116 return NULL_TREE;
9117 else
9118 {
9119 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9120 /* According to the C standard, isdigit is unaffected by locale.
9121 However, it definitely is affected by the target character set. */
9122 unsigned HOST_WIDE_INT target_digit0
9123 = lang_hooks.to_target_charset ('0');
9124
9125 if (target_digit0 == 0)
9126 return NULL_TREE;
9127
9128 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9129 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9130 build_int_cst (unsigned_type_node, target_digit0));
9131 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9132 build_int_cst (unsigned_type_node, 9));
9133 }
9134 }
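
/* The single unsigned comparison above suffices because the
   subtraction wraps: if c < '0', (unsigned) c - '0' becomes a huge
   value, so one "<= 9" test covers both ends of the digit range:

     isdigit (c) -> (unsigned) c - '0' <= 9  */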
9135
9136 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9137
9138 static tree
9139 fold_builtin_fabs (location_t loc, tree arg, tree type)
9140 {
9141 if (!validate_arg (arg, REAL_TYPE))
9142 return NULL_TREE;
9143
9144 arg = fold_convert_loc (loc, type, arg);
9145 if (TREE_CODE (arg) == REAL_CST)
9146 return fold_abs_const (arg, type);
9147 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9148 }
9149
9150 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9151
9152 static tree
9153 fold_builtin_abs (location_t loc, tree arg, tree type)
9154 {
9155 if (!validate_arg (arg, INTEGER_TYPE))
9156 return NULL_TREE;
9157
9158 arg = fold_convert_loc (loc, type, arg);
9159 if (TREE_CODE (arg) == INTEGER_CST)
9160 return fold_abs_const (arg, type);
9161 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9162 }
9163
9164 /* Fold a fma operation with arguments ARG[012]. */
9165
9166 tree
9167 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9168 tree type, tree arg0, tree arg1, tree arg2)
9169 {
9170 if (TREE_CODE (arg0) == REAL_CST
9171 && TREE_CODE (arg1) == REAL_CST
9172 && TREE_CODE (arg2) == REAL_CST)
9173 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9174
9175 return NULL_TREE;
9176 }
9177
9178 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9179
9180 static tree
9181 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9182 {
9183 if (validate_arg (arg0, REAL_TYPE)
9184 && validate_arg (arg1, REAL_TYPE)
9185 && validate_arg (arg2, REAL_TYPE))
9186 {
9187 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9188 if (tem)
9189 return tem;
9190
9191 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9192 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9193 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9194 }
9195 return NULL_TREE;
9196 }
9197
9198 /* Fold a call to builtin fmin or fmax. */
9199
9200 static tree
9201 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9202 tree type, bool max)
9203 {
9204 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9205 {
9206 /* Calculate the result when the argument is a constant. */
9207 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9208
9209 if (res)
9210 return res;
9211
9212 /* If either argument is NaN, return the other one. Avoid the
9213 transformation if we get (and honor) a signalling NaN. Using
9214 omit_one_operand() ensures we create a non-lvalue. */
9215 if (TREE_CODE (arg0) == REAL_CST
9216 && real_isnan (&TREE_REAL_CST (arg0))
9217 && (! HONOR_SNANS (arg0)
9218 || ! TREE_REAL_CST (arg0).signalling))
9219 return omit_one_operand_loc (loc, type, arg1, arg0);
9220 if (TREE_CODE (arg1) == REAL_CST
9221 && real_isnan (&TREE_REAL_CST (arg1))
9222 && (! HONOR_SNANS (arg1)
9223 || ! TREE_REAL_CST (arg1).signalling))
9224 return omit_one_operand_loc (loc, type, arg0, arg1);
9225
9226 /* Transform fmin/fmax(x,x) -> x. */
9227 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9228 return omit_one_operand_loc (loc, type, arg0, arg1);
9229
9230 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9231 functions to return the numeric arg if the other one is NaN.
9232 These tree codes don't honor that, so only transform if
9233 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9234 handled, so we don't have to worry about it either. */
9235 if (flag_finite_math_only)
9236 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9237 fold_convert_loc (loc, type, arg0),
9238 fold_convert_loc (loc, type, arg1));
9239 }
9240 return NULL_TREE;
9241 }
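
/* For illustration, assuming quiet (non-signalling) NaNs:

     fmin (x, __builtin_nan ("")) -> x
     fmin (x, x)                  -> x

   With -ffinite-math-only the calls collapse to MIN_EXPR/MAX_EXPR;
   those codes do not implement the C99 "return the numeric operand"
   NaN rule, which is why the flag is required.  */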
9242
9243 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9244
9245 static tree
9246 fold_builtin_carg (location_t loc, tree arg, tree type)
9247 {
9248 if (validate_arg (arg, COMPLEX_TYPE)
9249 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9250 {
9251 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9252
9253 if (atan2_fn)
9254 {
9255 tree new_arg = builtin_save_expr (arg);
9256 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9257 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9258 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9259 }
9260 }
9261
9262 return NULL_TREE;
9263 }
9264
9265 /* Fold a call to builtin logb/ilogb. */
9266
9267 static tree
9268 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9269 {
9270 if (! validate_arg (arg, REAL_TYPE))
9271 return NULL_TREE;
9272
9273 STRIP_NOPS (arg);
9274
9275 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9276 {
9277 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9278
9279 switch (value->cl)
9280 {
9281 case rvc_nan:
9282 case rvc_inf:
9283 /* If arg is Inf or NaN and we're logb, return it. */
9284 if (TREE_CODE (rettype) == REAL_TYPE)
9285 {
9286 /* For logb(-Inf) we have to return +Inf. */
9287 if (real_isinf (value) && real_isneg (value))
9288 {
9289 REAL_VALUE_TYPE tem;
9290 real_inf (&tem);
9291 return build_real (rettype, tem);
9292 }
9293 return fold_convert_loc (loc, rettype, arg);
9294 }
9295 /* Fall through... */
9296 case rvc_zero:
9297 /* Zero may set errno and/or raise an exception for logb; for
9298 ilogb we don't know the target's value of FP_ILOGB0.  */
9299 return NULL_TREE;
9300 case rvc_normal:
9301 /* For normal numbers, proceed iff radix == 2. In GCC,
9302 normalized significands are in the range [0.5, 1.0). We
9303 want the exponent as if they were [1.0, 2.0) so get the
9304 exponent and subtract 1. */
9305 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9306 return fold_convert_loc (loc, rettype,
9307 build_int_cst (integer_type_node,
9308 REAL_EXP (value)-1));
9309 break;
9310 }
9311 }
9312
9313 return NULL_TREE;
9314 }
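
/* A worked example of the exponent arithmetic above: GCC stores 8.0
   as 0.5 * 2**4, i.e. REAL_EXP == 4, whereas logb counts significands
   in [1.0, 2.0), where 8.0 == 1.0 * 2**3.  Hence REAL_EXP - 1:

     __builtin_logb (8.0) -> 3.0  */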
9315
9316 /* Fold a call to builtin significand, if radix == 2. */
9317
9318 static tree
9319 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9320 {
9321 if (! validate_arg (arg, REAL_TYPE))
9322 return NULL_TREE;
9323
9324 STRIP_NOPS (arg);
9325
9326 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9327 {
9328 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9329
9330 switch (value->cl)
9331 {
9332 case rvc_zero:
9333 case rvc_nan:
9334 case rvc_inf:
9335 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9336 return fold_convert_loc (loc, rettype, arg);
9337 case rvc_normal:
9338 /* For normal numbers, proceed iff radix == 2. */
9339 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9340 {
9341 REAL_VALUE_TYPE result = *value;
9342 /* In GCC, normalized significands are in the range [0.5,
9343 1.0). We want them to be [1.0, 2.0) so set the
9344 exponent to 1. */
9345 SET_REAL_EXP (&result, 1);
9346 return build_real (rettype, result);
9347 }
9348 break;
9349 }
9350 }
9351
9352 return NULL_TREE;
9353 }
9354
9355 /* Fold a call to builtin frexp, we can assume the base is 2. */
9356
9357 static tree
9358 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9359 {
9360 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9361 return NULL_TREE;
9362
9363 STRIP_NOPS (arg0);
9364
9365 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9366 return NULL_TREE;
9367
9368 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9369
9370 /* Proceed if a valid pointer type was passed in. */
9371 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9372 {
9373 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9374 tree frac, exp;
9375
9376 switch (value->cl)
9377 {
9378 case rvc_zero:
9379 /* For +-0, return (*exp = 0, +-0). */
9380 exp = integer_zero_node;
9381 frac = arg0;
9382 break;
9383 case rvc_nan:
9384 case rvc_inf:
9385 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9386 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9387 case rvc_normal:
9388 {
9389 /* Since the frexp function always expects base 2, and in
9390 GCC normalized significands are already in the range
9391 [0.5, 1.0), we have exactly what frexp wants. */
9392 REAL_VALUE_TYPE frac_rvt = *value;
9393 SET_REAL_EXP (&frac_rvt, 0);
9394 frac = build_real (rettype, frac_rvt);
9395 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9396 }
9397 break;
9398 default:
9399 gcc_unreachable ();
9400 }
9401
9402 /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
9403 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9404 TREE_SIDE_EFFECTS (arg1) = 1;
9405 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9406 }
9407
9408 return NULL_TREE;
9409 }
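
/* For illustration: frexp wants significands in [0.5, 1.0), exactly
   GCC's internal normalization, so no exponent adjustment is needed:

     double frac = __builtin_frexp (8.0, &e);

   folds to frac == 0.5 with e == 4 (8.0 == 0.5 * 2**4), emitted as
   the COMPOUND_EXPR (*&e = 4, 0.5).  */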
9410
9411 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9412 then we can assume the base is two. If it's false, then we have to
9413 check the mode of the TYPE parameter in certain cases. */
9414
9415 static tree
9416 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9417 tree type, bool ldexp)
9418 {
9419 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9420 {
9421 STRIP_NOPS (arg0);
9422 STRIP_NOPS (arg1);
9423
9424 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9425 if (real_zerop (arg0) || integer_zerop (arg1)
9426 || (TREE_CODE (arg0) == REAL_CST
9427 && !real_isfinite (&TREE_REAL_CST (arg0))))
9428 return omit_one_operand_loc (loc, type, arg0, arg1);
9429
9430 /* If both arguments are constant, then try to evaluate it. */
9431 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9432 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9433 && tree_fits_shwi_p (arg1))
9434 {
9435 /* Bound the maximum adjustment to twice the range of the
9436 mode's valid exponents. Use abs to ensure the range is
9437 positive as a sanity check. */
9438 const long max_exp_adj = 2 *
9439 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9440 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9441
9442 /* Get the user-requested adjustment. */
9443 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9444
9445 /* The requested adjustment must be inside this range. This
9446 is a preliminary cap to avoid things like overflow; we
9447 may still fail to compute the result for other reasons. */
9448 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9449 {
9450 REAL_VALUE_TYPE initial_result;
9451
9452 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9453
9454 /* Ensure we didn't overflow. */
9455 if (! real_isinf (&initial_result))
9456 {
9457 const REAL_VALUE_TYPE trunc_result
9458 = real_value_truncate (TYPE_MODE (type), initial_result);
9459
9460 /* Only proceed if the target mode can hold the
9461 resulting value. */
9462 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9463 return build_real (type, trunc_result);
9464 }
9465 }
9466 }
9467 }
9468
9469 return NULL_TREE;
9470 }
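
/* For illustration: with constant operands the call folds outright,
   e.g. __builtin_ldexp (1.0, 3) -> 8.0.  The max_exp_adj cap only
   rejects absurd adjustments (for IEEE double, emax == 1024 and
   emin == -1021, so |arg1| must stay below 4090); in-range requests
   that overflow to Inf or fail to round-trip through the type are
   still refused by the checks that follow.  */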
9471
9472 /* Fold a call to builtin modf. */
9473
9474 static tree
9475 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9476 {
9477 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9478 return NULL_TREE;
9479
9480 STRIP_NOPS (arg0);
9481
9482 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9483 return NULL_TREE;
9484
9485 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9486
9487 /* Proceed if a valid pointer type was passed in. */
9488 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9489 {
9490 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9491 REAL_VALUE_TYPE trunc, frac;
9492
9493 switch (value->cl)
9494 {
9495 case rvc_nan:
9496 case rvc_zero:
9497 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9498 trunc = frac = *value;
9499 break;
9500 case rvc_inf:
9501 /* For +-Inf, return (*arg1 = arg0, +-0). */
9502 frac = dconst0;
9503 frac.sign = value->sign;
9504 trunc = *value;
9505 break;
9506 case rvc_normal:
9507 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9508 real_trunc (&trunc, VOIDmode, value);
9509 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9510 /* If the original number was negative and already
9511 integral, then the fractional part is -0.0. */
9512 if (value->sign && frac.cl == rvc_zero)
9513 frac.sign = value->sign;
9514 break;
9515 }
9516
9517 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9518 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9519 build_real (rettype, trunc));
9520 TREE_SIDE_EFFECTS (arg1) = 1;
9521 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9522 build_real (rettype, frac));
9523 }
9524
9525 return NULL_TREE;
9526 }
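
/* For illustration, the rvc_normal case above gives, for constants:

     double frac = __builtin_modf (-2.5, &iptr);

   -> iptr == -2.0, frac == -0.5.  For a negative value that is
   already integral, e.g. -2.0, the fractional part must be -0.0,
   which is why the sign is copied onto a zero FRAC explicitly.  */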
9527
9528 /* Given a location LOC, an interclass builtin function decl FNDECL
9529 and its single argument ARG, return a folded expression computing
9530 the same, or NULL_TREE if we either couldn't or didn't want to fold
9531 (the latter happens if there's an RTL instruction available).  */
9532
9533 static tree
9534 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9535 {
9536 machine_mode mode;
9537
9538 if (!validate_arg (arg, REAL_TYPE))
9539 return NULL_TREE;
9540
9541 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9542 return NULL_TREE;
9543
9544 mode = TYPE_MODE (TREE_TYPE (arg));
9545
9546 /* If there is no optab, try generic code. */
9547 switch (DECL_FUNCTION_CODE (fndecl))
9548 {
9549 tree result;
9550
9551 CASE_FLT_FN (BUILT_IN_ISINF):
9552 {
9553 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9554 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9555 tree const type = TREE_TYPE (arg);
9556 REAL_VALUE_TYPE r;
9557 char buf[128];
9558
9559 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9560 real_from_string (&r, buf);
9561 result = build_call_expr (isgr_fn, 2,
9562 fold_build1_loc (loc, ABS_EXPR, type, arg),
9563 build_real (type, r));
9564 return result;
9565 }
9566 CASE_FLT_FN (BUILT_IN_FINITE):
9567 case BUILT_IN_ISFINITE:
9568 {
9569 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9570 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9571 tree const type = TREE_TYPE (arg);
9572 REAL_VALUE_TYPE r;
9573 char buf[128];
9574
9575 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9576 real_from_string (&r, buf);
9577 result = build_call_expr (isle_fn, 2,
9578 fold_build1_loc (loc, ABS_EXPR, type, arg),
9579 build_real (type, r));
9580 /*result = fold_build2_loc (loc, UNGT_EXPR,
9581 TREE_TYPE (TREE_TYPE (fndecl)),
9582 fold_build1_loc (loc, ABS_EXPR, type, arg),
9583 build_real (type, r));
9584 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9585 TREE_TYPE (TREE_TYPE (fndecl)),
9586 result);*/
9587 return result;
9588 }
9589 case BUILT_IN_ISNORMAL:
9590 {
9591 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9592 islessequal(fabs(x),DBL_MAX). */
9593 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9594 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9595 tree const type = TREE_TYPE (arg);
9596 REAL_VALUE_TYPE rmax, rmin;
9597 char buf[128];
9598
9599 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9600 real_from_string (&rmax, buf);
9601 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9602 real_from_string (&rmin, buf);
9603 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9604 result = build_call_expr (isle_fn, 2, arg,
9605 build_real (type, rmax));
9606 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9607 build_call_expr (isge_fn, 2, arg,
9608 build_real (type, rmin)));
9609 return result;
9610 }
9611 default:
9612 break;
9613 }
9614
9615 return NULL_TREE;
9616 }
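
/* For illustration, the generic expansions above in source terms,
   with DBL_MAX / DBL_MIN standing for the largest finite and the
   smallest normal value of the argument's type:

     isinf (x)    -> isgreater (fabs (x), DBL_MAX)
     isfinite (x) -> islessequal (fabs (x), DBL_MAX)
     isnormal (x) -> islessequal (fabs (x), DBL_MAX)
                     & isgreaterequal (fabs (x), DBL_MIN)

   The unordered comparisons keep the results correct (zero) for NaN
   arguments.  */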
9617
9618 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9619 ARG is the argument for the call. */
9620
9621 static tree
9622 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9623 {
9624 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9625 REAL_VALUE_TYPE r;
9626
9627 if (!validate_arg (arg, REAL_TYPE))
9628 return NULL_TREE;
9629
9630 switch (builtin_index)
9631 {
9632 case BUILT_IN_ISINF:
9633 if (!HONOR_INFINITIES (arg))
9634 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9635
9636 if (TREE_CODE (arg) == REAL_CST)
9637 {
9638 r = TREE_REAL_CST (arg);
9639 if (real_isinf (&r))
9640 return real_compare (GT_EXPR, &r, &dconst0)
9641 ? integer_one_node : integer_minus_one_node;
9642 else
9643 return integer_zero_node;
9644 }
9645
9646 return NULL_TREE;
9647
9648 case BUILT_IN_ISINF_SIGN:
9649 {
9650 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9651 /* In a boolean context, GCC will fold the inner COND_EXPR to
9652 1. So e.g. "if (isinf_sign(x))" would be folded to just
9653 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9654 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9655 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9656 tree tmp = NULL_TREE;
9657
9658 arg = builtin_save_expr (arg);
9659
9660 if (signbit_fn && isinf_fn)
9661 {
9662 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9663 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9664
9665 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9666 signbit_call, integer_zero_node);
9667 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9668 isinf_call, integer_zero_node);
9669
9670 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9671 integer_minus_one_node, integer_one_node);
9672 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9673 isinf_call, tmp,
9674 integer_zero_node);
9675 }
9676
9677 return tmp;
9678 }
9679
9680 case BUILT_IN_ISFINITE:
9681 if (!HONOR_NANS (arg)
9682 && !HONOR_INFINITIES (arg))
9683 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9684
9685 if (TREE_CODE (arg) == REAL_CST)
9686 {
9687 r = TREE_REAL_CST (arg);
9688 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9689 }
9690
9691 return NULL_TREE;
9692
9693 case BUILT_IN_ISNAN:
9694 if (!HONOR_NANS (arg))
9695 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9696
9697 if (TREE_CODE (arg) == REAL_CST)
9698 {
9699 r = TREE_REAL_CST (arg);
9700 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9701 }
9702
9703 arg = builtin_save_expr (arg);
9704 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9705
9706 default:
9707 gcc_unreachable ();
9708 }
9709 }
9710
9711 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9712 This builtin will generate code to return the appropriate floating
9713 point classification depending on the value of the floating point
9714 number passed in. The possible return values must be supplied as
9715 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9716 FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
9717 one floating point argument, which is "type generic".  */
9718
9719 static tree
9720 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9721 {
9722 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9723 arg, type, res, tmp;
9724 machine_mode mode;
9725 REAL_VALUE_TYPE r;
9726 char buf[128];
9727
9728 /* Verify the required arguments in the original call. */
9729 if (nargs != 6
9730 || !validate_arg (args[0], INTEGER_TYPE)
9731 || !validate_arg (args[1], INTEGER_TYPE)
9732 || !validate_arg (args[2], INTEGER_TYPE)
9733 || !validate_arg (args[3], INTEGER_TYPE)
9734 || !validate_arg (args[4], INTEGER_TYPE)
9735 || !validate_arg (args[5], REAL_TYPE))
9736 return NULL_TREE;
9737
9738 fp_nan = args[0];
9739 fp_infinite = args[1];
9740 fp_normal = args[2];
9741 fp_subnormal = args[3];
9742 fp_zero = args[4];
9743 arg = args[5];
9744 type = TREE_TYPE (arg);
9745 mode = TYPE_MODE (type);
9746 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9747
9748 /* fpclassify(x) ->
9749 isnan(x) ? FP_NAN :
9750 (fabs(x) == Inf ? FP_INFINITE :
9751 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9752 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9753
9754 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9755 build_real (type, dconst0));
9756 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9757 tmp, fp_zero, fp_subnormal);
9758
9759 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9760 real_from_string (&r, buf);
9761 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9762 arg, build_real (type, r));
9763 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9764
9765 if (HONOR_INFINITIES (mode))
9766 {
9767 real_inf (&r);
9768 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9769 build_real (type, r));
9770 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9771 fp_infinite, res);
9772 }
9773
9774 if (HONOR_NANS (mode))
9775 {
9776 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9777 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9778 }
9779
9780 return res;
9781 }
9782
9783 /* Fold a call to an unordered comparison function such as
9784 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9785 being called and ARG0 and ARG1 are the arguments for the call.
9786 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9787 the opposite of the desired result. UNORDERED_CODE is used
9788 for modes that can hold NaNs and ORDERED_CODE is used for
9789 the rest. */
9790
9791 static tree
9792 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9793 enum tree_code unordered_code,
9794 enum tree_code ordered_code)
9795 {
9796 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9797 enum tree_code code;
9798 tree type0, type1;
9799 enum tree_code code0, code1;
9800 tree cmp_type = NULL_TREE;
9801
9802 type0 = TREE_TYPE (arg0);
9803 type1 = TREE_TYPE (arg1);
9804
9805 code0 = TREE_CODE (type0);
9806 code1 = TREE_CODE (type1);
9807
9808 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9809 /* Choose the wider of two real types. */
9810 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9811 ? type0 : type1;
9812 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9813 cmp_type = type0;
9814 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9815 cmp_type = type1;
9816
9817 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9818 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9819
9820 if (unordered_code == UNORDERED_EXPR)
9821 {
9822 if (!HONOR_NANS (arg0))
9823 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9824 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9825 }
9826
9827 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9828 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9829 fold_build2_loc (loc, code, type, arg0, arg1));
9830 }
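
/* For illustration: the codes passed in are the inverse of the
   desired test, so isgreater is implemented as

     __builtin_isgreater (x, y) -> !(x UNLE y)

   i.e. the negation of an unordered-or-less-equal comparison, which
   yields 0 rather than raising "invalid" when either operand is a
   quiet NaN; when NaNs need not be honored, plain LE_EXPR is used.  */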
9831
9832 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9833 arithmetic if it can never overflow, or into internal functions that
9834 return both the result of the arithmetic and an overflowed boolean flag in
9835 a complex integer result, or some other check for overflow. */
9836
9837 static tree
9838 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9839 tree arg0, tree arg1, tree arg2)
9840 {
9841 enum internal_fn ifn = IFN_LAST;
9842 tree type = TREE_TYPE (TREE_TYPE (arg2));
9843 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9844 switch (fcode)
9845 {
9846 case BUILT_IN_ADD_OVERFLOW:
9847 case BUILT_IN_SADD_OVERFLOW:
9848 case BUILT_IN_SADDL_OVERFLOW:
9849 case BUILT_IN_SADDLL_OVERFLOW:
9850 case BUILT_IN_UADD_OVERFLOW:
9851 case BUILT_IN_UADDL_OVERFLOW:
9852 case BUILT_IN_UADDLL_OVERFLOW:
9853 ifn = IFN_ADD_OVERFLOW;
9854 break;
9855 case BUILT_IN_SUB_OVERFLOW:
9856 case BUILT_IN_SSUB_OVERFLOW:
9857 case BUILT_IN_SSUBL_OVERFLOW:
9858 case BUILT_IN_SSUBLL_OVERFLOW:
9859 case BUILT_IN_USUB_OVERFLOW:
9860 case BUILT_IN_USUBL_OVERFLOW:
9861 case BUILT_IN_USUBLL_OVERFLOW:
9862 ifn = IFN_SUB_OVERFLOW;
9863 break;
9864 case BUILT_IN_MUL_OVERFLOW:
9865 case BUILT_IN_SMUL_OVERFLOW:
9866 case BUILT_IN_SMULL_OVERFLOW:
9867 case BUILT_IN_SMULLL_OVERFLOW:
9868 case BUILT_IN_UMUL_OVERFLOW:
9869 case BUILT_IN_UMULL_OVERFLOW:
9870 case BUILT_IN_UMULLL_OVERFLOW:
9871 ifn = IFN_MUL_OVERFLOW;
9872 break;
9873 default:
9874 gcc_unreachable ();
9875 }
9876 tree ctype = build_complex_type (type);
9877 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9878 2, arg0, arg1);
9879 tree tgt = save_expr (call);
9880 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9881 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9882 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9883 tree store
9884 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9885 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9886 }
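
/* A sketch of the lowering above:

     bool ovf = __builtin_add_overflow (a, b, &res);

   becomes, in effect,

     tmp = .ADD_OVERFLOW (a, b);   (a complex-typed internal call)
     res = __real__ tmp;
     ovf = (bool) __imag__ tmp;

   with the arithmetic result in the real part and the overflow flag
   in the imaginary part.  */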
9887
9888 /* Fold a call to built-in function FNDECL with 0 arguments.
9889 This function returns NULL_TREE if no simplification was possible. */
9890
9891 static tree
9892 fold_builtin_0 (location_t loc, tree fndecl)
9893 {
9894 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9895 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9896 switch (fcode)
9897 {
9898 CASE_FLT_FN (BUILT_IN_INF):
9899 case BUILT_IN_INFD32:
9900 case BUILT_IN_INFD64:
9901 case BUILT_IN_INFD128:
9902 return fold_builtin_inf (loc, type, true);
9903
9904 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9905 return fold_builtin_inf (loc, type, false);
9906
9907 case BUILT_IN_CLASSIFY_TYPE:
9908 return fold_builtin_classify_type (NULL_TREE);
9909
9910 default:
9911 break;
9912 }
9913 return NULL_TREE;
9914 }
9915
9916 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9917 This function returns NULL_TREE if no simplification was possible. */
9918
9919 static tree
9920 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9921 {
9922 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9923 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9924 switch (fcode)
9925 {
9926 case BUILT_IN_CONSTANT_P:
9927 {
9928 tree val = fold_builtin_constant_p (arg0);
9929
9930 /* Gimplification will pull the CALL_EXPR for the builtin out of
9931 an if condition. When not optimizing, we'll not CSE it back.
9932 To avoid link-error style regressions, return false now.
9933 if (!val && !optimize)
9934 val = integer_zero_node;
9935
9936 return val;
9937 }
9938
9939 case BUILT_IN_CLASSIFY_TYPE:
9940 return fold_builtin_classify_type (arg0);
9941
9942 case BUILT_IN_STRLEN:
9943 return fold_builtin_strlen (loc, type, arg0);
9944
9945 CASE_FLT_FN (BUILT_IN_FABS):
9946 case BUILT_IN_FABSD32:
9947 case BUILT_IN_FABSD64:
9948 case BUILT_IN_FABSD128:
9949 return fold_builtin_fabs (loc, arg0, type);
9950
9951 case BUILT_IN_ABS:
9952 case BUILT_IN_LABS:
9953 case BUILT_IN_LLABS:
9954 case BUILT_IN_IMAXABS:
9955 return fold_builtin_abs (loc, arg0, type);
9956
9957 CASE_FLT_FN (BUILT_IN_CONJ):
9958 if (validate_arg (arg0, COMPLEX_TYPE)
9959 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9960 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9961 break;
9962
9963 CASE_FLT_FN (BUILT_IN_CREAL):
9964 if (validate_arg (arg0, COMPLEX_TYPE)
9965 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9966 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9967 break;
9968
9969 CASE_FLT_FN (BUILT_IN_CIMAG):
9970 if (validate_arg (arg0, COMPLEX_TYPE)
9971 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9972 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9973 break;
9974
9975 CASE_FLT_FN (BUILT_IN_CCOS):
9976 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9977
9978 CASE_FLT_FN (BUILT_IN_CCOSH):
9979 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9980
9981 CASE_FLT_FN (BUILT_IN_CPROJ):
9982 return fold_builtin_cproj (loc, arg0, type);
9983
9984 CASE_FLT_FN (BUILT_IN_CSIN):
9985 if (validate_arg (arg0, COMPLEX_TYPE)
9986 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9987 return do_mpc_arg1 (arg0, type, mpc_sin);
9988 break;
9989
9990 CASE_FLT_FN (BUILT_IN_CSINH):
9991 if (validate_arg (arg0, COMPLEX_TYPE)
9992 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9993 return do_mpc_arg1 (arg0, type, mpc_sinh);
9994 break;
9995
9996 CASE_FLT_FN (BUILT_IN_CTAN):
9997 if (validate_arg (arg0, COMPLEX_TYPE)
9998 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9999 return do_mpc_arg1 (arg0, type, mpc_tan);
10000 break;
10001
10002 CASE_FLT_FN (BUILT_IN_CTANH):
10003 if (validate_arg (arg0, COMPLEX_TYPE)
10004 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10005 return do_mpc_arg1 (arg0, type, mpc_tanh);
10006 break;
10007
10008 CASE_FLT_FN (BUILT_IN_CLOG):
10009 if (validate_arg (arg0, COMPLEX_TYPE)
10010 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10011 return do_mpc_arg1 (arg0, type, mpc_log);
10012 break;
10013
10014 CASE_FLT_FN (BUILT_IN_CSQRT):
10015 if (validate_arg (arg0, COMPLEX_TYPE)
10016 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10017 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10018 break;
10019
10020 CASE_FLT_FN (BUILT_IN_CASIN):
10021 if (validate_arg (arg0, COMPLEX_TYPE)
10022 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10023 return do_mpc_arg1 (arg0, type, mpc_asin);
10024 break;
10025
10026 CASE_FLT_FN (BUILT_IN_CACOS):
10027 if (validate_arg (arg0, COMPLEX_TYPE)
10028 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10029 return do_mpc_arg1 (arg0, type, mpc_acos);
10030 break;
10031
10032 CASE_FLT_FN (BUILT_IN_CATAN):
10033 if (validate_arg (arg0, COMPLEX_TYPE)
10034 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10035 return do_mpc_arg1 (arg0, type, mpc_atan);
10036 break;
10037
10038 CASE_FLT_FN (BUILT_IN_CASINH):
10039 if (validate_arg (arg0, COMPLEX_TYPE)
10040 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10041 return do_mpc_arg1 (arg0, type, mpc_asinh);
10042 break;
10043
10044 CASE_FLT_FN (BUILT_IN_CACOSH):
10045 if (validate_arg (arg0, COMPLEX_TYPE)
10046 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10047 return do_mpc_arg1 (arg0, type, mpc_acosh);
10048 break;
10049
10050 CASE_FLT_FN (BUILT_IN_CATANH):
10051 if (validate_arg (arg0, COMPLEX_TYPE)
10052 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10053 return do_mpc_arg1 (arg0, type, mpc_atanh);
10054 break;
10055
10056 CASE_FLT_FN (BUILT_IN_CABS):
10057 return fold_builtin_cabs (loc, arg0, type, fndecl);
10058
10059 CASE_FLT_FN (BUILT_IN_CARG):
10060 return fold_builtin_carg (loc, arg0, type);
10061
10062 CASE_FLT_FN (BUILT_IN_SQRT):
10063 return fold_builtin_sqrt (loc, arg0, type);
10064
10065 CASE_FLT_FN (BUILT_IN_CBRT):
10066 return fold_builtin_cbrt (loc, arg0, type);
10067
10068 CASE_FLT_FN (BUILT_IN_ASIN):
10069 if (validate_arg (arg0, REAL_TYPE))
10070 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10071 &dconstm1, &dconst1, true);
10072 break;
10073
10074 CASE_FLT_FN (BUILT_IN_ACOS):
10075 if (validate_arg (arg0, REAL_TYPE))
10076 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10077 &dconstm1, &dconst1, true);
10078 break;
10079
10080 CASE_FLT_FN (BUILT_IN_ATAN):
10081 if (validate_arg (arg0, REAL_TYPE))
10082 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10083 break;
10084
10085 CASE_FLT_FN (BUILT_IN_ASINH):
10086 if (validate_arg (arg0, REAL_TYPE))
10087 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10088 break;
10089
10090 CASE_FLT_FN (BUILT_IN_ACOSH):
10091 if (validate_arg (arg0, REAL_TYPE))
10092 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10093 &dconst1, NULL, true);
10094 break;
10095
10096 CASE_FLT_FN (BUILT_IN_ATANH):
10097 if (validate_arg (arg0, REAL_TYPE))
10098 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10099 &dconstm1, &dconst1, false);
10100 break;
10101
10102 CASE_FLT_FN (BUILT_IN_SIN):
10103 if (validate_arg (arg0, REAL_TYPE))
10104 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10105 break;
10106
10107 CASE_FLT_FN (BUILT_IN_COS):
10108 return fold_builtin_cos (loc, arg0, type, fndecl);
10109
10110 CASE_FLT_FN (BUILT_IN_TAN):
10111 return fold_builtin_tan (arg0, type);
10112
10113 CASE_FLT_FN (BUILT_IN_CEXP):
10114 return fold_builtin_cexp (loc, arg0, type);
10115
10116 CASE_FLT_FN (BUILT_IN_CEXPI):
10117 if (validate_arg (arg0, REAL_TYPE))
10118 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10119 break;
10120
10121 CASE_FLT_FN (BUILT_IN_SINH):
10122 if (validate_arg (arg0, REAL_TYPE))
10123 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10124 break;
10125
10126 CASE_FLT_FN (BUILT_IN_COSH):
10127 return fold_builtin_cosh (loc, arg0, type, fndecl);
10128
10129 CASE_FLT_FN (BUILT_IN_TANH):
10130 if (validate_arg (arg0, REAL_TYPE))
10131 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10132 break;
10133
10134 CASE_FLT_FN (BUILT_IN_ERF):
10135 if (validate_arg (arg0, REAL_TYPE))
10136 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10137 break;
10138
10139 CASE_FLT_FN (BUILT_IN_ERFC):
10140 if (validate_arg (arg0, REAL_TYPE))
10141 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10142 break;
10143
10144 CASE_FLT_FN (BUILT_IN_TGAMMA):
10145 if (validate_arg (arg0, REAL_TYPE))
10146 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10147 break;
10148
10149 CASE_FLT_FN (BUILT_IN_EXP):
10150 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10151
10152 CASE_FLT_FN (BUILT_IN_EXP2):
10153 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10154
10155 CASE_FLT_FN (BUILT_IN_EXP10):
10156 CASE_FLT_FN (BUILT_IN_POW10):
10157 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10158
10159 CASE_FLT_FN (BUILT_IN_EXPM1):
10160 if (validate_arg (arg0, REAL_TYPE))
10161 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10162 break;
10163
10164 CASE_FLT_FN (BUILT_IN_LOG):
10165 if (validate_arg (arg0, REAL_TYPE))
10166 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10167 break;
10168
10169 CASE_FLT_FN (BUILT_IN_LOG2):
10170 if (validate_arg (arg0, REAL_TYPE))
10171 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10172 break;
10173
10174 CASE_FLT_FN (BUILT_IN_LOG10):
10175 if (validate_arg (arg0, REAL_TYPE))
10176 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10177 break;
10178
10179 CASE_FLT_FN (BUILT_IN_LOG1P):
10180 if (validate_arg (arg0, REAL_TYPE))
10181 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10182 &dconstm1, NULL, false);
10183 break;
10184
10185 CASE_FLT_FN (BUILT_IN_J0):
10186 if (validate_arg (arg0, REAL_TYPE))
10187 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10188 NULL, NULL, 0);
10189 break;
10190
10191 CASE_FLT_FN (BUILT_IN_J1):
10192 if (validate_arg (arg0, REAL_TYPE))
10193 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10194 NULL, NULL, 0);
10195 break;
10196
10197 CASE_FLT_FN (BUILT_IN_Y0):
10198 if (validate_arg (arg0, REAL_TYPE))
10199 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10200 &dconst0, NULL, false);
10201 break;
10202
10203 CASE_FLT_FN (BUILT_IN_Y1):
10204 if (validate_arg (arg0, REAL_TYPE))
10205 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10206 &dconst0, NULL, false);
10207 break;
10208
10209 CASE_FLT_FN (BUILT_IN_NAN):
10210 case BUILT_IN_NAND32:
10211 case BUILT_IN_NAND64:
10212 case BUILT_IN_NAND128:
10213 return fold_builtin_nan (arg0, type, true);
10214
10215 CASE_FLT_FN (BUILT_IN_NANS):
10216 return fold_builtin_nan (arg0, type, false);
10217
10218 CASE_FLT_FN (BUILT_IN_FLOOR):
10219 return fold_builtin_floor (loc, fndecl, arg0);
10220
10221 CASE_FLT_FN (BUILT_IN_CEIL):
10222 return fold_builtin_ceil (loc, fndecl, arg0);
10223
10224 CASE_FLT_FN (BUILT_IN_TRUNC):
10225 return fold_builtin_trunc (loc, fndecl, arg0);
10226
10227 CASE_FLT_FN (BUILT_IN_ROUND):
10228 return fold_builtin_round (loc, fndecl, arg0);
10229
10230 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10231 CASE_FLT_FN (BUILT_IN_RINT):
10232 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10233
10234 CASE_FLT_FN (BUILT_IN_ICEIL):
10235 CASE_FLT_FN (BUILT_IN_LCEIL):
10236 CASE_FLT_FN (BUILT_IN_LLCEIL):
10237 CASE_FLT_FN (BUILT_IN_LFLOOR):
10238 CASE_FLT_FN (BUILT_IN_IFLOOR):
10239 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10240 CASE_FLT_FN (BUILT_IN_IROUND):
10241 CASE_FLT_FN (BUILT_IN_LROUND):
10242 CASE_FLT_FN (BUILT_IN_LLROUND):
10243 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10244
10245 CASE_FLT_FN (BUILT_IN_IRINT):
10246 CASE_FLT_FN (BUILT_IN_LRINT):
10247 CASE_FLT_FN (BUILT_IN_LLRINT):
10248 return fold_fixed_mathfn (loc, fndecl, arg0);
10249
10250 case BUILT_IN_BSWAP16:
10251 case BUILT_IN_BSWAP32:
10252 case BUILT_IN_BSWAP64:
10253 return fold_builtin_bswap (fndecl, arg0);
10254
10255 CASE_INT_FN (BUILT_IN_FFS):
10256 CASE_INT_FN (BUILT_IN_CLZ):
10257 CASE_INT_FN (BUILT_IN_CTZ):
10258 CASE_INT_FN (BUILT_IN_CLRSB):
10259 CASE_INT_FN (BUILT_IN_POPCOUNT):
10260 CASE_INT_FN (BUILT_IN_PARITY):
10261 return fold_builtin_bitop (fndecl, arg0);
10262
10263 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10264 return fold_builtin_signbit (loc, arg0, type);
10265
10266 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10267 return fold_builtin_significand (loc, arg0, type);
10268
10269 CASE_FLT_FN (BUILT_IN_ILOGB):
10270 CASE_FLT_FN (BUILT_IN_LOGB):
10271 return fold_builtin_logb (loc, arg0, type);
10272
10273 case BUILT_IN_ISASCII:
10274 return fold_builtin_isascii (loc, arg0);
10275
10276 case BUILT_IN_TOASCII:
10277 return fold_builtin_toascii (loc, arg0);
10278
10279 case BUILT_IN_ISDIGIT:
10280 return fold_builtin_isdigit (loc, arg0);
10281
10282 CASE_FLT_FN (BUILT_IN_FINITE):
10283 case BUILT_IN_FINITED32:
10284 case BUILT_IN_FINITED64:
10285 case BUILT_IN_FINITED128:
10286 case BUILT_IN_ISFINITE:
10287 {
10288 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10289 if (ret)
10290 return ret;
10291 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10292 }
10293
10294 CASE_FLT_FN (BUILT_IN_ISINF):
10295 case BUILT_IN_ISINFD32:
10296 case BUILT_IN_ISINFD64:
10297 case BUILT_IN_ISINFD128:
10298 {
10299 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10300 if (ret)
10301 return ret;
10302 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10303 }
10304
10305 case BUILT_IN_ISNORMAL:
10306 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10307
10308 case BUILT_IN_ISINF_SIGN:
10309 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10310
10311 CASE_FLT_FN (BUILT_IN_ISNAN):
10312 case BUILT_IN_ISNAND32:
10313 case BUILT_IN_ISNAND64:
10314 case BUILT_IN_ISNAND128:
10315 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10316
10317 case BUILT_IN_FREE:
10318 if (integer_zerop (arg0))
10319 return build_empty_stmt (loc);
10320 break;
10321
10322 default:
10323 break;
10324 }
10325
10326 return NULL_TREE;
10327
10328 }
10329
10330 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10331 This function returns NULL_TREE if no simplification was possible. */
10332
10333 static tree
10334 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10335 {
10336 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10337 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10338
10339 switch (fcode)
10340 {
10341 CASE_FLT_FN (BUILT_IN_JN):
10342 if (validate_arg (arg0, INTEGER_TYPE)
10343 && validate_arg (arg1, REAL_TYPE))
10344 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10345 break;
10346
10347 CASE_FLT_FN (BUILT_IN_YN):
10348 if (validate_arg (arg0, INTEGER_TYPE)
10349 && validate_arg (arg1, REAL_TYPE))
10350 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10351 &dconst0, false);
10352 break;
10353
10354 CASE_FLT_FN (BUILT_IN_DREM):
10355 CASE_FLT_FN (BUILT_IN_REMAINDER):
10356 if (validate_arg (arg0, REAL_TYPE)
10357 && validate_arg (arg1, REAL_TYPE))
10358 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10359 break;
10360
10361 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10362 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10363 if (validate_arg (arg0, REAL_TYPE)
10364 && validate_arg (arg1, POINTER_TYPE))
10365 return do_mpfr_lgamma_r (arg0, arg1, type);
10366 break;
10367
10368 CASE_FLT_FN (BUILT_IN_ATAN2):
10369 if (validate_arg (arg0, REAL_TYPE)
10370 && validate_arg (arg1, REAL_TYPE))
10371 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10372 break;
10373
10374 CASE_FLT_FN (BUILT_IN_FDIM):
10375 if (validate_arg (arg0, REAL_TYPE)
10376 && validate_arg (arg1, REAL_TYPE))
10377 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10378 break;
10379
10380 CASE_FLT_FN (BUILT_IN_HYPOT):
10381 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10382
10383 CASE_FLT_FN (BUILT_IN_CPOW):
10384 if (validate_arg (arg0, COMPLEX_TYPE)
10385 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10386 && validate_arg (arg1, COMPLEX_TYPE)
10387 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10388 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10389 break;
10390
10391 CASE_FLT_FN (BUILT_IN_LDEXP):
10392 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10393 CASE_FLT_FN (BUILT_IN_SCALBN):
10394 CASE_FLT_FN (BUILT_IN_SCALBLN):
10395 return fold_builtin_load_exponent (loc, arg0, arg1,
10396 type, /*ldexp=*/false);
10397
10398 CASE_FLT_FN (BUILT_IN_FREXP):
10399 return fold_builtin_frexp (loc, arg0, arg1, type);
10400
10401 CASE_FLT_FN (BUILT_IN_MODF):
10402 return fold_builtin_modf (loc, arg0, arg1, type);
10403
10404 case BUILT_IN_STRSTR:
10405 return fold_builtin_strstr (loc, arg0, arg1, type);
10406
10407 case BUILT_IN_STRSPN:
10408 return fold_builtin_strspn (loc, arg0, arg1);
10409
10410 case BUILT_IN_STRCSPN:
10411 return fold_builtin_strcspn (loc, arg0, arg1);
10412
10413 case BUILT_IN_STRCHR:
10414 case BUILT_IN_INDEX:
10415 return fold_builtin_strchr (loc, arg0, arg1, type);
10416
10417 case BUILT_IN_STRRCHR:
10418 case BUILT_IN_RINDEX:
10419 return fold_builtin_strrchr (loc, arg0, arg1, type);
10420
10421 case BUILT_IN_STRCMP:
10422 return fold_builtin_strcmp (loc, arg0, arg1);
10423
10424 case BUILT_IN_STRPBRK:
10425 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10426
10427 case BUILT_IN_EXPECT:
10428 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10429
10430 CASE_FLT_FN (BUILT_IN_POW):
10431 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10432
10433 CASE_FLT_FN (BUILT_IN_POWI):
10434 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10435
10436 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10437 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10438
10439 CASE_FLT_FN (BUILT_IN_FMIN):
10440 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10441
10442 CASE_FLT_FN (BUILT_IN_FMAX):
10443 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10444
10445 case BUILT_IN_ISGREATER:
10446 return fold_builtin_unordered_cmp (loc, fndecl,
10447 arg0, arg1, UNLE_EXPR, LE_EXPR);
10448 case BUILT_IN_ISGREATEREQUAL:
10449 return fold_builtin_unordered_cmp (loc, fndecl,
10450 arg0, arg1, UNLT_EXPR, LT_EXPR);
10451 case BUILT_IN_ISLESS:
10452 return fold_builtin_unordered_cmp (loc, fndecl,
10453 arg0, arg1, UNGE_EXPR, GE_EXPR);
10454 case BUILT_IN_ISLESSEQUAL:
10455 return fold_builtin_unordered_cmp (loc, fndecl,
10456 arg0, arg1, UNGT_EXPR, GT_EXPR);
10457 case BUILT_IN_ISLESSGREATER:
10458 return fold_builtin_unordered_cmp (loc, fndecl,
10459 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10460 case BUILT_IN_ISUNORDERED:
10461 return fold_builtin_unordered_cmp (loc, fndecl,
10462 arg0, arg1, UNORDERED_EXPR,
10463 NOP_EXPR);
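
    /* For illustration: with the UNLE_EXPR/LE_EXPR pair above,
       isgreater (x, y) folds to the negation of x UNLE y when NaNs
       are honored (so no invalid-operand exception is raised), and
       to the negation of the ordinary x <= y otherwise.  */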
10464
10465 /* We do the folding for va_start in the expander. */
10466 case BUILT_IN_VA_START:
10467 break;
10468
10469 case BUILT_IN_OBJECT_SIZE:
10470 return fold_builtin_object_size (arg0, arg1);
10471
10472 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10473 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10474
10475 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10476 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10477
10478 default:
10479 break;
10480 }
10481 return NULL_TREE;
10482 }
10483
10484 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10485 and ARG2.
10486 This function returns NULL_TREE if no simplification was possible. */
10487
10488 static tree
10489 fold_builtin_3 (location_t loc, tree fndecl,
10490 tree arg0, tree arg1, tree arg2)
10491 {
10492 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10493 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10494 switch (fcode)
10495 {
10497 CASE_FLT_FN (BUILT_IN_SINCOS):
10498 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10499
10500 CASE_FLT_FN (BUILT_IN_FMA):
10501 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10503
10504 CASE_FLT_FN (BUILT_IN_REMQUO):
10505 if (validate_arg (arg0, REAL_TYPE)
10506 && validate_arg (arg1, REAL_TYPE)
10507 && validate_arg (arg2, POINTER_TYPE))
10508 return do_mpfr_remquo (arg0, arg1, arg2);
10509 break;
10510
10511 case BUILT_IN_STRNCMP:
10512 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10513
10514 case BUILT_IN_MEMCHR:
10515 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10516
10517 case BUILT_IN_BCMP:
10518 case BUILT_IN_MEMCMP:
10519 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10520
10521 case BUILT_IN_EXPECT:
10522 return fold_builtin_expect (loc, arg0, arg1, arg2);
10523
10524 case BUILT_IN_ADD_OVERFLOW:
10525 case BUILT_IN_SUB_OVERFLOW:
10526 case BUILT_IN_MUL_OVERFLOW:
10527 case BUILT_IN_SADD_OVERFLOW:
10528 case BUILT_IN_SADDL_OVERFLOW:
10529 case BUILT_IN_SADDLL_OVERFLOW:
10530 case BUILT_IN_SSUB_OVERFLOW:
10531 case BUILT_IN_SSUBL_OVERFLOW:
10532 case BUILT_IN_SSUBLL_OVERFLOW:
10533 case BUILT_IN_SMUL_OVERFLOW:
10534 case BUILT_IN_SMULL_OVERFLOW:
10535 case BUILT_IN_SMULLL_OVERFLOW:
10536 case BUILT_IN_UADD_OVERFLOW:
10537 case BUILT_IN_UADDL_OVERFLOW:
10538 case BUILT_IN_UADDLL_OVERFLOW:
10539 case BUILT_IN_USUB_OVERFLOW:
10540 case BUILT_IN_USUBL_OVERFLOW:
10541 case BUILT_IN_USUBLL_OVERFLOW:
10542 case BUILT_IN_UMUL_OVERFLOW:
10543 case BUILT_IN_UMULL_OVERFLOW:
10544 case BUILT_IN_UMULLL_OVERFLOW:
10545 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10546
10547 default:
10548 break;
10549 }
10550 return NULL_TREE;
10551 }
10552
10553 /* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
10554    arguments.  The trailing bool parameter (historically IGNORE, true if
10555    the result of the call is ignored) is currently unused.  This function
10556    returns NULL_TREE if no simplification was possible.  */
10557
10558 tree
10559 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10560 {
10561 tree ret = NULL_TREE;
10562
10563 switch (nargs)
10564 {
10565 case 0:
10566 ret = fold_builtin_0 (loc, fndecl);
10567 break;
10568 case 1:
10569 ret = fold_builtin_1 (loc, fndecl, args[0]);
10570 break;
10571 case 2:
10572 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10573 break;
10574 case 3:
10575 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10576 break;
10577 default:
10578 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10579 break;
10580 }
10581 if (ret)
10582 {
10583 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10584 SET_EXPR_LOCATION (ret, loc);
10585 TREE_NO_WARNING (ret) = 1;
10586 return ret;
10587 }
10588 return NULL_TREE;
10589 }
10590
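/* A sketch of why the NOP_EXPR wrapper in fold_builtin_n matters
   (illustrative): a statement such as

     strcmp (s, s);

   folds to the constant 0, and a bare constant in the statement list
   would provoke a "statement with no effect" warning; wrapping the
   result in a NOP_EXPR with TREE_NO_WARNING set suppresses that.  */
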
10591 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10592 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10593 of arguments in ARGS to be omitted. OLDNARGS is the number of
10594 elements in ARGS. */
10595
10596 static tree
10597 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10598 int skip, tree fndecl, int n, va_list newargs)
10599 {
10600 int nargs = oldnargs - skip + n;
10601 tree *buffer;
10602
10603 if (n > 0)
10604 {
10605 int i, j;
10606
10607 buffer = XALLOCAVEC (tree, nargs);
10608 for (i = 0; i < n; i++)
10609 buffer[i] = va_arg (newargs, tree);
10610 for (j = skip; j < oldnargs; j++, i++)
10611 buffer[i] = args[j];
10612 }
10613 else
10614 buffer = args + skip;
10615
10616 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10617 }
10618
10619 /* Return true if FNDECL shouldn't be folded right now.
10620 If a built-in function has an inline attribute always_inline
10621 wrapper, defer folding it after always_inline functions have
10622 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10623 might not be performed. */
10624
10625 bool
10626 avoid_folding_inline_builtin (tree fndecl)
10627 {
10628 return (DECL_DECLARED_INLINE_P (fndecl)
10629 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10630 && cfun
10631 && !cfun->always_inline_functions_inlined
10632 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10633 }
10634
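/* For instance, with -D_FORTIFY_SOURCE glibc provides always_inline
   wrappers along these lines (simplified, illustrative):

     extern __inline __attribute__ ((always_inline, gnu_inline)) char *
     strcpy (char *d, const char *s)
     {
       return __builtin___strcpy_chk (d, s, __builtin_object_size (d, 1));
     }

   Folding the strcpy call before the wrapper is inlined would bypass
   the object size checking, hence the deferral above.  */
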
10635 /* A wrapper function for builtin folding that prevents warnings for
10636 "statement without effect" and the like, caused by removing the
10637 call node earlier than the warning is generated. */
10638
10639 tree
10640 fold_call_expr (location_t loc, tree exp, bool ignore)
10641 {
10642 tree ret = NULL_TREE;
10643 tree fndecl = get_callee_fndecl (exp);
10644 if (fndecl
10645 && TREE_CODE (fndecl) == FUNCTION_DECL
10646 && DECL_BUILT_IN (fndecl)
10647 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10648 yet. Defer folding until we see all the arguments
10649 (after inlining). */
10650 && !CALL_EXPR_VA_ARG_PACK (exp))
10651 {
10652 int nargs = call_expr_nargs (exp);
10653
10654 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10655 instead last argument is __builtin_va_arg_pack (). Defer folding
10656 even in that case, until arguments are finalized. */
10657 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10658 {
10659 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10660 if (fndecl2
10661 && TREE_CODE (fndecl2) == FUNCTION_DECL
10662 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10663 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10664 return NULL_TREE;
10665 }
10666
10667 if (avoid_folding_inline_builtin (fndecl))
10668 return NULL_TREE;
10669
10670 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10671 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10672 CALL_EXPR_ARGP (exp), ignore);
10673 else
10674 {
10675 tree *args = CALL_EXPR_ARGP (exp);
10676 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10677 if (ret)
10678 return ret;
10679 }
10680 }
10681 return NULL_TREE;
10682 }
10683
10684 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10685 N arguments are passed in the array ARGARRAY. Return a folded
10686 expression or NULL_TREE if no simplification was possible. */
10687
10688 tree
10689 fold_builtin_call_array (location_t loc, tree,
10690 tree fn,
10691 int n,
10692 tree *argarray)
10693 {
10694 if (TREE_CODE (fn) != ADDR_EXPR)
10695 return NULL_TREE;
10696
10697 tree fndecl = TREE_OPERAND (fn, 0);
10698 if (TREE_CODE (fndecl) == FUNCTION_DECL
10699 && DECL_BUILT_IN (fndecl))
10700 {
10701 /* If last argument is __builtin_va_arg_pack (), arguments to this
10702 function are not finalized yet. Defer folding until they are. */
10703 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10704 {
10705 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10706 if (fndecl2
10707 && TREE_CODE (fndecl2) == FUNCTION_DECL
10708 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10709 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10710 return NULL_TREE;
10711 }
10712 if (avoid_folding_inline_builtin (fndecl))
10713 return NULL_TREE;
10714 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10715 return targetm.fold_builtin (fndecl, n, argarray, false);
10716 else
10717 return fold_builtin_n (loc, fndecl, argarray, n, false);
10718 }
10719
10720 return NULL_TREE;
10721 }
10722
10723 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10724 along with N new arguments specified as the "..." parameters. SKIP
10725 is the number of arguments in EXP to be omitted. This function is used
10726 to do varargs-to-varargs transformations. */
10727
10728 static tree
10729 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10730 {
10731 va_list ap;
10732 tree t;
10733
10734 va_start (ap, n);
10735 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10736 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10737 va_end (ap);
10738
10739 return t;
10740 }
10741
10742 /* Validate a single argument ARG against a tree code CODE representing
10743 a type. */
10744
10745 static bool
10746 validate_arg (const_tree arg, enum tree_code code)
10747 {
10748 if (!arg)
10749 return false;
10750 else if (code == POINTER_TYPE)
10751 return POINTER_TYPE_P (TREE_TYPE (arg));
10752 else if (code == INTEGER_TYPE)
10753 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10754 return code == TREE_CODE (TREE_TYPE (arg));
10755 }
10756
10757 /* This function validates the types of a function call argument list
10758    against a specified list of tree_codes.  If the last specifier is a 0,
10759    that represents an ellipsis; otherwise the last specifier must be a
10760    VOID_TYPE.
10761
10762 This is the GIMPLE version of validate_arglist. Eventually we want to
10763 completely convert builtins.c to work from GIMPLEs and the tree based
10764 validate_arglist will then be removed. */
10765
10766 bool
10767 validate_gimple_arglist (const gcall *call, ...)
10768 {
10769 enum tree_code code;
10770 bool res = false;
10771 va_list ap;
10772 const_tree arg;
10773 size_t i;
10774
10775 va_start (ap, call);
10776 i = 0;
10777
10778 do
10779 {
10780 code = (enum tree_code) va_arg (ap, int);
10781 switch (code)
10782 {
10783 case 0:
10784 /* This signifies an ellipsis; any further arguments are all ok.  */
10785 res = true;
10786 goto end;
10787 case VOID_TYPE:
10788 /* This signifies an endlink, if no arguments remain, return
10789 true, otherwise return false. */
10790 res = (i == gimple_call_num_args (call));
10791 goto end;
10792 default:
10793 /* If no parameters remain or the parameter's code does not
10794 match the specified code, return false. Otherwise continue
10795 checking any remaining arguments. */
10796 arg = gimple_call_arg (call, i++);
10797 if (!validate_arg (arg, code))
10798 goto end;
10799 break;
10800 }
10801 }
10802 while (1);
10803
10804   /* We need gotos here so that va_end is reached on every path out of
10805      the loop above.  */
10806 end: ;
10807 va_end (ap);
10808
10809 return res;
10810 }
10811
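/* Example use (illustrative): a call expected to look like
   memcpy (void *, const void *, size_t) could be checked with

     validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                              INTEGER_TYPE, VOID_TYPE)

   while ending the list with 0 instead of VOID_TYPE accepts any
   further arguments, as for a stdarg-style builtin.  */
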
10812 /* Default target-specific builtin expander that does nothing. */
10813
10814 rtx
10815 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10816 rtx target ATTRIBUTE_UNUSED,
10817 rtx subtarget ATTRIBUTE_UNUSED,
10818 machine_mode mode ATTRIBUTE_UNUSED,
10819 int ignore ATTRIBUTE_UNUSED)
10820 {
10821 return NULL_RTX;
10822 }
10823
10824 /* Returns true if EXP represents data that would potentially reside
10825    in a readonly section.  */
10826
10827 bool
10828 readonly_data_expr (tree exp)
10829 {
10830 STRIP_NOPS (exp);
10831
10832 if (TREE_CODE (exp) != ADDR_EXPR)
10833 return false;
10834
10835 exp = get_base_address (TREE_OPERAND (exp, 0));
10836 if (!exp)
10837 return false;
10838
10839 /* Make sure we call decl_readonly_section only for trees it
10840 can handle (since it returns true for everything it doesn't
10841 understand). */
10842 if (TREE_CODE (exp) == STRING_CST
10843 || TREE_CODE (exp) == CONSTRUCTOR
10844 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10845 return decl_readonly_section (exp, 0);
10846 else
10847 return false;
10848 }
10849
10850 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10851 to the call, and TYPE is its return type.
10852
10853 Return NULL_TREE if no simplification was possible, otherwise return the
10854 simplified form of the call as a tree.
10855
10856 The simplified form may be a constant or other expression which
10857 computes the same value, but in a more efficient manner (including
10858 calls to other builtin functions).
10859
10860 The call may contain arguments which need to be evaluated, but
10861 which are not useful to determine the result of the call. In
10862 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10863 COMPOUND_EXPR will be an argument which must be evaluated.
10864 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10865 COMPOUND_EXPR in the chain will contain the tree for the simplified
10866 form of the builtin function call. */
10867
10868 static tree
10869 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10870 {
10871 if (!validate_arg (s1, POINTER_TYPE)
10872 || !validate_arg (s2, POINTER_TYPE))
10873 return NULL_TREE;
10874 else
10875 {
10876 tree fn;
10877 const char *p1, *p2;
10878
10879 p2 = c_getstr (s2);
10880 if (p2 == NULL)
10881 return NULL_TREE;
10882
10883 p1 = c_getstr (s1);
10884 if (p1 != NULL)
10885 {
10886 const char *r = strstr (p1, p2);
10887 tree tem;
10888
10889 if (r == NULL)
10890 return build_int_cst (TREE_TYPE (s1), 0);
10891
10892 /* Return an offset into the constant string argument. */
10893 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10894 return fold_convert_loc (loc, type, tem);
10895 }
10896
10897 /* The argument is const char *, and the result is char *, so we need
10898 a type conversion here to avoid a warning. */
10899 if (p2[0] == '\0')
10900 return fold_convert_loc (loc, type, s1);
10901
10902 if (p2[1] != '\0')
10903 return NULL_TREE;
10904
10905 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10906 if (!fn)
10907 return NULL_TREE;
10908
10909 /* New argument list transforming strstr(s1, s2) to
10910 strchr(s1, s2[0]). */
10911 return build_call_expr_loc (loc, fn, 2, s1,
10912 build_int_cst (integer_type_node, p2[0]));
10913 }
10914 }
10915
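/* Illustrative examples of the strstr folds above:

     strstr (s, "")          ->  (char *) s
     strstr (s, "c")         ->  strchr (s, 'c')
     strstr ("hello", "ll")  ->  &"hello"[2]
     strstr ("hello", "z")   ->  (char *) 0  */
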
10916 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10917 the call, and TYPE is its return type.
10918
10919 Return NULL_TREE if no simplification was possible, otherwise return the
10920 simplified form of the call as a tree.
10921
10922 The simplified form may be a constant or other expression which
10923 computes the same value, but in a more efficient manner (including
10924 calls to other builtin functions).
10925
10926 The call may contain arguments which need to be evaluated, but
10927 which are not useful to determine the result of the call. In
10928 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10929 COMPOUND_EXPR will be an argument which must be evaluated.
10930 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10931 COMPOUND_EXPR in the chain will contain the tree for the simplified
10932 form of the builtin function call. */
10933
10934 static tree
10935 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10936 {
10937 if (!validate_arg (s1, POINTER_TYPE)
10938 || !validate_arg (s2, INTEGER_TYPE))
10939 return NULL_TREE;
10940 else
10941 {
10942 const char *p1;
10943
10944 if (TREE_CODE (s2) != INTEGER_CST)
10945 return NULL_TREE;
10946
10947 p1 = c_getstr (s1);
10948 if (p1 != NULL)
10949 {
10950 char c;
10951 const char *r;
10952 tree tem;
10953
10954 if (target_char_cast (s2, &c))
10955 return NULL_TREE;
10956
10957 r = strchr (p1, c);
10958
10959 if (r == NULL)
10960 return build_int_cst (TREE_TYPE (s1), 0);
10961
10962 /* Return an offset into the constant string argument. */
10963 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10964 return fold_convert_loc (loc, type, tem);
10965 }
10966 return NULL_TREE;
10967 }
10968 }
10969
10970 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10971 the call, and TYPE is its return type.
10972
10973 Return NULL_TREE if no simplification was possible, otherwise return the
10974 simplified form of the call as a tree.
10975
10976 The simplified form may be a constant or other expression which
10977 computes the same value, but in a more efficient manner (including
10978 calls to other builtin functions).
10979
10980 The call may contain arguments which need to be evaluated, but
10981 which are not useful to determine the result of the call. In
10982 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10983 COMPOUND_EXPR will be an argument which must be evaluated.
10984 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10985 COMPOUND_EXPR in the chain will contain the tree for the simplified
10986 form of the builtin function call. */
10987
10988 static tree
10989 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10990 {
10991 if (!validate_arg (s1, POINTER_TYPE)
10992 || !validate_arg (s2, INTEGER_TYPE))
10993 return NULL_TREE;
10994 else
10995 {
10996 tree fn;
10997 const char *p1;
10998
10999 if (TREE_CODE (s2) != INTEGER_CST)
11000 return NULL_TREE;
11001
11002 p1 = c_getstr (s1);
11003 if (p1 != NULL)
11004 {
11005 char c;
11006 const char *r;
11007 tree tem;
11008
11009 if (target_char_cast (s2, &c))
11010 return NULL_TREE;
11011
11012 r = strrchr (p1, c);
11013
11014 if (r == NULL)
11015 return build_int_cst (TREE_TYPE (s1), 0);
11016
11017 /* Return an offset into the constant string argument. */
11018 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11019 return fold_convert_loc (loc, type, tem);
11020 }
11021
11022 if (! integer_zerop (s2))
11023 return NULL_TREE;
11024
11025 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11026 if (!fn)
11027 return NULL_TREE;
11028
11029 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11030 return build_call_expr_loc (loc, fn, 2, s1, s2);
11031 }
11032 }
11033
11034 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11035 to the call, and TYPE is its return type.
11036
11037 Return NULL_TREE if no simplification was possible, otherwise return the
11038 simplified form of the call as a tree.
11039
11040 The simplified form may be a constant or other expression which
11041 computes the same value, but in a more efficient manner (including
11042 calls to other builtin functions).
11043
11044 The call may contain arguments which need to be evaluated, but
11045 which are not useful to determine the result of the call. In
11046 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11047 COMPOUND_EXPR will be an argument which must be evaluated.
11048 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11049 COMPOUND_EXPR in the chain will contain the tree for the simplified
11050 form of the builtin function call. */
11051
11052 static tree
11053 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11054 {
11055 if (!validate_arg (s1, POINTER_TYPE)
11056 || !validate_arg (s2, POINTER_TYPE))
11057 return NULL_TREE;
11058 else
11059 {
11060 tree fn;
11061 const char *p1, *p2;
11062
11063 p2 = c_getstr (s2);
11064 if (p2 == NULL)
11065 return NULL_TREE;
11066
11067 p1 = c_getstr (s1);
11068 if (p1 != NULL)
11069 {
11070 const char *r = strpbrk (p1, p2);
11071 tree tem;
11072
11073 if (r == NULL)
11074 return build_int_cst (TREE_TYPE (s1), 0);
11075
11076 /* Return an offset into the constant string argument. */
11077 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11078 return fold_convert_loc (loc, type, tem);
11079 }
11080
11081 if (p2[0] == '\0')
11082 /* strpbrk(x, "") == NULL.
11083 Evaluate and ignore s1 in case it had side-effects. */
11084 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11085
11086 if (p2[1] != '\0')
11087 return NULL_TREE; /* Really call strpbrk. */
11088
11089 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11090 if (!fn)
11091 return NULL_TREE;
11092
11093 /* New argument list transforming strpbrk(s1, s2) to
11094 strchr(s1, s2[0]). */
11095 return build_call_expr_loc (loc, fn, 2, s1,
11096 build_int_cst (integer_type_node, p2[0]));
11097 }
11098 }
11099
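/* Illustrative examples of the strpbrk folds above:

     strpbrk (s, "")          ->  (s, NULL), with s still evaluated
     strpbrk (s, "c")         ->  strchr (s, 'c')
     strpbrk ("hello", "lo")  ->  &"hello"[2]  */
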
11100 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11101 to the call.
11102
11103 Return NULL_TREE if no simplification was possible, otherwise return the
11104 simplified form of the call as a tree.
11105
11106 The simplified form may be a constant or other expression which
11107 computes the same value, but in a more efficient manner (including
11108 calls to other builtin functions).
11109
11110 The call may contain arguments which need to be evaluated, but
11111 which are not useful to determine the result of the call. In
11112 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11113 COMPOUND_EXPR will be an argument which must be evaluated.
11114 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11115 COMPOUND_EXPR in the chain will contain the tree for the simplified
11116 form of the builtin function call. */
11117
11118 static tree
11119 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11120 {
11121 if (!validate_arg (s1, POINTER_TYPE)
11122 || !validate_arg (s2, POINTER_TYPE))
11123 return NULL_TREE;
11124 else
11125 {
11126 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11127
11128 /* If both arguments are constants, evaluate at compile-time. */
11129 if (p1 && p2)
11130 {
11131 const size_t r = strspn (p1, p2);
11132 return build_int_cst (size_type_node, r);
11133 }
11134
11135 /* If either argument is "", the result is 0.  */
11136 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11137 /* Evaluate and ignore both arguments in case either one has
11138 side-effects. */
11139 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11140 s1, s2);
11141 return NULL_TREE;
11142 }
11143 }
11144
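/* Illustrative examples of the strspn folds above:

     strspn ("abcba", "ab")  ->  (size_t) 2
     strspn (s, "")          ->  0, with both arguments still evaluated  */
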
11145 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11146 to the call.
11147
11148 Return NULL_TREE if no simplification was possible, otherwise return the
11149 simplified form of the call as a tree.
11150
11151 The simplified form may be a constant or other expression which
11152 computes the same value, but in a more efficient manner (including
11153 calls to other builtin functions).
11154
11155 The call may contain arguments which need to be evaluated, but
11156 which are not useful to determine the result of the call. In
11157 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11158 COMPOUND_EXPR will be an argument which must be evaluated.
11159 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11160 COMPOUND_EXPR in the chain will contain the tree for the simplified
11161 form of the builtin function call. */
11162
11163 static tree
11164 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11165 {
11166 if (!validate_arg (s1, POINTER_TYPE)
11167 || !validate_arg (s2, POINTER_TYPE))
11168 return NULL_TREE;
11169 else
11170 {
11171 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11172
11173 /* If both arguments are constants, evaluate at compile-time. */
11174 if (p1 && p2)
11175 {
11176 const size_t r = strcspn (p1, p2);
11177 return build_int_cst (size_type_node, r);
11178 }
11179
11180 /* If the first argument is "", the result is 0.  */
11181 if (p1 && *p1 == '\0')
11182 {
11183 /* Evaluate and ignore argument s2 in case it has
11184 side-effects. */
11185 return omit_one_operand_loc (loc, size_type_node,
11186 size_zero_node, s2);
11187 }
11188
11189 /* If the second argument is "", return __builtin_strlen(s1). */
11190 if (p2 && *p2 == '\0')
11191 {
11192 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11193
11194 /* If the replacement _DECL isn't initialized, don't do the
11195 transformation. */
11196 if (!fn)
11197 return NULL_TREE;
11198
11199 return build_call_expr_loc (loc, fn, 1, s1);
11200 }
11201 return NULL_TREE;
11202 }
11203 }
11204
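/* Illustrative examples of the strcspn folds above:

     strcspn ("hello", "lo")  ->  (size_t) 2
     strcspn ("", s)          ->  0, with s still evaluated
     strcspn (s, "")          ->  strlen (s)  */
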
11205 /* Fold the next_arg or va_start call EXP.  Returns true if an error was
11206    produced, false otherwise.  This lets callers avoid emitting the same
11207    error or warning two or three times.  */
11208
11209 bool
11210 fold_builtin_next_arg (tree exp, bool va_start_p)
11211 {
11212 tree fntype = TREE_TYPE (current_function_decl);
11213 int nargs = call_expr_nargs (exp);
11214 tree arg;
11215   /* There is a good chance the current input_location points inside the
11216      definition of the va_start macro (perhaps on the token for
11217      the builtin) in a system header, so warnings will not be emitted.
11218 Use the location in real source code. */
11219 source_location current_location =
11220 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11221 NULL);
11222
11223 if (!stdarg_p (fntype))
11224 {
11225 error ("%<va_start%> used in function with fixed args");
11226 return true;
11227 }
11228
11229 if (va_start_p)
11230 {
11231 if (nargs != 2)
11232 {
11233 error ("wrong number of arguments to function %<va_start%>");
11234 return true;
11235 }
11236 arg = CALL_EXPR_ARG (exp, 1);
11237 }
11238 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11239 when we checked the arguments and if needed issued a warning. */
11240 else
11241 {
11242 if (nargs == 0)
11243 {
11244 /* Evidently an out of date version of <stdarg.h>; can't validate
11245 va_start's second argument, but can still work as intended. */
11246 warning_at (current_location,
11247 OPT_Wvarargs,
11248 "%<__builtin_next_arg%> called without an argument");
11249 return true;
11250 }
11251 else if (nargs > 1)
11252 {
11253 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11254 return true;
11255 }
11256 arg = CALL_EXPR_ARG (exp, 0);
11257 }
11258
11259 if (TREE_CODE (arg) == SSA_NAME)
11260 arg = SSA_NAME_VAR (arg);
11261
11262 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11263 or __builtin_next_arg (0) the first time we see it, after checking
11264 the arguments and if needed issuing a warning. */
11265 if (!integer_zerop (arg))
11266 {
11267 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11268
11269 /* Strip off all nops for the sake of the comparison. This
11270 is not quite the same as STRIP_NOPS. It does more.
11271 We must also strip off INDIRECT_REF for C++ reference
11272 parameters. */
11273 while (CONVERT_EXPR_P (arg)
11274 || TREE_CODE (arg) == INDIRECT_REF)
11275 arg = TREE_OPERAND (arg, 0);
11276 if (arg != last_parm)
11277 {
11278 /* FIXME: Sometimes the tree optimizers hand us something other
11279    than the last argument even though the user did use the last
11280    argument.  We just warn and proceed, so wrong code may be
11281    generated because of it.  */
11283 warning_at (current_location,
11284 OPT_Wvarargs,
11285 "second parameter of %<va_start%> not last named argument");
11286 }
11287
11288 /* Undefined by C99 7.15.1.4p4 (va_start):
11289 "If the parameter parmN is declared with the register storage
11290 class, with a function or array type, or with a type that is
11291 not compatible with the type that results after application of
11292 the default argument promotions, the behavior is undefined."
11293 */
11294 else if (DECL_REGISTER (arg))
11295 {
11296 warning_at (current_location,
11297 OPT_Wvarargs,
11298 "undefined behaviour when second parameter of "
11299 "%<va_start%> is declared with %<register%> storage");
11300 }
11301
11302 /* We want to verify the second parameter just once before the tree
11303 optimizers are run and then avoid keeping it in the tree,
11304 as otherwise we could warn even for correct code like:
11305 void foo (int i, ...)
11306 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11307 if (va_start_p)
11308 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11309 else
11310 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11311 }
11312 return false;
11313 }
11314
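/* For example (illustrative), given

     void foo (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);    <- warned about: A is not the last named arg
       va_start (ap, b);    <- ok; rewritten to __builtin_va_start (ap, 0)
     }

   the second argument is verified once here and then replaced by zero
   so that later passes do not warn about correct code again.  */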
11315
11316 /* Expand a call EXP to __builtin_object_size. */
11317
11318 static rtx
11319 expand_builtin_object_size (tree exp)
11320 {
11321 tree ost;
11322 int object_size_type;
11323 tree fndecl = get_callee_fndecl (exp);
11324
11325 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11326 {
11327 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11328 exp, fndecl);
11329 expand_builtin_trap ();
11330 return const0_rtx;
11331 }
11332
11333 ost = CALL_EXPR_ARG (exp, 1);
11334 STRIP_NOPS (ost);
11335
11336 if (TREE_CODE (ost) != INTEGER_CST
11337 || tree_int_cst_sgn (ost) < 0
11338 || compare_tree_int (ost, 3) > 0)
11339 {
11340 error ("%Klast argument of %D is not integer constant between 0 and 3",
11341 exp, fndecl);
11342 expand_builtin_trap ();
11343 return const0_rtx;
11344 }
11345
11346 object_size_type = tree_to_shwi (ost);
11347
11348 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11349 }
11350
11351 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11352 FCODE is the BUILT_IN_* to use.
11353 Return NULL_RTX if we failed; the caller should emit a normal call,
11354 otherwise try to get the result in TARGET, if convenient (and in
11355 mode MODE if that's convenient). */
11356
11357 static rtx
11358 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11359 enum built_in_function fcode)
11360 {
11361 tree dest, src, len, size;
11362
11363 if (!validate_arglist (exp,
11364 POINTER_TYPE,
11365 fcode == BUILT_IN_MEMSET_CHK
11366 ? INTEGER_TYPE : POINTER_TYPE,
11367 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11368 return NULL_RTX;
11369
11370 dest = CALL_EXPR_ARG (exp, 0);
11371 src = CALL_EXPR_ARG (exp, 1);
11372 len = CALL_EXPR_ARG (exp, 2);
11373 size = CALL_EXPR_ARG (exp, 3);
11374
11375 if (! tree_fits_uhwi_p (size))
11376 return NULL_RTX;
11377
11378 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11379 {
11380 tree fn;
11381
11382 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11383 {
11384 warning_at (tree_nonartificial_location (exp),
11385 0, "%Kcall to %D will always overflow destination buffer",
11386 exp, get_callee_fndecl (exp));
11387 return NULL_RTX;
11388 }
11389
11390 fn = NULL_TREE;
11391 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11392 mem{cpy,pcpy,move,set} is available. */
11393 switch (fcode)
11394 {
11395 case BUILT_IN_MEMCPY_CHK:
11396 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11397 break;
11398 case BUILT_IN_MEMPCPY_CHK:
11399 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11400 break;
11401 case BUILT_IN_MEMMOVE_CHK:
11402 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11403 break;
11404 case BUILT_IN_MEMSET_CHK:
11405 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11406 break;
11407 default:
11408 break;
11409 }
11410
11411 if (! fn)
11412 return NULL_RTX;
11413
11414 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11415 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11416 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11417 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11418 }
11419 else if (fcode == BUILT_IN_MEMSET_CHK)
11420 return NULL_RTX;
11421 else
11422 {
11423 unsigned int dest_align = get_pointer_alignment (dest);
11424
11425 /* If DEST is not a pointer type, call the normal function. */
11426 if (dest_align == 0)
11427 return NULL_RTX;
11428
11429 /* If SRC and DEST are the same (and not volatile), do nothing. */
11430 if (operand_equal_p (src, dest, 0))
11431 {
11432 tree expr;
11433
11434 if (fcode != BUILT_IN_MEMPCPY_CHK)
11435 {
11436 /* Evaluate and ignore LEN in case it has side-effects. */
11437 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11438 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11439 }
11440
11441 expr = fold_build_pointer_plus (dest, len);
11442 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11443 }
11444
11445 /* __memmove_chk special case. */
11446 if (fcode == BUILT_IN_MEMMOVE_CHK)
11447 {
11448 unsigned int src_align = get_pointer_alignment (src);
11449
11450 if (src_align == 0)
11451 return NULL_RTX;
11452
11453 /* If src is categorized for a readonly section we can use
11454 normal __memcpy_chk. */
11455 if (readonly_data_expr (src))
11456 {
11457 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11458 if (!fn)
11459 return NULL_RTX;
11460 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11461 dest, src, len, size);
11462 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11463 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11464 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11465 }
11466 }
11467 return NULL_RTX;
11468 }
11469 }
11470
11471 /* Emit warning if a buffer overflow is detected at compile time. */
11472
11473 static void
11474 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11475 {
11476 int is_strlen = 0;
11477 tree len, size;
11478 location_t loc = tree_nonartificial_location (exp);
11479
11480 switch (fcode)
11481 {
11482 case BUILT_IN_STRCPY_CHK:
11483 case BUILT_IN_STPCPY_CHK:
11484 /* For __strcat_chk the warning will be emitted only if overflowing
11485 by at least strlen (dest) + 1 bytes. */
11486 case BUILT_IN_STRCAT_CHK:
11487 len = CALL_EXPR_ARG (exp, 1);
11488 size = CALL_EXPR_ARG (exp, 2);
11489 is_strlen = 1;
11490 break;
11491 case BUILT_IN_STRNCAT_CHK:
11492 case BUILT_IN_STRNCPY_CHK:
11493 case BUILT_IN_STPNCPY_CHK:
11494 len = CALL_EXPR_ARG (exp, 2);
11495 size = CALL_EXPR_ARG (exp, 3);
11496 break;
11497 case BUILT_IN_SNPRINTF_CHK:
11498 case BUILT_IN_VSNPRINTF_CHK:
11499 len = CALL_EXPR_ARG (exp, 1);
11500 size = CALL_EXPR_ARG (exp, 3);
11501 break;
11502 default:
11503 gcc_unreachable ();
11504 }
11505
11506 if (!len || !size)
11507 return;
11508
11509 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11510 return;
11511
11512 if (is_strlen)
11513 {
11514 len = c_strlen (len, 1);
11515 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11516 return;
11517 }
11518 else if (fcode == BUILT_IN_STRNCAT_CHK)
11519 {
11520 tree src = CALL_EXPR_ARG (exp, 1);
11521 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11522 return;
11523 src = c_strlen (src, 1);
11524 if (! src || ! tree_fits_uhwi_p (src))
11525 {
11526 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11527 exp, get_callee_fndecl (exp));
11528 return;
11529 }
11530 else if (tree_int_cst_lt (src, size))
11531 return;
11532 }
11533 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11534 return;
11535
11536 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11537 exp, get_callee_fndecl (exp));
11538 }
11539
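/* A sketch of what triggers the warning above (illustrative):

     char buf[4];
     __builtin___strcpy_chk (buf, "too long", sizeof buf);

   The known source length (8, not counting the nul) is not smaller
   than the object size (4), so the call is diagnosed as always
   overflowing the destination buffer.  */
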
11540 /* Emit warning if a buffer overflow is detected at compile time
11541 in __sprintf_chk/__vsprintf_chk calls. */
11542
11543 static void
11544 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11545 {
11546 tree size, len, fmt;
11547 const char *fmt_str;
11548 int nargs = call_expr_nargs (exp);
11549
11550 /* Verify the required arguments in the original call. */
11551
11552 if (nargs < 4)
11553 return;
11554 size = CALL_EXPR_ARG (exp, 2);
11555 fmt = CALL_EXPR_ARG (exp, 3);
11556
11557 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11558 return;
11559
11560 /* Check whether the format is a literal string constant. */
11561 fmt_str = c_getstr (fmt);
11562 if (fmt_str == NULL)
11563 return;
11564
11565 if (!init_target_chars ())
11566 return;
11567
11568 /* If the format doesn't contain % args or %%, we know its size. */
11569 if (strchr (fmt_str, target_percent) == 0)
11570 len = build_int_cstu (size_type_node, strlen (fmt_str));
11571 /* If the format is "%s" and first ... argument is a string literal,
11572 we know it too. */
11573 else if (fcode == BUILT_IN_SPRINTF_CHK
11574 && strcmp (fmt_str, target_percent_s) == 0)
11575 {
11576 tree arg;
11577
11578 if (nargs < 5)
11579 return;
11580 arg = CALL_EXPR_ARG (exp, 4);
11581 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11582 return;
11583
11584 len = c_strlen (arg, 1);
11585 if (!len || ! tree_fits_uhwi_p (len))
11586 return;
11587 }
11588 else
11589 return;
11590
11591 if (! tree_int_cst_lt (len, size))
11592 warning_at (tree_nonartificial_location (exp),
11593 0, "%Kcall to %D will always overflow destination buffer",
11594 exp, get_callee_fndecl (exp));
11595 }
11596
11597 /* Emit a warning if free is called with the address of a variable.  */
11598
11599 static void
11600 maybe_emit_free_warning (tree exp)
11601 {
11602 tree arg = CALL_EXPR_ARG (exp, 0);
11603
11604 STRIP_NOPS (arg);
11605 if (TREE_CODE (arg) != ADDR_EXPR)
11606 return;
11607
11608 arg = get_base_address (TREE_OPERAND (arg, 0));
11609 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11610 return;
11611
11612 if (SSA_VAR_P (arg))
11613 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11614 "%Kattempt to free a non-heap object %qD", exp, arg);
11615 else
11616 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11617 "%Kattempt to free a non-heap object", exp);
11618 }
11619
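/* E.g. (illustrative):

     int i;
     free (&i);

   is diagnosed with -Wfree-nonheap-object, while freeing a pointer
   obtained from malloc is not.  */
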
11620 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11621 if possible. */
11622
11623 static tree
11624 fold_builtin_object_size (tree ptr, tree ost)
11625 {
11626 unsigned HOST_WIDE_INT bytes;
11627 int object_size_type;
11628
11629 if (!validate_arg (ptr, POINTER_TYPE)
11630 || !validate_arg (ost, INTEGER_TYPE))
11631 return NULL_TREE;
11632
11633 STRIP_NOPS (ost);
11634
11635 if (TREE_CODE (ost) != INTEGER_CST
11636 || tree_int_cst_sgn (ost) < 0
11637 || compare_tree_int (ost, 3) > 0)
11638 return NULL_TREE;
11639
11640 object_size_type = tree_to_shwi (ost);
11641
11642 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11643 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11644 and (size_t) 0 for types 2 and 3. */
11645 if (TREE_SIDE_EFFECTS (ptr))
11646 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11647
11648 if (TREE_CODE (ptr) == ADDR_EXPR)
11649 {
11650 bytes = compute_builtin_object_size (ptr, object_size_type);
11651 if (wi::fits_to_tree_p (bytes, size_type_node))
11652 return build_int_cstu (size_type_node, bytes);
11653 }
11654 else if (TREE_CODE (ptr) == SSA_NAME)
11655 {
11656 /* If object size is not known yet, delay folding until
11657 later. Maybe subsequent passes will help determining
11658 it. */
11659 bytes = compute_builtin_object_size (ptr, object_size_type);
11660 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11661 && wi::fits_to_tree_p (bytes, size_type_node))
11662 return build_int_cstu (size_type_node, bytes);
11663 }
11664
11665 return NULL_TREE;
11666 }
11667
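/* Illustrative results of the folding above:

     char buf[64];
     __builtin_object_size (&buf[16], 0)   ->  48
     __builtin_object_size (p, 0)          ->  (size_t) -1 if P is unknown
     __builtin_object_size (p, 2)          ->  (size_t) 0 if P is unknown

   where the unknown cases are left to later passes or to expansion.  */
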
11668 /* Builtins with folding operations that operate on "..." arguments
11669 need special handling; we need to store the arguments in a convenient
11670 data structure before attempting any folding. Fortunately there are
11671 only a few builtins that fall into this category. FNDECL is the
11672 function, EXP is the CALL_EXPR for the call. */
11673
11674 static tree
11675 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11676 {
11677 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11678 tree ret = NULL_TREE;
11679
11680 switch (fcode)
11681 {
11682 case BUILT_IN_FPCLASSIFY:
11683 ret = fold_builtin_fpclassify (loc, args, nargs);
11684 break;
11685
11686 default:
11687 break;
11688 }
11689 if (ret)
11690 {
11691 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11692 SET_EXPR_LOCATION (ret, loc);
11693 TREE_NO_WARNING (ret) = 1;
11694 return ret;
11695 }
11696 return NULL_TREE;
11697 }
11698
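/* For instance (illustrative), <math.h> typically defines

     #define fpclassify(x) \
       __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, \
                             FP_SUBNORMAL, FP_ZERO, x)

   so fpclassify of a floating point constant folds to one of those
   five integer values at compile time.  */
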
11699 /* Initialize format string characters in the target charset. */
11700
11701 bool
11702 init_target_chars (void)
11703 {
11704 static bool init;
11705 if (!init)
11706 {
11707 target_newline = lang_hooks.to_target_charset ('\n');
11708 target_percent = lang_hooks.to_target_charset ('%');
11709 target_c = lang_hooks.to_target_charset ('c');
11710 target_s = lang_hooks.to_target_charset ('s');
11711 if (target_newline == 0 || target_percent == 0 || target_c == 0
11712 || target_s == 0)
11713 return false;
11714
11715 target_percent_c[0] = target_percent;
11716 target_percent_c[1] = target_c;
11717 target_percent_c[2] = '\0';
11718
11719 target_percent_s[0] = target_percent;
11720 target_percent_s[1] = target_s;
11721 target_percent_s[2] = '\0';
11722
11723 target_percent_s_newline[0] = target_percent;
11724 target_percent_s_newline[1] = target_s;
11725 target_percent_s_newline[2] = target_newline;
11726 target_percent_s_newline[3] = '\0';
11727
11728 init = true;
11729 }
11730 return true;
11731 }
11732
11733 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11734 and no overflow/underflow occurred. INEXACT is true if M was not
11735 exactly calculated. TYPE is the tree type for the result. This
11736    function assumes that the caller cleared the MPFR flags before
11737    computing M, so any flag set since then was raised by that
11738    computation.  Return NULL_TREE if any checks fail.  */
11739
11740 static tree
11741 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11742 {
11743 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11744 overflow/underflow occurred. If -frounding-math, proceed iff the
11745 result of calling FUNC was exact. */
11746 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11747 && (!flag_rounding_math || !inexact))
11748 {
11749 REAL_VALUE_TYPE rr;
11750
11751 real_from_mpfr (&rr, m, type, GMP_RNDN);
11752 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11753 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11754    but the mpfr_t is not, then we underflowed in the
11755 conversion. */
11756 if (real_isfinite (&rr)
11757 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11758 {
11759 REAL_VALUE_TYPE rmode;
11760
11761 real_convert (&rmode, TYPE_MODE (type), &rr);
11762 /* Proceed iff the specified mode can hold the value. */
11763 if (real_identical (&rmode, &rr))
11764 return build_real (type, rmode);
11765 }
11766 }
11767 return NULL_TREE;
11768 }
11769
11770 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11771 number and no overflow/underflow occurred. INEXACT is true if M
11772 was not exactly calculated. TYPE is the tree type for the result.
11773    This function assumes that the caller cleared the MPFR flags before
11774    computing M, so any flag set since then was raised by that
11775    computation.  Return NULL_TREE if any checks fail; if FORCE_CONVERT
11776    is true, the checks are bypassed.  */
11777
11778 static tree
11779 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11780 {
11781 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11782 overflow/underflow occurred. If -frounding-math, proceed iff the
11783 result of calling FUNC was exact. */
11784 if (force_convert
11785 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11786 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11787 && (!flag_rounding_math || !inexact)))
11788 {
11789 REAL_VALUE_TYPE re, im;
11790
11791 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11792 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11793 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11794 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11795    but the mpfr_t is not, then we underflowed in the
11796 conversion. */
11797 if (force_convert
11798 || (real_isfinite (&re) && real_isfinite (&im)
11799 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11800 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11801 {
11802 REAL_VALUE_TYPE re_mode, im_mode;
11803
11804 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11805 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11806 /* Proceed iff the specified mode can hold the value. */
11807 if (force_convert
11808 || (real_identical (&re_mode, &re)
11809 && real_identical (&im_mode, &im)))
11810 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11811 build_real (TREE_TYPE (type), im_mode));
11812 }
11813 }
11814 return NULL_TREE;
11815 }
11816
11817 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11818 FUNC on it and return the resulting value as a tree with type TYPE.
11819 If MIN and/or MAX are not NULL, then the supplied ARG must be
11820 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11821 acceptable values, otherwise they are not. The mpfr precision is
11822 set to the precision of TYPE. We assume that function FUNC returns
11823 zero if the result could be calculated exactly within the requested
11824 precision. */
11825
11826 static tree
11827 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11828 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11829 bool inclusive)
11830 {
11831 tree result = NULL_TREE;
11832
11833 STRIP_NOPS (arg);
11834
11835 /* To proceed, MPFR must exactly represent the target floating point
11836 format, which only happens when the target base equals two. */
11837 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11838 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11839 {
11840 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11841
11842 if (real_isfinite (ra)
11843 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11844 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11845 {
11846 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11847 const int prec = fmt->p;
11848 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11849 int inexact;
11850 mpfr_t m;
11851
11852 mpfr_init2 (m, prec);
11853 mpfr_from_real (m, ra, GMP_RNDN);
11854 mpfr_clear_flags ();
11855 inexact = func (m, m, rnd);
11856 result = do_mpfr_ckconv (m, type, inexact);
11857 mpfr_clear (m);
11858 }
11859 }
11860
11861 return result;
11862 }
11863
11864 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11865 FUNC on it and return the resulting value as a tree with type TYPE.
11866 The mpfr precision is set to the precision of TYPE. We assume that
11867 function FUNC returns zero if the result could be calculated
11868 exactly within the requested precision. */
11869
11870 static tree
11871 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11872 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11873 {
11874 tree result = NULL_TREE;
11875
11876 STRIP_NOPS (arg1);
11877 STRIP_NOPS (arg2);
11878
11879 /* To proceed, MPFR must exactly represent the target floating point
11880 format, which only happens when the target base equals two. */
11881 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11882 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11883 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11884 {
11885 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11886 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11887
11888 if (real_isfinite (ra1) && real_isfinite (ra2))
11889 {
11890 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11891 const int prec = fmt->p;
11892 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11893 int inexact;
11894 mpfr_t m1, m2;
11895
11896 mpfr_inits2 (prec, m1, m2, NULL);
11897 mpfr_from_real (m1, ra1, GMP_RNDN);
11898 mpfr_from_real (m2, ra2, GMP_RNDN);
11899 mpfr_clear_flags ();
11900 inexact = func (m1, m1, m2, rnd);
11901 result = do_mpfr_ckconv (m1, type, inexact);
11902 mpfr_clears (m1, m2, NULL);
11903 }
11904 }
11905
11906 return result;
11907 }
11908
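/* As an example (illustrative), atan2 (1.0, 1.0) reaches do_mpfr_arg2
   from fold_builtin_2 and is evaluated with mpfr_atan2 at the precision
   of the result type, folding to a constant approximating pi/4 provided
   the checks in do_mpfr_ckconv succeed (no overflow or underflow, and
   exactness when -frounding-math is in effect).  */
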
11909 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11910 FUNC on it and return the resulting value as a tree with type TYPE.
11911 The mpfr precision is set to the precision of TYPE. We assume that
11912 function FUNC returns zero if the result could be calculated
11913 exactly within the requested precision. */
11914
11915 static tree
11916 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11917 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11918 {
11919 tree result = NULL_TREE;
11920
11921 STRIP_NOPS (arg1);
11922 STRIP_NOPS (arg2);
11923 STRIP_NOPS (arg3);
11924
11925 /* To proceed, MPFR must exactly represent the target floating point
11926 format, which only happens when the target base equals two. */
11927 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11928 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11929 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11930 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11931 {
11932 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11933 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11934 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11935
11936 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11937 {
11938 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11939 const int prec = fmt->p;
11940 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11941 int inexact;
11942 mpfr_t m1, m2, m3;
11943
11944 mpfr_inits2 (prec, m1, m2, m3, NULL);
11945 mpfr_from_real (m1, ra1, GMP_RNDN);
11946 mpfr_from_real (m2, ra2, GMP_RNDN);
11947 mpfr_from_real (m3, ra3, GMP_RNDN);
11948 mpfr_clear_flags ();
11949 inexact = func (m1, m1, m2, m3, rnd);
11950 result = do_mpfr_ckconv (m1, type, inexact);
11951 mpfr_clears (m1, m2, m3, NULL);
11952 }
11953 }
11954
11955 return result;
11956 }
11957
11958 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11959 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11960 If ARG_SINP and ARG_COSP are NULL then the result is returned
11961 as a complex value.
11962 The type is taken from the type of ARG and is used for setting the
11963 precision of the calculation and results. */
11964
11965 static tree
11966 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11967 {
11968 tree const type = TREE_TYPE (arg);
11969 tree result = NULL_TREE;
11970
11971 STRIP_NOPS (arg);
11972
11973 /* To proceed, MPFR must exactly represent the target floating point
11974 format, which only happens when the target base equals two. */
11975 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11976 && TREE_CODE (arg) == REAL_CST
11977 && !TREE_OVERFLOW (arg))
11978 {
11979 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11980
11981 if (real_isfinite (ra))
11982 {
11983 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11984 const int prec = fmt->p;
11985 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11986 tree result_s, result_c;
11987 int inexact;
11988 mpfr_t m, ms, mc;
11989
11990 mpfr_inits2 (prec, m, ms, mc, NULL);
11991 mpfr_from_real (m, ra, GMP_RNDN);
11992 mpfr_clear_flags ();
11993 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11994 result_s = do_mpfr_ckconv (ms, type, inexact);
11995 result_c = do_mpfr_ckconv (mc, type, inexact);
11996 mpfr_clears (m, ms, mc, NULL);
11997 if (result_s && result_c)
11998 {
11999 /* If we are to return in a complex value do so. */
12000 if (!arg_sinp && !arg_cosp)
12001 return build_complex (build_complex_type (type),
12002 result_c, result_s);
12003
12004 /* Dereference the sin/cos pointer arguments. */
12005 arg_sinp = build_fold_indirect_ref (arg_sinp);
12006 arg_cosp = build_fold_indirect_ref (arg_cosp);
12007 /* Proceed iff valid pointer types were passed in.  */
12008 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12009 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12010 {
12011 /* Set the values. */
12012 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12013 result_s);
12014 TREE_SIDE_EFFECTS (result_s) = 1;
12015 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12016 result_c);
12017 TREE_SIDE_EFFECTS (result_c) = 1;
12018 /* Combine the assignments into a compound expr. */
12019 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12020 result_s, result_c));
12021 }
12022 }
12023 }
12024 }
12025 return result;
12026 }
12027
12028 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12029 two-argument mpfr order N Bessel function FUNC on them and return
12030 the resulting value as a tree with type TYPE. The mpfr precision
12031 is set to the precision of TYPE. We assume that function FUNC
12032 returns zero if the result could be calculated exactly within the
12033 requested precision. */
12034 static tree
12035 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12036 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12037 const REAL_VALUE_TYPE *min, bool inclusive)
12038 {
12039 tree result = NULL_TREE;
12040
12041 STRIP_NOPS (arg1);
12042 STRIP_NOPS (arg2);
12043
12044 /* To proceed, MPFR must exactly represent the target floating point
12045 format, which only happens when the target base equals two. */
12046 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12047 && tree_fits_shwi_p (arg1)
12048 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12049 {
12050 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12051 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12052
12053 if (n == (long)n
12054 && real_isfinite (ra)
12055 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12056 {
12057 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12058 const int prec = fmt->p;
12059 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12060 int inexact;
12061 mpfr_t m;
12062
12063 mpfr_init2 (m, prec);
12064 mpfr_from_real (m, ra, GMP_RNDN);
12065 mpfr_clear_flags ();
12066 inexact = func (m, n, m, rnd);
12067 result = do_mpfr_ckconv (m, type, inexact);
12068 mpfr_clear (m);
12069 }
12070 }
12071
12072 return result;
12073 }

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
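
/* Illustrative sketch only (standalone; compile with -lmpfr -lgmp):
   mpfr_remquo computes both outputs that the folder above splits
   between the returned remainder and *ARG_QUO.  The modulo step
   mirrors the INT_TYPE_SIZE truncation in the code above, here with a
   hard-coded 32-bit int for demonstration.  */
#if 0
#include <stdio.h>
#include <limits.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t r, x, y;
  long quo;

  mpfr_inits2 (53, r, x, y, (mpfr_ptr) 0);
  mpfr_set_d (x, 10.0, MPFR_RNDN);
  mpfr_set_d (y, 3.0, MPFR_RNDN);
  mpfr_remquo (r, &quo, x, y, MPFR_RNDN); /* r = 1.0, quo = 3.  */
  /* On an LP64 host, keep only the bits a 32-bit target int can hold.  */
  if (sizeof (quo) * CHAR_BIT > 32)
    quo %= (long) (1UL << 31);
  printf ("remquo(10, 3): rem = %.17g, quo = %ld\n",
          mpfr_get_d (r, MPFR_RNDN), quo);
  mpfr_clears (r, x, y, (mpfr_ptr) 0);
  return 0;
}
#endif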

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
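
/* Illustrative sketch only (standalone; compile with -lmpfr -lgmp):
   mpfr_lgamma returns log|Gamma(x)| and stores the sign of Gamma(x)
   in *signp, which is exactly the (-1,1) signgam value the folder
   above assigns through *ARG_SG.  Gamma(-2.5) is negative, so sg
   comes back as -1 here.  */
#if 0
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m;
  int sg;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, -2.5, MPFR_RNDN);
  mpfr_lgamma (m, &sg, m, MPFR_RNDN);   /* m = log|Gamma(-2.5)|.  */
  printf ("lgamma(-2.5) = %.17g, signgam = %d\n",
          mpfr_get_d (m, MPFR_RNDN), sg);
  mpfr_clear (m);
  return 0;
}
#endif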

/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt
            = REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}
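
/* Illustrative sketch only (standalone; compile with -lmpc -lmpfr
   -lgmp): the same init/compute/check sequence as above, with mpc_sin
   standing in for FUNC, i.e. what folding csin(1.0 + 2.0i) computes
   on the host.  */
#if 0
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t m;
  int inexact;

  mpc_init2 (m, 53);
  mpc_set_d_d (m, 1.0, 2.0, MPC_RNDNN); /* m = 1 + 2i.  */
  inexact = mpc_sin (m, m, MPC_RNDNN);  /* m = csin(m), in place.  */
  printf ("csin(1+2i) = %.17g + %.17gi (inexact = %d)\n",
          mpfr_get_d (mpc_realref (m), MPFR_RNDN),
          mpfr_get_d (mpc_imagref (m), MPFR_RNDN), inexact);
  mpc_clear (m);
  return 0;
}
#endif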

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt
            = REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
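
/* Illustrative sketch only (standalone; compile with -lmpc -lmpfr
   -lgmp): the two-operand analogue, with mpc_pow standing in for
   FUNC, as used when folding cpow().  */
#if 0
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t b, e;

  mpc_init2 (b, 53);
  mpc_init2 (e, 53);
  mpc_set_d_d (b, 0.0, 1.0, MPC_RNDNN); /* b = i.  */
  mpc_set_d_d (e, 2.0, 0.0, MPC_RNDNN); /* e = 2.  */
  mpc_pow (b, b, e, MPC_RNDNN);         /* b = i**2 = -1.  */
  printf ("cpow(i, 2) = %.17g + %.17gi\n",
          mpfr_get_d (mpc_realref (b), MPFR_RNDN),
          mpfr_get_d (mpc_imagref (b), MPFR_RNDN));
  mpc_clear (b);
  mpc_clear (e);
  return 0;
}
#endif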

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from the original call
                 to the expansion of the builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
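
/* For illustration (user-level code, and only a plausible example of
   the situation the comment above describes): a builtin call whose
   folded value is discarded.  The call below folds to the constant 5
   with its result unused; suppressing warnings around the folding
   keeps that internal rewrite from surfacing as a spurious
   "statement with no effect" diagnostic about the constant.  */
#if 0
#include <string.h>

void
example (void)
{
  strlen ("hello");             /* Folded to 5, result unused.  */
}
#endif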

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}
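
/* For illustration (user-level code; "my_memcpy" is just an example
   name): the asm-label extension is what feeds this function.
   Declaring a builtin with an assembler name redirects both explicit
   calls and the calls GCC itself emits, here for block moves, to the
   renamed entry point.  */
#if 0
extern void *memcpy (void *, const void *, __SIZE_TYPE__)
  __asm__ ("my_memcpy");
#endif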

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
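
/* For illustration (user-level code): two of the "expands to
   constants" cases above.  Neither call survives into the generated
   code; __builtin_constant_p folds to 0 or 1, and __builtin_expect
   folds to its first argument plus branch-probability data.  */
#if 0
int
simple_uses (int x)
{
  if (__builtin_constant_p (x))
    return 1;
  return __builtin_expect (x != 0, 1);
}
#endif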

/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
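
/* For illustration (user-level code): a typical "inexpensive" builtin
   from the list above.  __builtin_ctz usually expands to a single
   bit-scan instruction, so treating calls like this one as nearly
   free is what lets the inliner keep callers small.  */
#if 0
unsigned
lowest_set_bit (unsigned x)
{
  return 1u << __builtin_ctz (x);       /* Undefined for x == 0.  */
}
#endif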