/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "alias.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "internal-fn.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "gomp-constants.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, making sure each element's decl
   field is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Nonzero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or
   is a Cilk runtime helper name when Cilk Plus is enabled.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
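
/* A worked example of the M/N contract above: if &EXP can be shown to
   equal 8*k + 4 bytes for some unknown k, then M is 8 bytes and N is
   4 bytes, so *ALIGNP is set to 8 * BITS_PER_UNIT and *BITPOSP to
   4 * BITS_PER_UNIT.  */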

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
          ptr_bitmask *= BITS_PER_UNIT;
          align = ptr_bitmask & -ptr_bitmask;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
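
/* An example of the bitpos adjustment above: if get_object_alignment_1
   reports align == 64 and bitpos == 16 (both in bits), the object is
   only guaranteed to sit on a 16-bit boundary, and bitpos & -bitpos
   extracts exactly that guarantee by isolating the lowest set bit.  */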

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
                                          &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
        {
          unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
          if (trailing_zeros < HOST_BITS_PER_INT)
            {
              unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
              if (inner)
                align = MIN (align, inner);
            }
        }
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
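
/* For example, c_strlen applied to the tree for "hello" yields
   ssize_int (5), while &"hello"[2] with its known offset of 2 gives
   ssize_int (3); an offset of 7 would be out of bounds, producing the
   warning above and a NULL_TREE result that defers to strlen at
   runtime.  */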

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
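
/* As a concrete illustration: on a 32-bit little-endian target,
   c_readstr ("abcd", SImode) produces the constant 0x64636261 ('a' in
   the least significant byte), whereas a big-endian target yields
   0x61626364.  */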

/* Cast a target constant CST to the target's char type and, if the value
   fits into the host char type, return zero and store that value in the
   variable pointed to by P.  Otherwise return 1.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
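
/* The resulting buffer layout is thus: word 0 holds the frame pointer
   value supplied by targetm.builtin_setjmp_frame_value, word 1 holds
   the address of RECEIVER_LABEL, and the words starting at offset
   2 * GET_MODE_SIZE (Pmode) hold the machine-dependent SAVE_NONLOCAL
   stack save area.  */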

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
         previously set to the start of the virtual area corresponding to
         the stacked variables when we branched here and now needs to be
         adjusted to the actual hardware fp value.

         Assignments to virtual registers are converted by
         instantiate_virtual_regs into the corresponding assignment
         to the underlying register (fp in this case) that makes
         the original assignment true.
         So the following insn will actually be decrementing fp by
         STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
         Mark it used (so that the previous assignment remains live once
         the frame pointer is eliminated) and clobbered (to represent the
         implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}

/* Return true if more call expr arguments remain in ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis; otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis; any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink; if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
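
/* Typical uses, both of which appear below in this file:

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   requires exactly two pointer arguments, while

     validate_arglist (exp, POINTER_TYPE, 0)

   requires a leading pointer argument followed by anything.  */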

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
        return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
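
/* For example, the user-level call

     __builtin_prefetch (p, 1, 3);

   expands, on targets with a prefetch pattern, to a prefetch of *p for
   writing with maximum temporal locality; on other targets only the
   evaluation of p for side effects remains.  */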

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
\f
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (mem, reg)
                            : gen_rtx_SET (reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed it to us, not
     as we might have pretended it was passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
                                      crtl->args.pretend_args_size),
                       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
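
/* A minimal user-level sketch of how these builtins combine (the name
   target_fn and the size bound 64 are hypothetical, not taken from this
   file):

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*)()) target_fn, args, 64);
     __builtin_return (ret);

   This forwards the current function's incoming arguments to target_fn
   and returns its result untyped; the last argument is an assumed upper
   bound on the size of the pushed argument block.  */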
1571
1572 /* Perform an untyped call and save the state required to perform an
1573 untyped return of whatever value was returned by the given function. */
1574
1575 static rtx
1576 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1577 {
1578 int size, align, regno;
1579 machine_mode mode;
1580 rtx incoming_args, result, reg, dest, src;
1581 rtx_call_insn *call_insn;
1582 rtx old_stack_level = 0;
1583 rtx call_fusage = 0;
1584 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1585
1586 arguments = convert_memory_address (Pmode, arguments);
1587
1588 /* Create a block where the return registers can be saved. */
1589 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1590
1591 /* Fetch the arg pointer from the ARGUMENTS block. */
1592 incoming_args = gen_reg_rtx (Pmode);
1593 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1594 if (!STACK_GROWS_DOWNWARD)
1595 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1596 incoming_args, 0, OPTAB_LIB_WIDEN);
1597
1598 /* Push a new argument block and copy the arguments. Do not allow
1599 the (potential) memcpy call below to interfere with our stack
1600 manipulations. */
1601 do_pending_stack_adjust ();
1602 NO_DEFER_POP;
1603
1604 /* Save the stack with nonlocal if available. */
1605 if (targetm.have_save_stack_nonlocal ())
1606 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1607 else
1608 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1609
1610 /* Allocate a block of memory onto the stack and copy the memory
1611 arguments to the outgoing arguments address. We can pass TRUE
1612 as the 4th argument because we just saved the stack pointer
1613 and will restore it right after the call. */
1614 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1615
1616 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1617 may have already set current_function_calls_alloca to true.
1618 current_function_calls_alloca won't be set if argsize is zero,
1619 so we have to guarantee need_drap is true here. */
1620 if (SUPPORTS_STACK_ALIGNMENT)
1621 crtl->need_drap = true;
1622
1623 dest = virtual_outgoing_args_rtx;
1624 if (!STACK_GROWS_DOWNWARD)
1625 {
1626 if (CONST_INT_P (argsize))
1627 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1628 else
1629 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1630 }
1631 dest = gen_rtx_MEM (BLKmode, dest);
1632 set_mem_align (dest, PARM_BOUNDARY);
1633 src = gen_rtx_MEM (BLKmode, incoming_args);
1634 set_mem_align (src, PARM_BOUNDARY);
1635 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1636
1637 /* Refer to the argument block. */
1638 apply_args_size ();
1639 arguments = gen_rtx_MEM (BLKmode, arguments);
1640 set_mem_align (arguments, PARM_BOUNDARY);
1641
1642 /* Walk past the arg-pointer and structure value address. */
1643 size = GET_MODE_SIZE (Pmode);
1644 if (struct_value)
1645 size += GET_MODE_SIZE (Pmode);
1646
1647 /* Restore each of the registers previously saved. Make USE insns
1648 for each of these registers for use in making the call. */
1649 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1650 if ((mode = apply_args_mode[regno]) != VOIDmode)
1651 {
1652 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1653 if (size % align != 0)
1654 size = CEIL (size, align) * align;
1655 reg = gen_rtx_REG (mode, regno);
1656 emit_move_insn (reg, adjust_address (arguments, mode, size));
1657 use_reg (&call_fusage, reg);
1658 size += GET_MODE_SIZE (mode);
1659 }
1660
1661 /* Restore the structure value address unless this is passed as an
1662 "invisible" first argument. */
1663 size = GET_MODE_SIZE (Pmode);
1664 if (struct_value)
1665 {
1666 rtx value = gen_reg_rtx (Pmode);
1667 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1668 emit_move_insn (struct_value, value);
1669 if (REG_P (struct_value))
1670 use_reg (&call_fusage, struct_value);
1671 size += GET_MODE_SIZE (Pmode);
1672 }
1673
1674 /* All arguments and registers used for the call are set up by now! */
1675 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1676
1677 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1678 need be done there, and we don't want to load it into a register as an
1679 optimization, because prepare_call_address already did so if needed. */
1680 if (GET_CODE (function) != SYMBOL_REF)
1681 function = memory_address (FUNCTION_MODE, function);
1682
1683 /* Generate the actual call instruction and save the return value. */
1684 if (targetm.have_untyped_call ())
1685 {
1686 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1687 emit_call_insn (targetm.gen_untyped_call (mem, result,
1688 result_vector (1, result)));
1689 }
1690 else
1691 #ifdef HAVE_call_value
1692 if (HAVE_call_value)
1693 {
1694 rtx valreg = 0;
1695
1696 /* Locate the unique return register. It is not possible to
1697 express a call that sets more than one return register using
1698 call_value; use untyped_call for that. In fact, untyped_call
1699 only needs to save the return registers in the given block. */
1700 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1701 if ((mode = apply_result_mode[regno]) != VOIDmode)
1702 {
1703 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1704
1705 valreg = gen_rtx_REG (mode, regno);
1706 }
1707
1708 emit_call_insn (GEN_CALL_VALUE (valreg,
1709 gen_rtx_MEM (FUNCTION_MODE, function),
1710 const0_rtx, NULL_RTX, const0_rtx));
1711
1712 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1713 }
1714 else
1715 #endif
1716 gcc_unreachable ();
1717
1718 /* Find the CALL insn we just emitted, and attach the register usage
1719 information. */
1720 call_insn = last_call_insn ();
1721 add_function_usage_to (call_insn, call_fusage);
1722
1723 /* Restore the stack. */
1724 if (targetm.have_save_stack_nonlocal ())
1725 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1726 else
1727 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1728 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1729
1730 OK_DEFER_POP;
1731
1732 /* Return the address of the result block. */
1733 result = copy_addr_to_reg (XEXP (result, 0));
1734 return convert_memory_address (ptr_mode, result);
1735 }
1736
1737 /* Perform an untyped return. */
1738
1739 static void
1740 expand_builtin_return (rtx result)
1741 {
1742 int size, align, regno;
1743 machine_mode mode;
1744 rtx reg;
1745 rtx_insn *call_fusage = 0;
1746
1747 result = convert_memory_address (Pmode, result);
1748
1749 apply_result_size ();
1750 result = gen_rtx_MEM (BLKmode, result);
1751
1752 if (targetm.have_untyped_return ())
1753 {
1754 rtx vector = result_vector (0, result);
1755 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1756 emit_barrier ();
1757 return;
1758 }
1759
1760 /* Restore the return value and note that each value is used. */
1761 size = 0;
1762 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1763 if ((mode = apply_result_mode[regno]) != VOIDmode)
1764 {
1765 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1766 if (size % align != 0)
1767 size = CEIL (size, align) * align;
1768 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1769 emit_move_insn (reg, adjust_address (result, mode, size));
1770
1771 push_to_sequence (call_fusage);
1772 emit_use (reg);
1773 call_fusage = get_insns ();
1774 end_sequence ();
1775 size += GET_MODE_SIZE (mode);
1776 }
1777
1778 /* Put the USE insns before the return. */
1779 emit_insn (call_fusage);
1780
1781 /* Return whatever value was restored by jumping directly to the end
1782 of the function. */
1783 expand_naked_return ();
1784 }
1785
1786 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1787
1788 static enum type_class
1789 type_to_class (tree type)
1790 {
1791 switch (TREE_CODE (type))
1792 {
1793 case VOID_TYPE: return void_type_class;
1794 case INTEGER_TYPE: return integer_type_class;
1795 case ENUMERAL_TYPE: return enumeral_type_class;
1796 case BOOLEAN_TYPE: return boolean_type_class;
1797 case POINTER_TYPE: return pointer_type_class;
1798 case REFERENCE_TYPE: return reference_type_class;
1799 case OFFSET_TYPE: return offset_type_class;
1800 case REAL_TYPE: return real_type_class;
1801 case COMPLEX_TYPE: return complex_type_class;
1802 case FUNCTION_TYPE: return function_type_class;
1803 case METHOD_TYPE: return method_type_class;
1804 case RECORD_TYPE: return record_type_class;
1805 case UNION_TYPE:
1806 case QUAL_UNION_TYPE: return union_type_class;
1807 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1808 ? string_type_class : array_type_class);
1809 case LANG_TYPE: return lang_type_class;
1810 default: return no_type_class;
1811 }
1812 }
1813
1814 /* Expand a call EXP to __builtin_classify_type. */
1815
1816 static rtx
1817 expand_builtin_classify_type (tree exp)
1818 {
1819 if (call_expr_nargs (exp))
1820 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1821 return GEN_INT (no_type_class);
1822 }
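
/* Illustrative sketch (hypothetical user code): the constant returned
   above lets __builtin_classify_type be evaluated at compile time:

     int i;  double d;  char *p;
     __builtin_classify_type (i)   evaluates to integer_type_class
     __builtin_classify_type (d)   evaluates to real_type_class
     __builtin_classify_type (p)   evaluates to pointer_type_class

   and a call with no arguments yields no_type_class.  */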
1823
1824 /* This helper macro, meant to be used in mathfn_built_in_1 below,
1825 determines which among a set of three builtin math functions is
1826 appropriate for a given type mode. The `F' and `L' cases are
1827 automatically generated from the `double' case. */
1828 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1829 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1830 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1831 fcodel = BUILT_IN_MATHFN##L ; break;
1832 /* Similar to above, but appends _R after any F/L suffix. */
1833 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1834 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1835 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1836 fcodel = BUILT_IN_MATHFN##L_R ; break;
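
/* For reference, CASE_MATHFN (BUILT_IN_SIN) expands to:

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so a single entry below covers the double, float and long double
   variants of a math builtin.  */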
1837
1838 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1839 if available. If IMPLICIT is true use the implicit builtin declaration,
1840 otherwise use the explicit declaration. If we can't do the conversion,
1841 return zero. */
1842
1843 static tree
1844 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1845 {
1846 enum built_in_function fcode, fcodef, fcodel, fcode2;
1847
1848 switch (fn)
1849 {
1850 CASE_MATHFN (BUILT_IN_ACOS)
1851 CASE_MATHFN (BUILT_IN_ACOSH)
1852 CASE_MATHFN (BUILT_IN_ASIN)
1853 CASE_MATHFN (BUILT_IN_ASINH)
1854 CASE_MATHFN (BUILT_IN_ATAN)
1855 CASE_MATHFN (BUILT_IN_ATAN2)
1856 CASE_MATHFN (BUILT_IN_ATANH)
1857 CASE_MATHFN (BUILT_IN_CBRT)
1858 CASE_MATHFN (BUILT_IN_CEIL)
1859 CASE_MATHFN (BUILT_IN_CEXPI)
1860 CASE_MATHFN (BUILT_IN_COPYSIGN)
1861 CASE_MATHFN (BUILT_IN_COS)
1862 CASE_MATHFN (BUILT_IN_COSH)
1863 CASE_MATHFN (BUILT_IN_DREM)
1864 CASE_MATHFN (BUILT_IN_ERF)
1865 CASE_MATHFN (BUILT_IN_ERFC)
1866 CASE_MATHFN (BUILT_IN_EXP)
1867 CASE_MATHFN (BUILT_IN_EXP10)
1868 CASE_MATHFN (BUILT_IN_EXP2)
1869 CASE_MATHFN (BUILT_IN_EXPM1)
1870 CASE_MATHFN (BUILT_IN_FABS)
1871 CASE_MATHFN (BUILT_IN_FDIM)
1872 CASE_MATHFN (BUILT_IN_FLOOR)
1873 CASE_MATHFN (BUILT_IN_FMA)
1874 CASE_MATHFN (BUILT_IN_FMAX)
1875 CASE_MATHFN (BUILT_IN_FMIN)
1876 CASE_MATHFN (BUILT_IN_FMOD)
1877 CASE_MATHFN (BUILT_IN_FREXP)
1878 CASE_MATHFN (BUILT_IN_GAMMA)
1879 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1880 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1881 CASE_MATHFN (BUILT_IN_HYPOT)
1882 CASE_MATHFN (BUILT_IN_ILOGB)
1883 CASE_MATHFN (BUILT_IN_ICEIL)
1884 CASE_MATHFN (BUILT_IN_IFLOOR)
1885 CASE_MATHFN (BUILT_IN_INF)
1886 CASE_MATHFN (BUILT_IN_IRINT)
1887 CASE_MATHFN (BUILT_IN_IROUND)
1888 CASE_MATHFN (BUILT_IN_ISINF)
1889 CASE_MATHFN (BUILT_IN_J0)
1890 CASE_MATHFN (BUILT_IN_J1)
1891 CASE_MATHFN (BUILT_IN_JN)
1892 CASE_MATHFN (BUILT_IN_LCEIL)
1893 CASE_MATHFN (BUILT_IN_LDEXP)
1894 CASE_MATHFN (BUILT_IN_LFLOOR)
1895 CASE_MATHFN (BUILT_IN_LGAMMA)
1896 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1897 CASE_MATHFN (BUILT_IN_LLCEIL)
1898 CASE_MATHFN (BUILT_IN_LLFLOOR)
1899 CASE_MATHFN (BUILT_IN_LLRINT)
1900 CASE_MATHFN (BUILT_IN_LLROUND)
1901 CASE_MATHFN (BUILT_IN_LOG)
1902 CASE_MATHFN (BUILT_IN_LOG10)
1903 CASE_MATHFN (BUILT_IN_LOG1P)
1904 CASE_MATHFN (BUILT_IN_LOG2)
1905 CASE_MATHFN (BUILT_IN_LOGB)
1906 CASE_MATHFN (BUILT_IN_LRINT)
1907 CASE_MATHFN (BUILT_IN_LROUND)
1908 CASE_MATHFN (BUILT_IN_MODF)
1909 CASE_MATHFN (BUILT_IN_NAN)
1910 CASE_MATHFN (BUILT_IN_NANS)
1911 CASE_MATHFN (BUILT_IN_NEARBYINT)
1912 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1913 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1914 CASE_MATHFN (BUILT_IN_POW)
1915 CASE_MATHFN (BUILT_IN_POWI)
1916 CASE_MATHFN (BUILT_IN_POW10)
1917 CASE_MATHFN (BUILT_IN_REMAINDER)
1918 CASE_MATHFN (BUILT_IN_REMQUO)
1919 CASE_MATHFN (BUILT_IN_RINT)
1920 CASE_MATHFN (BUILT_IN_ROUND)
1921 CASE_MATHFN (BUILT_IN_SCALB)
1922 CASE_MATHFN (BUILT_IN_SCALBLN)
1923 CASE_MATHFN (BUILT_IN_SCALBN)
1924 CASE_MATHFN (BUILT_IN_SIGNBIT)
1925 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1926 CASE_MATHFN (BUILT_IN_SIN)
1927 CASE_MATHFN (BUILT_IN_SINCOS)
1928 CASE_MATHFN (BUILT_IN_SINH)
1929 CASE_MATHFN (BUILT_IN_SQRT)
1930 CASE_MATHFN (BUILT_IN_TAN)
1931 CASE_MATHFN (BUILT_IN_TANH)
1932 CASE_MATHFN (BUILT_IN_TGAMMA)
1933 CASE_MATHFN (BUILT_IN_TRUNC)
1934 CASE_MATHFN (BUILT_IN_Y0)
1935 CASE_MATHFN (BUILT_IN_Y1)
1936 CASE_MATHFN (BUILT_IN_YN)
1937
1938 default:
1939 return NULL_TREE;
1940 }
1941
1942 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1943 fcode2 = fcode;
1944 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1945 fcode2 = fcodef;
1946 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1947 fcode2 = fcodel;
1948 else
1949 return NULL_TREE;
1950
1951 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1952 return NULL_TREE;
1953
1954 return builtin_decl_explicit (fcode2);
1955 }
1956
1957 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1958
1959 tree
1960 mathfn_built_in (tree type, enum built_in_function fn)
1961 {
1962 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1963 }
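
/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) yields
   the implicit declaration of sinf, while a TYPE that is none of
   double, float or long double makes mathfn_built_in_1 return
   NULL_TREE.  */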
1964
1965 /* If errno must be maintained, expand the RTL to check if the result,
1966 TARGET, of a built-in function call, EXP, is NaN, and if so set
1967 errno to EDOM. */
1968
1969 static void
1970 expand_errno_check (tree exp, rtx target)
1971 {
1972 rtx_code_label *lab = gen_label_rtx ();
1973
1974 /* Test the result; if it is NaN, set errno=EDOM because
1975 the argument was not in the domain. */
1976 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1977 NULL_RTX, NULL, lab,
1978 /* The jump is very likely. */
1979 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1980
1981 #ifdef TARGET_EDOM
1982 /* If this built-in doesn't throw an exception, set errno directly. */
1983 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1984 {
1985 #ifdef GEN_ERRNO_RTX
1986 rtx errno_rtx = GEN_ERRNO_RTX;
1987 #else
1988 rtx errno_rtx
1989 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1990 #endif
1991 emit_move_insn (errno_rtx,
1992 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1993 emit_label (lab);
1994 return;
1995 }
1996 #endif
1997
1998 /* Make sure the library call isn't expanded as a tail call. */
1999 CALL_EXPR_TAILCALL (exp) = 0;
2000
2001 /* We can't set errno=EDOM directly; let the library call do it.
2002 Pop the arguments right away in case the call gets deleted. */
2003 NO_DEFER_POP;
2004 expand_call (exp, target, 0);
2005 OK_DEFER_POP;
2006 emit_label (lab);
2007 }
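
/* The self-comparison above relies on the IEEE property that only NaN
   compares unequal to itself.  Rendered as hypothetical C, the emitted
   sequence is roughly:

     r = sqrt (x);
     if (r == r) goto done;   (very likely: the result is not NaN)
     errno = EDOM;            (or re-issue the library call)
   done:;

   matching the TARGET_EDOM and library-call branches above.  */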
2008
2009 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2010 Return NULL_RTX if a normal call should be emitted rather than expanding
2011 the function in-line. EXP is the expression that is a call to the builtin
2012 function; if convenient, the result should be placed in TARGET.
2013 SUBTARGET may be used as the target for computing one of EXP's operands. */
2014
2015 static rtx
2016 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2017 {
2018 optab builtin_optab;
2019 rtx op0;
2020 rtx_insn *insns;
2021 tree fndecl = get_callee_fndecl (exp);
2022 machine_mode mode;
2023 bool errno_set = false;
2024 bool try_widening = false;
2025 tree arg;
2026
2027 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2028 return NULL_RTX;
2029
2030 arg = CALL_EXPR_ARG (exp, 0);
2031
2032 switch (DECL_FUNCTION_CODE (fndecl))
2033 {
2034 CASE_FLT_FN (BUILT_IN_SQRT):
2035 errno_set = ! tree_expr_nonnegative_p (arg);
2036 try_widening = true;
2037 builtin_optab = sqrt_optab;
2038 break;
2039 CASE_FLT_FN (BUILT_IN_EXP):
2040 errno_set = true; builtin_optab = exp_optab; break;
2041 CASE_FLT_FN (BUILT_IN_EXP10):
2042 CASE_FLT_FN (BUILT_IN_POW10):
2043 errno_set = true; builtin_optab = exp10_optab; break;
2044 CASE_FLT_FN (BUILT_IN_EXP2):
2045 errno_set = true; builtin_optab = exp2_optab; break;
2046 CASE_FLT_FN (BUILT_IN_EXPM1):
2047 errno_set = true; builtin_optab = expm1_optab; break;
2048 CASE_FLT_FN (BUILT_IN_LOGB):
2049 errno_set = true; builtin_optab = logb_optab; break;
2050 CASE_FLT_FN (BUILT_IN_LOG):
2051 errno_set = true; builtin_optab = log_optab; break;
2052 CASE_FLT_FN (BUILT_IN_LOG10):
2053 errno_set = true; builtin_optab = log10_optab; break;
2054 CASE_FLT_FN (BUILT_IN_LOG2):
2055 errno_set = true; builtin_optab = log2_optab; break;
2056 CASE_FLT_FN (BUILT_IN_LOG1P):
2057 errno_set = true; builtin_optab = log1p_optab; break;
2058 CASE_FLT_FN (BUILT_IN_ASIN):
2059 builtin_optab = asin_optab; break;
2060 CASE_FLT_FN (BUILT_IN_ACOS):
2061 builtin_optab = acos_optab; break;
2062 CASE_FLT_FN (BUILT_IN_TAN):
2063 builtin_optab = tan_optab; break;
2064 CASE_FLT_FN (BUILT_IN_ATAN):
2065 builtin_optab = atan_optab; break;
2066 CASE_FLT_FN (BUILT_IN_FLOOR):
2067 builtin_optab = floor_optab; break;
2068 CASE_FLT_FN (BUILT_IN_CEIL):
2069 builtin_optab = ceil_optab; break;
2070 CASE_FLT_FN (BUILT_IN_TRUNC):
2071 builtin_optab = btrunc_optab; break;
2072 CASE_FLT_FN (BUILT_IN_ROUND):
2073 builtin_optab = round_optab; break;
2074 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2075 builtin_optab = nearbyint_optab;
2076 if (flag_trapping_math)
2077 break;
2078 /* Else fall through and expand as rint. */
2079 CASE_FLT_FN (BUILT_IN_RINT):
2080 builtin_optab = rint_optab; break;
2081 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2082 builtin_optab = significand_optab; break;
2083 default:
2084 gcc_unreachable ();
2085 }
2086
2087 /* Make a suitable register to place result in. */
2088 mode = TYPE_MODE (TREE_TYPE (exp));
2089
2090 if (! flag_errno_math || ! HONOR_NANS (mode))
2091 errno_set = false;
2092
2093 /* Before working hard, check whether the instruction is available, but try
2094 to widen the mode for specific operations. */
2095 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2096 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2097 && (!errno_set || !optimize_insn_for_size_p ()))
2098 {
2099 rtx result = gen_reg_rtx (mode);
2100
2101 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2102 need to expand the argument again. This way, we will not perform
2103 side-effects more than once. */
2104 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2105
2106 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2107
2108 start_sequence ();
2109
2110 /* Compute into RESULT.
2111 Set RESULT to wherever the result comes back. */
2112 result = expand_unop (mode, builtin_optab, op0, result, 0);
2113
2114 if (result != 0)
2115 {
2116 if (errno_set)
2117 expand_errno_check (exp, result);
2118
2119 /* Output the entire sequence. */
2120 insns = get_insns ();
2121 end_sequence ();
2122 emit_insn (insns);
2123 return result;
2124 }
2125
2126 /* If we were unable to expand via the builtin, stop the sequence
2127 (without outputting the insns) and call the library function
2128 with the stabilized argument list. */
2129 end_sequence ();
2130 }
2131
2132 return expand_call (exp, target, target == const0_rtx);
2133 }
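
/* For instance, with -fno-math-errno (so ERRNO_SET stays false) and a
   target whose sqrt_optab pattern is available, a call to sqrt expands
   to a single square-root insn instead of a libcall; when expand_unop
   fails, the code above discards the sequence and falls back to
   expand_call.  */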
2134
2135 /* Expand a call to the builtin binary math functions (pow and atan2).
2136 Return NULL_RTX if a normal call should be emitted rather than expanding the
2137 function in-line. EXP is the expression that is a call to the builtin
2138 function; if convenient, the result should be placed in TARGET.
2139 SUBTARGET may be used as the target for computing one of EXP's
2140 operands. */
2141
2142 static rtx
2143 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2144 {
2145 optab builtin_optab;
2146 rtx op0, op1, result;
2147 rtx_insn *insns;
2148 int op1_type = REAL_TYPE;
2149 tree fndecl = get_callee_fndecl (exp);
2150 tree arg0, arg1;
2151 machine_mode mode;
2152 bool errno_set = true;
2153
2154 switch (DECL_FUNCTION_CODE (fndecl))
2155 {
2156 CASE_FLT_FN (BUILT_IN_SCALBN):
2157 CASE_FLT_FN (BUILT_IN_SCALBLN):
2158 CASE_FLT_FN (BUILT_IN_LDEXP):
2159 op1_type = INTEGER_TYPE;
2160 default:
2161 break;
2162 }
2163
2164 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2165 return NULL_RTX;
2166
2167 arg0 = CALL_EXPR_ARG (exp, 0);
2168 arg1 = CALL_EXPR_ARG (exp, 1);
2169
2170 switch (DECL_FUNCTION_CODE (fndecl))
2171 {
2172 CASE_FLT_FN (BUILT_IN_POW):
2173 builtin_optab = pow_optab; break;
2174 CASE_FLT_FN (BUILT_IN_ATAN2):
2175 builtin_optab = atan2_optab; break;
2176 CASE_FLT_FN (BUILT_IN_SCALB):
2177 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2178 return 0;
2179 builtin_optab = scalb_optab; break;
2180 CASE_FLT_FN (BUILT_IN_SCALBN):
2181 CASE_FLT_FN (BUILT_IN_SCALBLN):
2182 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2183 return 0;
2184 /* Fall through... */
2185 CASE_FLT_FN (BUILT_IN_LDEXP):
2186 builtin_optab = ldexp_optab; break;
2187 CASE_FLT_FN (BUILT_IN_FMOD):
2188 builtin_optab = fmod_optab; break;
2189 CASE_FLT_FN (BUILT_IN_REMAINDER):
2190 CASE_FLT_FN (BUILT_IN_DREM):
2191 builtin_optab = remainder_optab; break;
2192 default:
2193 gcc_unreachable ();
2194 }
2195
2196 /* Make a suitable register to place result in. */
2197 mode = TYPE_MODE (TREE_TYPE (exp));
2198
2199 /* Before working hard, check whether the instruction is available. */
2200 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2201 return NULL_RTX;
2202
2203 result = gen_reg_rtx (mode);
2204
2205 if (! flag_errno_math || ! HONOR_NANS (mode))
2206 errno_set = false;
2207
2208 if (errno_set && optimize_insn_for_size_p ())
2209 return 0;
2210
2211 /* Always stabilize the argument list. */
2212 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2213 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2214
2215 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2216 op1 = expand_normal (arg1);
2217
2218 start_sequence ();
2219
2220 /* Compute into RESULT.
2221 Set RESULT to wherever the result comes back. */
2222 result = expand_binop (mode, builtin_optab, op0, op1,
2223 result, 0, OPTAB_DIRECT);
2224
2225 /* If we were unable to expand via the builtin, stop the sequence
2226 (without outputting the insns) and call the library function
2227 with the stabilized argument list. */
2228 if (result == 0)
2229 {
2230 end_sequence ();
2231 return expand_call (exp, target, target == const0_rtx);
2232 }
2233
2234 if (errno_set)
2235 expand_errno_check (exp, result);
2236
2237 /* Output the entire sequence. */
2238 insns = get_insns ();
2239 end_sequence ();
2240 emit_insn (insns);
2241
2242 return result;
2243 }
2244
2245 /* Expand a call to the builtin trinary math functions (fma).
2246 Return NULL_RTX if a normal call should be emitted rather than expanding the
2247 function in-line. EXP is the expression that is a call to the builtin
2248 function; if convenient, the result should be placed in TARGET.
2249 SUBTARGET may be used as the target for computing one of EXP's
2250 operands. */
2251
2252 static rtx
2253 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2254 {
2255 optab builtin_optab;
2256 rtx op0, op1, op2, result;
2257 rtx_insn *insns;
2258 tree fndecl = get_callee_fndecl (exp);
2259 tree arg0, arg1, arg2;
2260 machine_mode mode;
2261
2262 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2263 return NULL_RTX;
2264
2265 arg0 = CALL_EXPR_ARG (exp, 0);
2266 arg1 = CALL_EXPR_ARG (exp, 1);
2267 arg2 = CALL_EXPR_ARG (exp, 2);
2268
2269 switch (DECL_FUNCTION_CODE (fndecl))
2270 {
2271 CASE_FLT_FN (BUILT_IN_FMA):
2272 builtin_optab = fma_optab; break;
2273 default:
2274 gcc_unreachable ();
2275 }
2276
2277 /* Make a suitable register to place result in. */
2278 mode = TYPE_MODE (TREE_TYPE (exp));
2279
2280 /* Before working hard, check whether the instruction is available. */
2281 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2282 return NULL_RTX;
2283
2284 result = gen_reg_rtx (mode);
2285
2286 /* Always stabilize the argument list. */
2287 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2288 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2289 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2290
2291 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2292 op1 = expand_normal (arg1);
2293 op2 = expand_normal (arg2);
2294
2295 start_sequence ();
2296
2297 /* Compute into RESULT.
2298 Set RESULT to wherever the result comes back. */
2299 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2300 result, 0);
2301
2302 /* If we were unable to expand via the builtin, stop the sequence
2303 (without outputting the insns) and call the library function
2304 with the stabilized argument list. */
2305 if (result == 0)
2306 {
2307 end_sequence ();
2308 return expand_call (exp, target, target == const0_rtx);
2309 }
2310
2311 /* Output the entire sequence. */
2312 insns = get_insns ();
2313 end_sequence ();
2314 emit_insn (insns);
2315
2316 return result;
2317 }
2318
2319 /* Expand a call to the builtin sin and cos math functions.
2320 Return NULL_RTX if a normal call should be emitted rather than expanding the
2321 function in-line. EXP is the expression that is a call to the builtin
2322 function; if convenient, the result should be placed in TARGET.
2323 SUBTARGET may be used as the target for computing one of EXP's
2324 operands. */
2325
2326 static rtx
2327 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2328 {
2329 optab builtin_optab;
2330 rtx op0;
2331 rtx_insn *insns;
2332 tree fndecl = get_callee_fndecl (exp);
2333 machine_mode mode;
2334 tree arg;
2335
2336 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2337 return NULL_RTX;
2338
2339 arg = CALL_EXPR_ARG (exp, 0);
2340
2341 switch (DECL_FUNCTION_CODE (fndecl))
2342 {
2343 CASE_FLT_FN (BUILT_IN_SIN):
2344 CASE_FLT_FN (BUILT_IN_COS):
2345 builtin_optab = sincos_optab; break;
2346 default:
2347 gcc_unreachable ();
2348 }
2349
2350 /* Make a suitable register to place result in. */
2351 mode = TYPE_MODE (TREE_TYPE (exp));
2352
2353 /* Check if the sincos insn is available; otherwise fall back
2354 to the sin or cos insn. */
2355 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2356 switch (DECL_FUNCTION_CODE (fndecl))
2357 {
2358 CASE_FLT_FN (BUILT_IN_SIN):
2359 builtin_optab = sin_optab; break;
2360 CASE_FLT_FN (BUILT_IN_COS):
2361 builtin_optab = cos_optab; break;
2362 default:
2363 gcc_unreachable ();
2364 }
2365
2366 /* Before working hard, check whether the instruction is available. */
2367 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2368 {
2369 rtx result = gen_reg_rtx (mode);
2370
2371 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2372 need to expand the argument again. This way, we will not perform
2373 side-effects more than once. */
2374 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2375
2376 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2377
2378 start_sequence ();
2379
2380 /* Compute into RESULT.
2381 Set RESULT to wherever the result comes back. */
2382 if (builtin_optab == sincos_optab)
2383 {
2384 int ok;
2385
2386 switch (DECL_FUNCTION_CODE (fndecl))
2387 {
2388 CASE_FLT_FN (BUILT_IN_SIN):
2389 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2390 break;
2391 CASE_FLT_FN (BUILT_IN_COS):
2392 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2393 break;
2394 default:
2395 gcc_unreachable ();
2396 }
2397 gcc_assert (ok);
2398 }
2399 else
2400 result = expand_unop (mode, builtin_optab, op0, result, 0);
2401
2402 if (result != 0)
2403 {
2404 /* Output the entire sequence. */
2405 insns = get_insns ();
2406 end_sequence ();
2407 emit_insn (insns);
2408 return result;
2409 }
2410
2411 /* If we were unable to expand via the builtin, stop the sequence
2412 (without outputting the insns) and call the library function
2413 with the stabilized argument list. */
2414 end_sequence ();
2415 }
2416
2417 return expand_call (exp, target, target == const0_rtx);
2418 }
2419
2420 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2421 return an RTL instruction code that implements the functionality.
2422 If that isn't possible or available, return CODE_FOR_nothing. */
2423
2424 static enum insn_code
2425 interclass_mathfn_icode (tree arg, tree fndecl)
2426 {
2427 bool errno_set = false;
2428 optab builtin_optab = unknown_optab;
2429 machine_mode mode;
2430
2431 switch (DECL_FUNCTION_CODE (fndecl))
2432 {
2433 CASE_FLT_FN (BUILT_IN_ILOGB):
2434 errno_set = true; builtin_optab = ilogb_optab; break;
2435 CASE_FLT_FN (BUILT_IN_ISINF):
2436 builtin_optab = isinf_optab; break;
2437 case BUILT_IN_ISNORMAL:
2438 case BUILT_IN_ISFINITE:
2439 CASE_FLT_FN (BUILT_IN_FINITE):
2440 case BUILT_IN_FINITED32:
2441 case BUILT_IN_FINITED64:
2442 case BUILT_IN_FINITED128:
2443 case BUILT_IN_ISINFD32:
2444 case BUILT_IN_ISINFD64:
2445 case BUILT_IN_ISINFD128:
2446 /* These builtins have no optabs (yet). */
2447 break;
2448 default:
2449 gcc_unreachable ();
2450 }
2451
2452 /* There's no easy way to detect the case we need to set EDOM. */
2453 if (flag_errno_math && errno_set)
2454 return CODE_FOR_nothing;
2455
2456 /* Optab mode depends on the mode of the input argument. */
2457 mode = TYPE_MODE (TREE_TYPE (arg));
2458
2459 if (builtin_optab)
2460 return optab_handler (builtin_optab, mode);
2461 return CODE_FOR_nothing;
2462 }
2463
2464 /* Expand a call to one of the builtin math functions that operate on
2465 a floating-point argument and produce an integer result (ilogb, isinf,
2466 isnan, etc).
2467 Return 0 if a normal call should be emitted rather than expanding the
2468 function in-line. EXP is the expression that is a call to the builtin
2469 function; if convenient, the result should be placed in TARGET. */
2470
2471 static rtx
2472 expand_builtin_interclass_mathfn (tree exp, rtx target)
2473 {
2474 enum insn_code icode = CODE_FOR_nothing;
2475 rtx op0;
2476 tree fndecl = get_callee_fndecl (exp);
2477 machine_mode mode;
2478 tree arg;
2479
2480 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2481 return NULL_RTX;
2482
2483 arg = CALL_EXPR_ARG (exp, 0);
2484 icode = interclass_mathfn_icode (arg, fndecl);
2485 mode = TYPE_MODE (TREE_TYPE (arg));
2486
2487 if (icode != CODE_FOR_nothing)
2488 {
2489 struct expand_operand ops[1];
2490 rtx_insn *last = get_last_insn ();
2491 tree orig_arg = arg;
2492
2493 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2494 need to expand the argument again. This way, we will not perform
2495 side-effects more than once. */
2496 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2497
2498 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2499
2500 if (mode != GET_MODE (op0))
2501 op0 = convert_to_mode (mode, op0, 0);
2502
2503 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2504 if (maybe_legitimize_operands (icode, 0, 1, ops)
2505 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2506 return ops[0].value;
2507
2508 delete_insns_since (last);
2509 CALL_EXPR_ARG (exp, 0) = orig_arg;
2510 }
2511
2512 return NULL_RTX;
2513 }
2514
2515 /* Expand a call to the builtin sincos math function.
2516 Return NULL_RTX if a normal call should be emitted rather than expanding the
2517 function in-line. EXP is the expression that is a call to the builtin
2518 function. */
2519
2520 static rtx
2521 expand_builtin_sincos (tree exp)
2522 {
2523 rtx op0, op1, op2, target1, target2;
2524 machine_mode mode;
2525 tree arg, sinp, cosp;
2526 int result;
2527 location_t loc = EXPR_LOCATION (exp);
2528 tree alias_type, alias_off;
2529
2530 if (!validate_arglist (exp, REAL_TYPE,
2531 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2532 return NULL_RTX;
2533
2534 arg = CALL_EXPR_ARG (exp, 0);
2535 sinp = CALL_EXPR_ARG (exp, 1);
2536 cosp = CALL_EXPR_ARG (exp, 2);
2537
2538 /* Make a suitable register to place result in. */
2539 mode = TYPE_MODE (TREE_TYPE (arg));
2540
2541 /* Check if sincos insn is available, otherwise emit the call. */
2542 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2543 return NULL_RTX;
2544
2545 target1 = gen_reg_rtx (mode);
2546 target2 = gen_reg_rtx (mode);
2547
2548 op0 = expand_normal (arg);
2549 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2550 alias_off = build_int_cst (alias_type, 0);
2551 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2552 sinp, alias_off));
2553 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2554 cosp, alias_off));
2555
2556 /* Compute into target1 and target2.
2557 Set TARGET to wherever the result comes back. */
2558 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2559 gcc_assert (result);
2560
2561 /* Move target1 and target2 to the memory locations indicated
2562 by op1 and op2. */
2563 emit_move_insn (op1, target1);
2564 emit_move_insn (op2, target2);
2565
2566 return const0_rtx;
2567 }
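
/* E.g. sincos (x, &s, &c) on a target with a sincos insn becomes one
   two-output operation: expand_twoval_unop computes both values, and
   the two emit_move_insn calls above store them through the SINP and
   COSP pointers.  */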
2568
2569 /* Expand a call to the internal cexpi builtin to the sincos math function.
2570 EXP is the expression that is a call to the builtin function; if convenient,
2571 the result should be placed in TARGET. */
2572
2573 static rtx
2574 expand_builtin_cexpi (tree exp, rtx target)
2575 {
2576 tree fndecl = get_callee_fndecl (exp);
2577 tree arg, type;
2578 machine_mode mode;
2579 rtx op0, op1, op2;
2580 location_t loc = EXPR_LOCATION (exp);
2581
2582 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2583 return NULL_RTX;
2584
2585 arg = CALL_EXPR_ARG (exp, 0);
2586 type = TREE_TYPE (arg);
2587 mode = TYPE_MODE (TREE_TYPE (arg));
2588
2589 /* Try expanding via a sincos optab; fall back to emitting a libcall
2590 to sincos or cexp. We are sure to have sincos or cexp because cexpi
2591 is only generated from sincos or cexp, or when either of them is available. */
2592 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2593 {
2594 op1 = gen_reg_rtx (mode);
2595 op2 = gen_reg_rtx (mode);
2596
2597 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2598
2599 /* Compute into op1 and op2. */
2600 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2601 }
2602 else if (targetm.libc_has_function (function_sincos))
2603 {
2604 tree call, fn = NULL_TREE;
2605 tree top1, top2;
2606 rtx op1a, op2a;
2607
2608 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2609 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2610 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2611 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2612 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2613 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2614 else
2615 gcc_unreachable ();
2616
2617 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2618 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2619 op1a = copy_addr_to_reg (XEXP (op1, 0));
2620 op2a = copy_addr_to_reg (XEXP (op2, 0));
2621 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2622 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2623
2624 /* Make sure not to fold the sincos call again. */
2625 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2626 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2627 call, 3, arg, top1, top2));
2628 }
2629 else
2630 {
2631 tree call, fn = NULL_TREE, narg;
2632 tree ctype = build_complex_type (type);
2633
2634 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2635 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2636 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2637 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2638 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2639 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2640 else
2641 gcc_unreachable ();
2642
2643 /* If we don't have a decl for cexp, create one. This is the
2644 friendliest fallback if the user calls __builtin_cexpi
2645 on a target without full C99 function support. */
2646 if (fn == NULL_TREE)
2647 {
2648 tree fntype;
2649 const char *name = NULL;
2650
2651 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2652 name = "cexpf";
2653 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2654 name = "cexp";
2655 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2656 name = "cexpl";
2657
2658 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2659 fn = build_fn_decl (name, fntype);
2660 }
2661
2662 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2663 build_real (type, dconst0), arg);
2664
2665 /* Make sure not to fold the cexp call again. */
2666 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2667 return expand_expr (build_call_nary (ctype, call, 1, narg),
2668 target, VOIDmode, EXPAND_NORMAL);
2669 }
2670
2671 /* Now build the proper return type. */
2672 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2673 make_tree (TREE_TYPE (arg), op2),
2674 make_tree (TREE_TYPE (arg), op1)),
2675 target, VOIDmode, EXPAND_NORMAL);
2676 }
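
/* Summary of the fallback chain implemented above for
   __builtin_cexpi (x):
   1. a sincos optab insn, with the two results recombined into a
      COMPLEX_EXPR;
   2. a call to the C library's sincos through two temporaries;
   3. a call to cexp on the complex value 0 + x*i, building a decl for
      cexp on the fly if the target lacks one.  */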
2677
2678 /* Conveniently construct a function call expression. FNDECL names the
2679 function to be called, N is the number of arguments, and the "..."
2680 parameters are the argument expressions. Unlike build_call_expr,
2681 this doesn't fold the call, so it always returns a CALL_EXPR. */
2682
2683 static tree
2684 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2685 {
2686 va_list ap;
2687 tree fntype = TREE_TYPE (fndecl);
2688 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2689
2690 va_start (ap, n);
2691 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2692 va_end (ap);
2693 SET_EXPR_LOCATION (fn, loc);
2694 return fn;
2695 }
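
/* Typical use, as seen later in this file:

     exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl,
                                  1, arg);

   which yields an unfolded CALL_EXPR invoking FALLBACK_FNDECL on
   ARG.  */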
2696
2697 /* Expand a call to one of the builtin rounding functions gcc defines
2698 as an extension (lfloor and lceil). As these are gcc extensions we
2699 do not need to worry about setting errno to EDOM.
2700 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2701 EXP is the expression that is a call to the builtin function;
2702 if convenient, the result should be placed in TARGET. */
2703
2704 static rtx
2705 expand_builtin_int_roundingfn (tree exp, rtx target)
2706 {
2707 convert_optab builtin_optab;
2708 rtx op0, tmp;
2709 rtx_insn *insns;
2710 tree fndecl = get_callee_fndecl (exp);
2711 enum built_in_function fallback_fn;
2712 tree fallback_fndecl;
2713 machine_mode mode;
2714 tree arg;
2715
2716 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2717 gcc_unreachable ();
2718
2719 arg = CALL_EXPR_ARG (exp, 0);
2720
2721 switch (DECL_FUNCTION_CODE (fndecl))
2722 {
2723 CASE_FLT_FN (BUILT_IN_ICEIL):
2724 CASE_FLT_FN (BUILT_IN_LCEIL):
2725 CASE_FLT_FN (BUILT_IN_LLCEIL):
2726 builtin_optab = lceil_optab;
2727 fallback_fn = BUILT_IN_CEIL;
2728 break;
2729
2730 CASE_FLT_FN (BUILT_IN_IFLOOR):
2731 CASE_FLT_FN (BUILT_IN_LFLOOR):
2732 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2733 builtin_optab = lfloor_optab;
2734 fallback_fn = BUILT_IN_FLOOR;
2735 break;
2736
2737 default:
2738 gcc_unreachable ();
2739 }
2740
2741 /* Make a suitable register to place result in. */
2742 mode = TYPE_MODE (TREE_TYPE (exp));
2743
2744 target = gen_reg_rtx (mode);
2745
2746 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2747 need to expand the argument again. This way, we will not perform
2748 side-effects more than once. */
2749 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2750
2751 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2752
2753 start_sequence ();
2754
2755 /* Compute into TARGET. */
2756 if (expand_sfix_optab (target, op0, builtin_optab))
2757 {
2758 /* Output the entire sequence. */
2759 insns = get_insns ();
2760 end_sequence ();
2761 emit_insn (insns);
2762 return target;
2763 }
2764
2765 /* If we were unable to expand via the builtin, stop the sequence
2766 (without outputting the insns). */
2767 end_sequence ();
2768
2769 /* Fall back to floating point rounding optab. */
2770 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2771
2772 /* For non-C99 targets we may end up without a fallback fndecl here
2773 if the user called __builtin_lfloor directly. In this case emit
2774 a call to the floor/ceil variants nevertheless. This should give
2775 the best user experience on targets without full C99 support. */
2776 if (fallback_fndecl == NULL_TREE)
2777 {
2778 tree fntype;
2779 const char *name = NULL;
2780
2781 switch (DECL_FUNCTION_CODE (fndecl))
2782 {
2783 case BUILT_IN_ICEIL:
2784 case BUILT_IN_LCEIL:
2785 case BUILT_IN_LLCEIL:
2786 name = "ceil";
2787 break;
2788 case BUILT_IN_ICEILF:
2789 case BUILT_IN_LCEILF:
2790 case BUILT_IN_LLCEILF:
2791 name = "ceilf";
2792 break;
2793 case BUILT_IN_ICEILL:
2794 case BUILT_IN_LCEILL:
2795 case BUILT_IN_LLCEILL:
2796 name = "ceill";
2797 break;
2798 case BUILT_IN_IFLOOR:
2799 case BUILT_IN_LFLOOR:
2800 case BUILT_IN_LLFLOOR:
2801 name = "floor";
2802 break;
2803 case BUILT_IN_IFLOORF:
2804 case BUILT_IN_LFLOORF:
2805 case BUILT_IN_LLFLOORF:
2806 name = "floorf";
2807 break;
2808 case BUILT_IN_IFLOORL:
2809 case BUILT_IN_LFLOORL:
2810 case BUILT_IN_LLFLOORL:
2811 name = "floorl";
2812 break;
2813 default:
2814 gcc_unreachable ();
2815 }
2816
2817 fntype = build_function_type_list (TREE_TYPE (arg),
2818 TREE_TYPE (arg), NULL_TREE);
2819 fallback_fndecl = build_fn_decl (name, fntype);
2820 }
2821
2822 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2823
2824 tmp = expand_normal (exp);
2825 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2826
2827 /* Truncate the result of floating point optab to integer
2828 via expand_fix (). */
2829 target = gen_reg_rtx (mode);
2830 expand_fix (target, tmp, 0);
2831
2832 return target;
2833 }
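
/* So on a target without an lceil pattern, __builtin_lceil (x) is
   lowered roughly as the hypothetical C

     (long) ceil (x)

   with the floating-point rounding done by the fallback call and the
   integer conversion done by expand_fix.  */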
2834
2835 /* Expand a call to one of the builtin math functions doing integer
2836 conversion (lrint).
2837 Return 0 if a normal call should be emitted rather than expanding the
2838 function in-line. EXP is the expression that is a call to the builtin
2839 function; if convenient, the result should be placed in TARGET. */
2840
2841 static rtx
2842 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2843 {
2844 convert_optab builtin_optab;
2845 rtx op0;
2846 rtx_insn *insns;
2847 tree fndecl = get_callee_fndecl (exp);
2848 tree arg;
2849 machine_mode mode;
2850 enum built_in_function fallback_fn = BUILT_IN_NONE;
2851
2852 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2853 gcc_unreachable ();
2854
2855 arg = CALL_EXPR_ARG (exp, 0);
2856
2857 switch (DECL_FUNCTION_CODE (fndecl))
2858 {
2859 CASE_FLT_FN (BUILT_IN_IRINT):
2860 fallback_fn = BUILT_IN_LRINT;
2861 /* FALLTHRU */
2862 CASE_FLT_FN (BUILT_IN_LRINT):
2863 CASE_FLT_FN (BUILT_IN_LLRINT):
2864 builtin_optab = lrint_optab;
2865 break;
2866
2867 CASE_FLT_FN (BUILT_IN_IROUND):
2868 fallback_fn = BUILT_IN_LROUND;
2869 /* FALLTHRU */
2870 CASE_FLT_FN (BUILT_IN_LROUND):
2871 CASE_FLT_FN (BUILT_IN_LLROUND):
2872 builtin_optab = lround_optab;
2873 break;
2874
2875 default:
2876 gcc_unreachable ();
2877 }
2878
2879 /* There's no easy way to detect the case we need to set EDOM. */
2880 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2881 return NULL_RTX;
2882
2883 /* Make a suitable register to place result in. */
2884 mode = TYPE_MODE (TREE_TYPE (exp));
2885
2886 /* If errno handling is not required, try expanding inline via the optab. */
2887 if (!flag_errno_math)
2888 {
2889 rtx result = gen_reg_rtx (mode);
2890
2891 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2892 need to expand the argument again. This way, we will not perform
2893 side-effects more than once. */
2894 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2895
2896 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2897
2898 start_sequence ();
2899
2900 if (expand_sfix_optab (result, op0, builtin_optab))
2901 {
2902 /* Output the entire sequence. */
2903 insns = get_insns ();
2904 end_sequence ();
2905 emit_insn (insns);
2906 return result;
2907 }
2908
2909 /* If we were unable to expand via the builtin, stop the sequence
2910 (without outputting the insns) and call the library function
2911 with the stabilized argument list. */
2912 end_sequence ();
2913 }
2914
2915 if (fallback_fn != BUILT_IN_NONE)
2916 {
2917 /* Fall back to rounding to long int. Use implicit_p 0 -- for non-C99
2918 targets, (int) round (x) should never be transformed into
2919 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2920 a call to lround in the hope that the target provides at least some
2921 C99 functions. This should give the best user experience on
2922 targets without full C99 support. */
2923 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2924 fallback_fn, 0);
2925
2926 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2927 fallback_fndecl, 1, arg);
2928
2929 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2930 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2931 return convert_to_mode (mode, target, 0);
2932 }
2933
2934 return expand_call (exp, target, target == const0_rtx);
2935 }
2936
2937 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2938 a normal call should be emitted rather than expanding the function
2939 in-line. EXP is the expression that is a call to the builtin
2940 function; if convenient, the result should be placed in TARGET. */
2941
2942 static rtx
2943 expand_builtin_powi (tree exp, rtx target)
2944 {
2945 tree arg0, arg1;
2946 rtx op0, op1;
2947 machine_mode mode;
2948 machine_mode mode2;
2949
2950 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2951 return NULL_RTX;
2952
2953 arg0 = CALL_EXPR_ARG (exp, 0);
2954 arg1 = CALL_EXPR_ARG (exp, 1);
2955 mode = TYPE_MODE (TREE_TYPE (exp));
2956
2957 /* Emit a libcall to libgcc. */
2958
2959 /* Mode of the 2nd argument must match that of an int. */
2960 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2961
2962 if (target == NULL_RTX)
2963 target = gen_reg_rtx (mode);
2964
2965 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2966 if (GET_MODE (op0) != mode)
2967 op0 = convert_to_mode (mode, op0, 0);
2968 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2969 if (GET_MODE (op1) != mode2)
2970 op1 = convert_to_mode (mode2, op1, 0);
2971
2972 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2973 target, LCT_CONST, mode, 2,
2974 op0, mode, op1, mode2);
2975
2976 return target;
2977 }
2978
2979 /* Expand expression EXP which is a call to the strlen builtin. Return
2980 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2981 try to get the result in TARGET, if convenient. */
2982
2983 static rtx
2984 expand_builtin_strlen (tree exp, rtx target,
2985 machine_mode target_mode)
2986 {
2987 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2988 return NULL_RTX;
2989 else
2990 {
2991 struct expand_operand ops[4];
2992 rtx pat;
2993 tree len;
2994 tree src = CALL_EXPR_ARG (exp, 0);
2995 rtx src_reg;
2996 rtx_insn *before_strlen;
2997 machine_mode insn_mode = target_mode;
2998 enum insn_code icode = CODE_FOR_nothing;
2999 unsigned int align;
3000
3001 /* If the length can be computed at compile-time, return it. */
3002 len = c_strlen (src, 0);
3003 if (len)
3004 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3005
3006 /* If the length can be computed at compile-time and is a constant
3007 integer, but there are side-effects in src, evaluate
3008 src for side-effects, then return len.
3009 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3010 can be optimized into: i++; x = 3; */
3011 len = c_strlen (src, 1);
3012 if (len && TREE_CODE (len) == INTEGER_CST)
3013 {
3014 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3015 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3016 }
3017
3018 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3019
3020 /* If SRC is not a pointer type, don't do this operation inline. */
3021 if (align == 0)
3022 return NULL_RTX;
3023
3024 /* Bail out if we can't compute strlen in the right mode. */
3025 while (insn_mode != VOIDmode)
3026 {
3027 icode = optab_handler (strlen_optab, insn_mode);
3028 if (icode != CODE_FOR_nothing)
3029 break;
3030
3031 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3032 }
3033 if (insn_mode == VOIDmode)
3034 return NULL_RTX;
3035
3036 /* Make a place to hold the source address. We will not expand
3037 the actual source until we are sure that the expansion will
3038 not fail -- there are trees that cannot be expanded twice. */
3039 src_reg = gen_reg_rtx (Pmode);
3040
3041 /* Mark the beginning of the strlen sequence so we can emit the
3042 source operand later. */
3043 before_strlen = get_last_insn ();
3044
3045 create_output_operand (&ops[0], target, insn_mode);
3046 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3047 create_integer_operand (&ops[2], 0);
3048 create_integer_operand (&ops[3], align);
3049 if (!maybe_expand_insn (icode, 4, ops))
3050 return NULL_RTX;
3051
3052 /* Now that we are assured of success, expand the source. */
3053 start_sequence ();
3054 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3055 if (pat != src_reg)
3056 {
3057 #ifdef POINTERS_EXTEND_UNSIGNED
3058 if (GET_MODE (pat) != Pmode)
3059 pat = convert_to_mode (Pmode, pat,
3060 POINTERS_EXTEND_UNSIGNED);
3061 #endif
3062 emit_move_insn (src_reg, pat);
3063 }
3064 pat = get_insns ();
3065 end_sequence ();
3066
3067 if (before_strlen)
3068 emit_insn_after (pat, before_strlen);
3069 else
3070 emit_insn_before (pat, get_insns ());
3071
3072 /* Return the value in the proper mode for this function. */
3073 if (GET_MODE (ops[0].value) == target_mode)
3074 target = ops[0].value;
3075 else if (target != 0)
3076 convert_move (target, ops[0].value, 0);
3077 else
3078 target = convert_to_mode (target_mode, ops[0].value, 0);
3079
3080 return target;
3081 }
3082 }
3083
3084 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3085 bytes from constant string DATA + OFFSET and return it as target
3086 constant. */
3087
3088 static rtx
3089 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3090 machine_mode mode)
3091 {
3092 const char *str = (const char *) data;
3093
3094 gcc_assert (offset >= 0
3095 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3096 <= strlen (str) + 1));
3097
3098 return c_readstr (str + offset, mode);
3099 }
3100
3101 /* LEN specifies the length of the block for the memcpy/memset operation.
3102 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3103 In some cases we can make a very likely guess about the maximum size,
3104 which we then store in PROBABLE_MAX_SIZE. */
3105
3106 static void
3107 determine_block_size (tree len, rtx len_rtx,
3108 unsigned HOST_WIDE_INT *min_size,
3109 unsigned HOST_WIDE_INT *max_size,
3110 unsigned HOST_WIDE_INT *probable_max_size)
3111 {
3112 if (CONST_INT_P (len_rtx))
3113 {
3114 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3115 return;
3116 }
3117 else
3118 {
3119 wide_int min, max;
3120 enum value_range_type range_type = VR_UNDEFINED;
3121
3122 /* Determine bounds from the type. */
3123 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3124 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3125 else
3126 *min_size = 0;
3127 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3128 *probable_max_size = *max_size
3129 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3130 else
3131 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3132
3133 if (TREE_CODE (len) == SSA_NAME)
3134 range_type = get_range_info (len, &min, &max);
3135 if (range_type == VR_RANGE)
3136 {
3137 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3138 *min_size = min.to_uhwi ();
3139 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3140 *probable_max_size = *max_size = max.to_uhwi ();
3141 }
3142 else if (range_type == VR_ANTI_RANGE)
3143 {
3144 /* An anti-range 0...N lets us determine that the minimum size is N+1. */
3145 if (min == 0)
3146 {
3147 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3148 *min_size = max.to_uhwi () + 1;
3149 }
3150 /* Code like
3151
3152 int n;
3153 if (n < 100)
3154 memcpy (a, b, n)
3155
3156 produces an anti-range allowing negative values of N. We can
3157 still use that information to guess that N is not negative.
3158 */
3159 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3160 *probable_max_size = min.to_uhwi () - 1;
3161 }
3162 }
3163 gcc_checking_assert (*max_size <=
3164 (unsigned HOST_WIDE_INT)
3165 GET_MODE_MASK (GET_MODE (len_rtx)));
3166 }
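
/* Worked example for the anti-range case above: in

     int n;
     ...
     if (n < 100)
       memcpy (a, b, n);

   the length is known not to lie in [100, huge], so PROBABLE_MAX_SIZE
   becomes 99, while MAX_SIZE keeps the full mode mask because a
   negative N cannot be ruled out.  */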
3167
3168 /* Helper function to do the actual work for expand_builtin_memcpy. */
3169
3170 static rtx
3171 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3172 {
3173 const char *src_str;
3174 unsigned int src_align = get_pointer_alignment (src);
3175 unsigned int dest_align = get_pointer_alignment (dest);
3176 rtx dest_mem, src_mem, dest_addr, len_rtx;
3177 HOST_WIDE_INT expected_size = -1;
3178 unsigned int expected_align = 0;
3179 unsigned HOST_WIDE_INT min_size;
3180 unsigned HOST_WIDE_INT max_size;
3181 unsigned HOST_WIDE_INT probable_max_size;
3182
3183 /* If DEST is not a pointer type, call the normal function. */
3184 if (dest_align == 0)
3185 return NULL_RTX;
3186
3187 /* If SRC is not a pointer type, don't do this
3188 operation in-line. */
3189 if (src_align == 0)
3190 return NULL_RTX;
3191
3192 if (currently_expanding_gimple_stmt)
3193 stringop_block_profile (currently_expanding_gimple_stmt,
3194 &expected_align, &expected_size);
3195
3196 if (expected_align < dest_align)
3197 expected_align = dest_align;
3198 dest_mem = get_memory_rtx (dest, len);
3199 set_mem_align (dest_mem, dest_align);
3200 len_rtx = expand_normal (len);
3201 determine_block_size (len, len_rtx, &min_size, &max_size,
3202 &probable_max_size);
3203 src_str = c_getstr (src);
3204
3205 /* If SRC is a string constant and block move would be done
3206 by pieces, we can avoid loading the string from memory
3207 and instead store only the computed constants. */
3208 if (src_str
3209 && CONST_INT_P (len_rtx)
3210 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3211 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3212 CONST_CAST (char *, src_str),
3213 dest_align, false))
3214 {
3215 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3216 builtin_memcpy_read_str,
3217 CONST_CAST (char *, src_str),
3218 dest_align, false, 0);
3219 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3220 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3221 return dest_mem;
3222 }
3223
3224 src_mem = get_memory_rtx (src, len);
3225 set_mem_align (src_mem, src_align);
3226
3227 /* Copy word part most expediently. */
3228 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3229 CALL_EXPR_TAILCALL (exp)
3230 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3231 expected_align, expected_size,
3232 min_size, max_size, probable_max_size);
3233
3234 if (dest_addr == 0)
3235 {
3236 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3237 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3238 }
3239
3240 return dest_addr;
3241 }
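
/* E.g. memcpy (buf, "hi", 3) with a constant length and sufficiently
   aligned BUF is emitted by the store_by_pieces path above as
   immediate stores of the string bytes, with no library call and no
   load from the string constant.  */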
3242
3243 /* Expand a call EXP to the memcpy builtin.
3244 Return NULL_RTX if we failed; the caller should emit a normal call,
3245 otherwise try to get the result in TARGET, if convenient (and in
3246 mode MODE if that's convenient). */
3247
3248 static rtx
3249 expand_builtin_memcpy (tree exp, rtx target)
3250 {
3251 if (!validate_arglist (exp,
3252 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3253 return NULL_RTX;
3254 else
3255 {
3256 tree dest = CALL_EXPR_ARG (exp, 0);
3257 tree src = CALL_EXPR_ARG (exp, 1);
3258 tree len = CALL_EXPR_ARG (exp, 2);
3259 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3260 }
3261 }
3262
3263 /* Expand an instrumented call EXP to the memcpy builtin.
3264 Return NULL_RTX if we failed; the caller should emit a normal call,
3265 otherwise try to get the result in TARGET, if convenient (and in
3266 mode MODE if that's convenient). */
3267
3268 static rtx
3269 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3270 {
3271 if (!validate_arglist (exp,
3272 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3273 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3274 INTEGER_TYPE, VOID_TYPE))
3275 return NULL_RTX;
3276 else
3277 {
3278 tree dest = CALL_EXPR_ARG (exp, 0);
3279 tree src = CALL_EXPR_ARG (exp, 2);
3280 tree len = CALL_EXPR_ARG (exp, 4);
3281 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3282
3283 /* Return src bounds with the result. */
3284 if (res)
3285 {
3286 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3287 expand_normal (CALL_EXPR_ARG (exp, 1)));
3288 res = chkp_join_splitted_slot (res, bnd);
3289 }
3290 return res;
3291 }
3292 }
3293
3294 /* Expand a call EXP to the mempcpy builtin.
3295 Return NULL_RTX if we failed; the caller should emit a normal call,
3296 otherwise try to get the result in TARGET, if convenient (and in
3297 mode MODE if that's convenient). If ENDP is 0 return the
3298 destination pointer, if ENDP is 1 return the end pointer ala
3299 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3300 stpcpy. */
3301
3302 static rtx
3303 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3304 {
3305 if (!validate_arglist (exp,
3306 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3307 return NULL_RTX;
3308 else
3309 {
3310 tree dest = CALL_EXPR_ARG (exp, 0);
3311 tree src = CALL_EXPR_ARG (exp, 1);
3312 tree len = CALL_EXPR_ARG (exp, 2);
3313 return expand_builtin_mempcpy_args (dest, src, len,
3314 target, mode, /*endp=*/ 1,
3315 exp);
3316 }
3317 }
3318
3319 /* Expand an instrumented call EXP to the mempcpy builtin.
3320 Return NULL_RTX if we failed; the caller should emit a normal call,
3321 otherwise try to get the result in TARGET, if convenient (and in
3322 mode MODE if that's convenient). */
3323
3324 static rtx
3325 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3326 {
3327 if (!validate_arglist (exp,
3328 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3329 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3330 INTEGER_TYPE, VOID_TYPE))
3331 return NULL_RTX;
3332 else
3333 {
3334 tree dest = CALL_EXPR_ARG (exp, 0);
3335 tree src = CALL_EXPR_ARG (exp, 2);
3336 tree len = CALL_EXPR_ARG (exp, 4);
3337 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3338 mode, 1, exp);
3339
3340 /* Return src bounds with the result. */
3341 if (res)
3342 {
3343 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3344 expand_normal (CALL_EXPR_ARG (exp, 1)));
3345 res = chkp_join_splitted_slot (res, bnd);
3346 }
3347 return res;
3348 }
3349 }
3350
3351 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3352 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3353 so that this can also be called without constructing an actual CALL_EXPR.
3354 The other arguments and return value are the same as for
3355 expand_builtin_mempcpy. */
3356
3357 static rtx
3358 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3359 rtx target, machine_mode mode, int endp,
3360 tree orig_exp)
3361 {
3362 tree fndecl = get_callee_fndecl (orig_exp);
3363
3364 /* If return value is ignored, transform mempcpy into memcpy. */
3365 if (target == const0_rtx
3366 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3367 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3368 {
3369 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3370 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3371 dest, src, len);
3372 return expand_expr (result, target, mode, EXPAND_NORMAL);
3373 }
3374 else if (target == const0_rtx
3375 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3376 {
3377 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3378 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3379 dest, src, len);
3380 return expand_expr (result, target, mode, EXPAND_NORMAL);
3381 }
3382 else
3383 {
3384 const char *src_str;
3385 unsigned int src_align = get_pointer_alignment (src);
3386 unsigned int dest_align = get_pointer_alignment (dest);
3387 rtx dest_mem, src_mem, len_rtx;
3388
3389 /* If either SRC or DEST is not a pointer type, don't do this
3390 operation in-line. */
3391 if (dest_align == 0 || src_align == 0)
3392 return NULL_RTX;
3393
3394 /* If LEN is not constant, call the normal function. */
3395 if (! tree_fits_uhwi_p (len))
3396 return NULL_RTX;
3397
3398 len_rtx = expand_normal (len);
3399 src_str = c_getstr (src);
3400
3401 /* If SRC is a string constant and block move would be done
3402 by pieces, we can avoid loading the string from memory
3403 and only store the computed constants. */
3404 if (src_str
3405 && CONST_INT_P (len_rtx)
3406 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3407 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3408 CONST_CAST (char *, src_str),
3409 dest_align, false))
3410 {
3411 dest_mem = get_memory_rtx (dest, len);
3412 set_mem_align (dest_mem, dest_align);
3413 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3414 builtin_memcpy_read_str,
3415 CONST_CAST (char *, src_str),
3416 dest_align, false, endp);
3417 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3418 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3419 return dest_mem;
3420 }
3421
3422 if (CONST_INT_P (len_rtx)
3423 && can_move_by_pieces (INTVAL (len_rtx),
3424 MIN (dest_align, src_align)))
3425 {
3426 dest_mem = get_memory_rtx (dest, len);
3427 set_mem_align (dest_mem, dest_align);
3428 src_mem = get_memory_rtx (src, len);
3429 set_mem_align (src_mem, src_align);
3430 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3431 MIN (dest_align, src_align), endp);
3432 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3433 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3434 return dest_mem;
3435 }
3436
3437 return NULL_RTX;
3438 }
3439 }
3440
3441 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3442 we failed; the caller should emit a normal call, otherwise try to
3443 get the result in TARGET, if convenient. If ENDP is 0 return the
3444 destination pointer, if ENDP is 1 return the end pointer ala
3445 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3446 stpcpy. */
3447
3448 static rtx
3449 expand_movstr (tree dest, tree src, rtx target, int endp)
3450 {
3451 struct expand_operand ops[3];
3452 rtx dest_mem;
3453 rtx src_mem;
3454
3455 if (!targetm.have_movstr ())
3456 return NULL_RTX;
3457
3458 dest_mem = get_memory_rtx (dest, NULL);
3459 src_mem = get_memory_rtx (src, NULL);
3460 if (!endp)
3461 {
3462 target = force_reg (Pmode, XEXP (dest_mem, 0));
3463 dest_mem = replace_equiv_address (dest_mem, target);
3464 }
3465
3466 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3467 create_fixed_operand (&ops[1], dest_mem);
3468 create_fixed_operand (&ops[2], src_mem);
3469 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3470 return NULL_RTX;
3471
3472 if (endp && target != const0_rtx)
3473 {
3474 target = ops[0].value;
3475 /* movstr is supposed to set end to the address of the NUL
3476 terminator. If the caller requested a mempcpy-like return value,
3477 adjust it. */
3478 if (endp == 1)
3479 {
3480 rtx tem = plus_constant (GET_MODE (target),
3481 gen_lowpart (GET_MODE (target), target), 1);
3482 emit_move_insn (target, force_operand (tem, NULL_RTX));
3483 }
3484 }
3485 return target;
3486 }
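/* A worked example of the ENDP == 1 adjustment above (illustrative):
   after copying the three bytes of "hi", the movstr pattern leaves
   TARGET pointing at the stored NUL, i.e. dest + 2; mempcpy has to
   return one past the last byte written, so the expansion adds 1,
   producing dest + 3.  */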
3487
3488 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3489 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3490 try to get the result in TARGET, if convenient (and in mode MODE if that's
3491 convenient). */
3492
3493 static rtx
3494 expand_builtin_strcpy (tree exp, rtx target)
3495 {
3496 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3497 {
3498 tree dest = CALL_EXPR_ARG (exp, 0);
3499 tree src = CALL_EXPR_ARG (exp, 1);
3500 return expand_builtin_strcpy_args (dest, src, target);
3501 }
3502 return NULL_RTX;
3503 }
3504
3505 /* Helper function to do the actual work for expand_builtin_strcpy. The
3506 arguments to the builtin_strcpy call DEST and SRC are broken out
3507 so that this can also be called without constructing an actual CALL_EXPR.
3508 The other arguments and return value are the same as for
3509 expand_builtin_strcpy. */
3510
3511 static rtx
3512 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3513 {
3514 return expand_movstr (dest, src, target, /*endp=*/0);
3515 }
3516
3517 /* Expand a call EXP to the stpcpy builtin.
3518 Return NULL_RTX if we failed; the caller should emit a normal call,
3519 otherwise try to get the result in TARGET, if convenient (and in
3520 mode MODE if that's convenient). */
3521
3522 static rtx
3523 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3524 {
3525 tree dst, src;
3526 location_t loc = EXPR_LOCATION (exp);
3527
3528 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3529 return NULL_RTX;
3530
3531 dst = CALL_EXPR_ARG (exp, 0);
3532 src = CALL_EXPR_ARG (exp, 1);
3533
3534 /* If return value is ignored, transform stpcpy into strcpy. */
3535 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3536 {
3537 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3538 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3539 return expand_expr (result, target, mode, EXPAND_NORMAL);
3540 }
3541 else
3542 {
3543 tree len, lenp1;
3544 rtx ret;
3545
3546 /* Ensure we get an actual string whose length can be evaluated at
3547 compile-time, not an expression containing a string. This is
3548 because the latter will potentially produce pessimized code
3549 when used to produce the return value. */
3550 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3551 return expand_movstr (dst, src, target, /*endp=*/2);
3552
3553 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3554 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3555 target, mode, /*endp=*/2,
3556 exp);
3557
3558 if (ret)
3559 return ret;
3560
3561 if (TREE_CODE (len) == INTEGER_CST)
3562 {
3563 rtx len_rtx = expand_normal (len);
3564
3565 if (CONST_INT_P (len_rtx))
3566 {
3567 ret = expand_builtin_strcpy_args (dst, src, target);
3568
3569 if (ret)
3570 {
3571 if (! target)
3572 {
3573 if (mode != VOIDmode)
3574 target = gen_reg_rtx (mode);
3575 else
3576 target = gen_reg_rtx (GET_MODE (ret));
3577 }
3578 if (GET_MODE (target) != GET_MODE (ret))
3579 ret = gen_lowpart (GET_MODE (target), ret);
3580
3581 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3582 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3583 gcc_assert (ret);
3584
3585 return target;
3586 }
3587 }
3588 }
3589
3590 return expand_movstr (dst, src, target, /*endp=*/2);
3591 }
3592 }
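/* An illustrative trace of the path above: for stpcpy (dst, "abc"),
   c_strlen yields LEN == 3, so LENP1 == 4 and the mempcpy helper is
   invoked with ENDP == 2; the value returned therefore computes
   dst + 3, the address of the copied NUL terminator, as stpcpy
   requires.  */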
3593
3594 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3595 bytes from constant string DATA + OFFSET and return it as target
3596 constant. */
3597
3598 rtx
3599 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3600 machine_mode mode)
3601 {
3602 const char *str = (const char *) data;
3603
3604 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3605 return const0_rtx;
3606
3607 return c_readstr (str + offset, mode);
3608 }
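/* For example (illustrative): with DATA == "ab" and a four-byte MODE,
   OFFSET 0 produces the constant for the bytes 'a', 'b', 0, 0, since
   c_readstr zero-fills past the terminating NUL, and any OFFSET
   greater than 2 returns const0_rtx outright.  This is what gives the
   strncpy expansion below its required zero padding for free.  */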
3609
3610 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3611 NULL_RTX if we failed; the caller should emit a normal call. */
3612
3613 static rtx
3614 expand_builtin_strncpy (tree exp, rtx target)
3615 {
3616 location_t loc = EXPR_LOCATION (exp);
3617
3618 if (validate_arglist (exp,
3619 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3620 {
3621 tree dest = CALL_EXPR_ARG (exp, 0);
3622 tree src = CALL_EXPR_ARG (exp, 1);
3623 tree len = CALL_EXPR_ARG (exp, 2);
3624 tree slen = c_strlen (src, 1);
3625
3626 /* We must be passed constant LEN and SRC parameters. */
3627 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3628 return NULL_RTX;
3629
3630 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3631
3632 /* We're required to pad with trailing zeros if the requested
3633 len is greater than strlen(s2)+1. In that case try to
3634 use store_by_pieces; if it fails, punt. */
3635 if (tree_int_cst_lt (slen, len))
3636 {
3637 unsigned int dest_align = get_pointer_alignment (dest);
3638 const char *p = c_getstr (src);
3639 rtx dest_mem;
3640
3641 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3642 || !can_store_by_pieces (tree_to_uhwi (len),
3643 builtin_strncpy_read_str,
3644 CONST_CAST (char *, p),
3645 dest_align, false))
3646 return NULL_RTX;
3647
3648 dest_mem = get_memory_rtx (dest, len);
3649 store_by_pieces (dest_mem, tree_to_uhwi (len),
3650 builtin_strncpy_read_str,
3651 CONST_CAST (char *, p), dest_align, false, 0);
3652 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3653 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3654 return dest_mem;
3655 }
3656 }
3657 return NULL_RTX;
3658 }
3659
3660 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3661 bytes from constant string DATA + OFFSET and return it as target
3662 constant. */
3663
3664 rtx
3665 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3666 machine_mode mode)
3667 {
3668 const char *c = (const char *) data;
3669 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3670
3671 memset (p, *c, GET_MODE_SIZE (mode));
3672
3673 return c_readstr (p, mode);
3674 }
3675
3676 /* Callback routine for store_by_pieces. Return the RTL of a register
3677 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3678 char value given in the RTL register data. For example, if mode is
3679 4 bytes wide, return the RTL for 0x01010101*data. */
3680
3681 static rtx
3682 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3683 machine_mode mode)
3684 {
3685 rtx target, coeff;
3686 size_t size;
3687 char *p;
3688
3689 size = GET_MODE_SIZE (mode);
3690 if (size == 1)
3691 return (rtx) data;
3692
3693 p = XALLOCAVEC (char, size);
3694 memset (p, 1, size);
3695 coeff = c_readstr (p, mode);
3696
3697 target = convert_to_mode (mode, (rtx) data, 1);
3698 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3699 return force_reg (mode, target);
3700 }
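/* A concrete instance of the multiplication above (illustrative): for
   SImode with DATA holding the byte 0xAB, COEFF is read from
   "\1\1\1\1" as 0x01010101 and 0xAB * 0x01010101 == 0xABABABAB, four
   copies of the byte.  */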
3701
3702 /* Expand expression EXP, which is a call to the memset builtin. Return
3703 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3704 try to get the result in TARGET, if convenient (and in mode MODE if that's
3705 convenient). */
3706
3707 static rtx
3708 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3709 {
3710 if (!validate_arglist (exp,
3711 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3712 return NULL_RTX;
3713 else
3714 {
3715 tree dest = CALL_EXPR_ARG (exp, 0);
3716 tree val = CALL_EXPR_ARG (exp, 1);
3717 tree len = CALL_EXPR_ARG (exp, 2);
3718 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3719 }
3720 }
3721
3722 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3723 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3724 try to get the result in TARGET, if convenient (and in mode MODE if that's
3725 convenient). */
3726
3727 static rtx
3728 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3729 {
3730 if (!validate_arglist (exp,
3731 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3732 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3733 return NULL_RTX;
3734 else
3735 {
3736 tree dest = CALL_EXPR_ARG (exp, 0);
3737 tree val = CALL_EXPR_ARG (exp, 2);
3738 tree len = CALL_EXPR_ARG (exp, 3);
3739 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3740
3741 /* Return src bounds with the result. */
3742 if (res)
3743 {
3744 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3745 expand_normal (CALL_EXPR_ARG (exp, 1)));
3746 res = chkp_join_splitted_slot (res, bnd);
3747 }
3748 return res;
3749 }
3750 }
3751
3752 /* Helper function to do the actual work for expand_builtin_memset. The
3753 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3754 so that this can also be called without constructing an actual CALL_EXPR.
3755 The other arguments and return value are the same as for
3756 expand_builtin_memset. */
3757
3758 static rtx
3759 expand_builtin_memset_args (tree dest, tree val, tree len,
3760 rtx target, machine_mode mode, tree orig_exp)
3761 {
3762 tree fndecl, fn;
3763 enum built_in_function fcode;
3764 machine_mode val_mode;
3765 char c;
3766 unsigned int dest_align;
3767 rtx dest_mem, dest_addr, len_rtx;
3768 HOST_WIDE_INT expected_size = -1;
3769 unsigned int expected_align = 0;
3770 unsigned HOST_WIDE_INT min_size;
3771 unsigned HOST_WIDE_INT max_size;
3772 unsigned HOST_WIDE_INT probable_max_size;
3773
3774 dest_align = get_pointer_alignment (dest);
3775
3776 /* If DEST is not a pointer type, don't do this operation in-line. */
3777 if (dest_align == 0)
3778 return NULL_RTX;
3779
3780 if (currently_expanding_gimple_stmt)
3781 stringop_block_profile (currently_expanding_gimple_stmt,
3782 &expected_align, &expected_size);
3783
3784 if (expected_align < dest_align)
3785 expected_align = dest_align;
3786
3787 /* If the LEN parameter is zero, return DEST. */
3788 if (integer_zerop (len))
3789 {
3790 /* Evaluate and ignore VAL in case it has side-effects. */
3791 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3792 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3793 }
3794
3795 /* Stabilize the arguments in case we fail. */
3796 dest = builtin_save_expr (dest);
3797 val = builtin_save_expr (val);
3798 len = builtin_save_expr (len);
3799
3800 len_rtx = expand_normal (len);
3801 determine_block_size (len, len_rtx, &min_size, &max_size,
3802 &probable_max_size);
3803 dest_mem = get_memory_rtx (dest, len);
3804 val_mode = TYPE_MODE (unsigned_char_type_node);
3805
3806 if (TREE_CODE (val) != INTEGER_CST)
3807 {
3808 rtx val_rtx;
3809
3810 val_rtx = expand_normal (val);
3811 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3812
3813 /* Assume that we can memset by pieces if we can store
3814 the coefficients by pieces (in the required modes).
3815 We can't pass builtin_memset_gen_str as that emits RTL. */
3816 c = 1;
3817 if (tree_fits_uhwi_p (len)
3818 && can_store_by_pieces (tree_to_uhwi (len),
3819 builtin_memset_read_str, &c, dest_align,
3820 true))
3821 {
3822 val_rtx = force_reg (val_mode, val_rtx);
3823 store_by_pieces (dest_mem, tree_to_uhwi (len),
3824 builtin_memset_gen_str, val_rtx, dest_align,
3825 true, 0);
3826 }
3827 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3828 dest_align, expected_align,
3829 expected_size, min_size, max_size,
3830 probable_max_size))
3831 goto do_libcall;
3832
3833 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3834 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3835 return dest_mem;
3836 }
3837
3838 if (target_char_cast (val, &c))
3839 goto do_libcall;
3840
3841 if (c)
3842 {
3843 if (tree_fits_uhwi_p (len)
3844 && can_store_by_pieces (tree_to_uhwi (len),
3845 builtin_memset_read_str, &c, dest_align,
3846 true))
3847 store_by_pieces (dest_mem, tree_to_uhwi (len),
3848 builtin_memset_read_str, &c, dest_align, true, 0);
3849 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3850 gen_int_mode (c, val_mode),
3851 dest_align, expected_align,
3852 expected_size, min_size, max_size,
3853 probable_max_size))
3854 goto do_libcall;
3855
3856 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3857 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3858 return dest_mem;
3859 }
3860
3861 set_mem_align (dest_mem, dest_align);
3862 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3863 CALL_EXPR_TAILCALL (orig_exp)
3864 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3865 expected_align, expected_size,
3866 min_size, max_size,
3867 probable_max_size);
3868
3869 if (dest_addr == 0)
3870 {
3871 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3872 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3873 }
3874
3875 return dest_addr;
3876
3877 do_libcall:
3878 fndecl = get_callee_fndecl (orig_exp);
3879 fcode = DECL_FUNCTION_CODE (fndecl);
3880 if (fcode == BUILT_IN_MEMSET
3881 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3882 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3883 dest, val, len);
3884 else if (fcode == BUILT_IN_BZERO)
3885 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3886 dest, len);
3887 else
3888 gcc_unreachable ();
3889 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3890 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3891 return expand_call (fn, target, target == const0_rtx);
3892 }
3893
3894 /* Expand expression EXP, which is a call to the bzero builtin. Return
3895 NULL_RTX if we failed; the caller should emit a normal call. */
3896
3897 static rtx
3898 expand_builtin_bzero (tree exp)
3899 {
3900 tree dest, size;
3901 location_t loc = EXPR_LOCATION (exp);
3902
3903 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3904 return NULL_RTX;
3905
3906 dest = CALL_EXPR_ARG (exp, 0);
3907 size = CALL_EXPR_ARG (exp, 1);
3908
3909 /* New argument list transforming bzero(ptr x, int y) to
3910 memset(ptr x, int 0, size_t y). This is done this way
3911 so that if it isn't expanded inline, we fall back to
3912 calling bzero instead of memset. */
3913
3914 return expand_builtin_memset_args (dest, integer_zero_node,
3915 fold_convert_loc (loc,
3916 size_type_node, size),
3917 const0_rtx, VOIDmode, exp);
3918 }
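/* Illustratively, bzero (p, n) is expanded here exactly as
   memset (p, 0, (size_t) n) would be, except that ORIG_EXP is still
   the bzero CALL_EXPR, so the do_libcall fallback in
   expand_builtin_memset_args re-emits a call to bzero rather than to
   memset.  */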
3919
3920 /* Try to expand cmpstr operation ICODE with the given operands.
3921 Return the result rtx on success, otherwise return null. */
3922
3923 static rtx
3924 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3925 HOST_WIDE_INT align)
3926 {
3927 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3928
3929 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3930 target = NULL_RTX;
3931
3932 struct expand_operand ops[4];
3933 create_output_operand (&ops[0], target, insn_mode);
3934 create_fixed_operand (&ops[1], arg1_rtx);
3935 create_fixed_operand (&ops[2], arg2_rtx);
3936 create_integer_operand (&ops[3], align);
3937 if (maybe_expand_insn (icode, 4, ops))
3938 return ops[0].value;
3939 return NULL_RTX;
3940 }
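/* A minimal usage sketch (an assumed caller, mirroring
   expand_builtin_strcmp below):

     insn_code icode = direct_optab_handler (cmpstr_optab, SImode);
     if (icode != CODE_FOR_nothing)
       result = expand_cmpstr (icode, target, arg1_rtx, arg2_rtx,
                               MIN (arg1_align, arg2_align));

   A NULL_RTX result means the pattern could not be used and the
   caller must fall back to a library call.  */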
3941
3942 /* Try to expand cmpstrn operation ICODE with the given operands.
3943 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3944 otherwise return null. */
3945
3946 static rtx
3947 expand_cmpstrn (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3948 tree arg3_type, rtx arg3_rtx, HOST_WIDE_INT align)
3949 {
3950 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3951
3952 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3953 target = NULL_RTX;
3954
3955 struct expand_operand ops[5];
3956 create_output_operand (&ops[0], target, insn_mode);
3957 create_fixed_operand (&ops[1], arg1_rtx);
3958 create_fixed_operand (&ops[2], arg2_rtx);
3959 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3960 TYPE_UNSIGNED (arg3_type));
3961 create_integer_operand (&ops[4], align);
3962 if (maybe_expand_insn (icode, 5, ops))
3963 return ops[0].value;
3964 return NULL_RTX;
3965 }
3966
3967 /* Expand expression EXP, which is a call to the memcmp built-in function.
3968 Return NULL_RTX if we failed and the caller should emit a normal call,
3969 otherwise try to get the result in TARGET, if convenient (and in mode
3970 MODE, if that's convenient). */
3971
3972 static rtx
3973 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3974 ATTRIBUTE_UNUSED machine_mode mode)
3975 {
3976 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3977
3978 if (!validate_arglist (exp,
3979 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3980 return NULL_RTX;
3981
3982 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3983 implementing memcmp because it will stop if it encounters two
3984 zero bytes. */
3985 #if defined HAVE_cmpmemsi
3986 {
3987 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3988 rtx result;
3989 rtx insn;
3990 tree arg1 = CALL_EXPR_ARG (exp, 0);
3991 tree arg2 = CALL_EXPR_ARG (exp, 1);
3992 tree len = CALL_EXPR_ARG (exp, 2);
3993
3994 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3995 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3996 machine_mode insn_mode;
3997
3998 if (HAVE_cmpmemsi)
3999 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4000 else
4001 return NULL_RTX;
4002
4003 /* If we don't have POINTER_TYPE for both arguments, call the function. */
4004 if (arg1_align == 0 || arg2_align == 0)
4005 return NULL_RTX;
4006
4007 /* Make a place to write the result of the instruction. */
4008 result = target;
4009 if (! (result != 0
4010 && REG_P (result) && GET_MODE (result) == insn_mode
4011 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4012 result = gen_reg_rtx (insn_mode);
4013
4014 arg1_rtx = get_memory_rtx (arg1, len);
4015 arg2_rtx = get_memory_rtx (arg2, len);
4016 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4017
4018 /* Set MEM_SIZE as appropriate. */
4019 if (CONST_INT_P (arg3_rtx))
4020 {
4021 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
4022 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
4023 }
4024
4025 if (HAVE_cmpmemsi)
4026 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4027 GEN_INT (MIN (arg1_align, arg2_align)));
4028 else
4029 gcc_unreachable ();
4030
4031 if (insn)
4032 emit_insn (insn);
4033 else
4034 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4035 TYPE_MODE (integer_type_node), 3,
4036 XEXP (arg1_rtx, 0), Pmode,
4037 XEXP (arg2_rtx, 0), Pmode,
4038 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4039 TYPE_UNSIGNED (sizetype)),
4040 TYPE_MODE (sizetype));
4041
4042 /* Return the value in the proper mode for this function. */
4043 mode = TYPE_MODE (TREE_TYPE (exp));
4044 if (GET_MODE (result) == mode)
4045 return result;
4046 else if (target != 0)
4047 {
4048 convert_move (target, result, 0);
4049 return target;
4050 }
4051 else
4052 return convert_to_mode (mode, result, 0);
4053 }
4054 #endif /* HAVE_cmpmemsi. */
4055
4056 return NULL_RTX;
4057 }
4058
4059 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4060 if we failed; the caller should emit a normal call, otherwise try to get
4061 the result in TARGET, if convenient. */
4062
4063 static rtx
4064 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4065 {
4066 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4067 return NULL_RTX;
4068
4069 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4070 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4071 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4072 {
4073 rtx arg1_rtx, arg2_rtx;
4074 tree fndecl, fn;
4075 tree arg1 = CALL_EXPR_ARG (exp, 0);
4076 tree arg2 = CALL_EXPR_ARG (exp, 1);
4077 rtx result = NULL_RTX;
4078
4079 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4080 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4081
4082 /* If we don't have POINTER_TYPE for both arguments, call the function. */
4083 if (arg1_align == 0 || arg2_align == 0)
4084 return NULL_RTX;
4085
4086 /* Stabilize the arguments in case gen_cmpstr(n)si fails. */
4087 arg1 = builtin_save_expr (arg1);
4088 arg2 = builtin_save_expr (arg2);
4089
4090 arg1_rtx = get_memory_rtx (arg1, NULL);
4091 arg2_rtx = get_memory_rtx (arg2, NULL);
4092
4093 /* Try to call cmpstrsi. */
4094 if (cmpstr_icode != CODE_FOR_nothing)
4095 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4096 MIN (arg1_align, arg2_align));
4097
4098 /* Try to determine at least one length and call cmpstrnsi. */
4099 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4100 {
4101 tree len;
4102 rtx arg3_rtx;
4103
4104 tree len1 = c_strlen (arg1, 1);
4105 tree len2 = c_strlen (arg2, 1);
4106
4107 if (len1)
4108 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4109 if (len2)
4110 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4111
4112 /* If we don't have a constant length for the first, use the length
4113 of the second, if we know it. We don't require a constant for
4114 this case; some cost analysis could be done if both are available
4115 but neither is constant. For now, assume they're equally cheap,
4116 unless one has side effects. If both strings have constant lengths,
4117 use the smaller. */
4118
4119 if (!len1)
4120 len = len2;
4121 else if (!len2)
4122 len = len1;
4123 else if (TREE_SIDE_EFFECTS (len1))
4124 len = len2;
4125 else if (TREE_SIDE_EFFECTS (len2))
4126 len = len1;
4127 else if (TREE_CODE (len1) != INTEGER_CST)
4128 len = len2;
4129 else if (TREE_CODE (len2) != INTEGER_CST)
4130 len = len1;
4131 else if (tree_int_cst_lt (len1, len2))
4132 len = len1;
4133 else
4134 len = len2;
4135
4136 /* If both arguments have side effects, we cannot optimize. */
4137 if (len && !TREE_SIDE_EFFECTS (len))
4138 {
4139 arg3_rtx = expand_normal (len);
4140 result = expand_cmpstrn (cmpstrn_icode, target, arg1_rtx,
4141 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4142 MIN (arg1_align, arg2_align));
4143 }
4144 }
4145
4146 if (result)
4147 {
4148 /* Return the value in the proper mode for this function. */
4149 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4150 if (GET_MODE (result) == mode)
4151 return result;
4152 if (target == 0)
4153 return convert_to_mode (mode, result, 0);
4154 convert_move (target, result, 0);
4155 return target;
4156 }
4157
4158 /* Expand the library call ourselves using a stabilized argument
4159 list to avoid re-evaluating the function's arguments twice. */
4160 fndecl = get_callee_fndecl (exp);
4161 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4162 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4163 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4164 return expand_call (fn, target, target == const0_rtx);
4165 }
4166 return NULL_RTX;
4167 }
4168
4169 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4170 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4171 the result in TARGET, if convenient. */
4172
4173 static rtx
4174 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4175 ATTRIBUTE_UNUSED machine_mode mode)
4176 {
4177 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4178
4179 if (!validate_arglist (exp,
4180 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4181 return NULL_RTX;
4182
4183 /* If c_strlen can determine an expression for one of the string
4184 lengths, and it doesn't have side effects, then emit cmpstrnsi
4185 using length MIN(strlen(string)+1, arg3). */
4186 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4187 if (cmpstrn_icode != CODE_FOR_nothing)
4188 {
4189 tree len, len1, len2;
4190 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4191 rtx result;
4192 tree fndecl, fn;
4193 tree arg1 = CALL_EXPR_ARG (exp, 0);
4194 tree arg2 = CALL_EXPR_ARG (exp, 1);
4195 tree arg3 = CALL_EXPR_ARG (exp, 2);
4196
4197 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4198 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4199
4200 len1 = c_strlen (arg1, 1);
4201 len2 = c_strlen (arg2, 1);
4202
4203 if (len1)
4204 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4205 if (len2)
4206 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4207
4208 /* If we don't have a constant length for the first, use the length
4209 of the second, if we know it. We don't require a constant for
4210 this case; some cost analysis could be done if both are available
4211 but neither is constant. For now, assume they're equally cheap,
4212 unless one has side effects. If both strings have constant lengths,
4213 use the smaller. */
4214
4215 if (!len1)
4216 len = len2;
4217 else if (!len2)
4218 len = len1;
4219 else if (TREE_SIDE_EFFECTS (len1))
4220 len = len2;
4221 else if (TREE_SIDE_EFFECTS (len2))
4222 len = len1;
4223 else if (TREE_CODE (len1) != INTEGER_CST)
4224 len = len2;
4225 else if (TREE_CODE (len2) != INTEGER_CST)
4226 len = len1;
4227 else if (tree_int_cst_lt (len1, len2))
4228 len = len1;
4229 else
4230 len = len2;
4231
4232 /* If both arguments have side effects, we cannot optimize. */
4233 if (!len || TREE_SIDE_EFFECTS (len))
4234 return NULL_RTX;
4235
4236 /* The actual new length parameter is MIN(len,arg3). */
4237 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4238 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4239
4240 /* If we don't have POINTER_TYPE for both arguments, call the function. */
4241 if (arg1_align == 0 || arg2_align == 0)
4242 return NULL_RTX;
4243
4244 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4245 arg1 = builtin_save_expr (arg1);
4246 arg2 = builtin_save_expr (arg2);
4247 len = builtin_save_expr (len);
4248
4249 arg1_rtx = get_memory_rtx (arg1, len);
4250 arg2_rtx = get_memory_rtx (arg2, len);
4251 arg3_rtx = expand_normal (len);
4252 result = expand_cmpstrn (cmpstrn_icode, target, arg1_rtx, arg2_rtx,
4253 TREE_TYPE (len), arg3_rtx,
4254 MIN (arg1_align, arg2_align));
4255 if (result)
4256 {
4257 /* Return the value in the proper mode for this function. */
4258 mode = TYPE_MODE (TREE_TYPE (exp));
4259 if (GET_MODE (result) == mode)
4260 return result;
4261 if (target == 0)
4262 return convert_to_mode (mode, result, 0);
4263 convert_move (target, result, 0);
4264 return target;
4265 }
4266
4267 /* Expand the library call ourselves using a stabilized argument
4268 list to avoid re-evaluating the function's arguments twice. */
4269 fndecl = get_callee_fndecl (exp);
4270 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4271 arg1, arg2, len);
4272 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4273 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4274 return expand_call (fn, target, target == const0_rtx);
4275 }
4276 return NULL_RTX;
4277 }
4278
4279 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4280 if that's convenient. */
4281
4282 rtx
4283 expand_builtin_saveregs (void)
4284 {
4285 rtx val;
4286 rtx_insn *seq;
4287
4288 /* Don't do __builtin_saveregs more than once in a function.
4289 Save the result of the first call and reuse it. */
4290 if (saveregs_value != 0)
4291 return saveregs_value;
4292
4293 /* When this function is called, it means that registers must be
4294 saved on entry to this function. So we migrate the call to the
4295 first insn of this function. */
4296
4297 start_sequence ();
4298
4299 /* Do whatever the machine needs done in this case. */
4300 val = targetm.calls.expand_builtin_saveregs ();
4301
4302 seq = get_insns ();
4303 end_sequence ();
4304
4305 saveregs_value = val;
4306
4307 /* Put the insns after the NOTE that starts the function. If this
4308 is inside a start_sequence, make the outer-level insn chain current, so
4309 the code is placed at the start of the function. */
4310 push_topmost_sequence ();
4311 emit_insn_after (seq, entry_of_function ());
4312 pop_topmost_sequence ();
4313
4314 return val;
4315 }
4316
4317 /* Expand a call to __builtin_next_arg. */
4318
4319 static rtx
4320 expand_builtin_next_arg (void)
4321 {
4322 /* Checking arguments is already done in fold_builtin_next_arg
4323 that must be called before this function. */
4324 return expand_binop (ptr_mode, add_optab,
4325 crtl->args.internal_arg_pointer,
4326 crtl->args.arg_offset_rtx,
4327 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4328 }
4329
4330 /* Make it easier for the backends by protecting the valist argument
4331 from multiple evaluations. */
4332
4333 static tree
4334 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4335 {
4336 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4337
4338 /* The current way of determining the type of valist is completely
4339 bogus. We should have the information on the va builtin instead. */
4340 if (!vatype)
4341 vatype = targetm.fn_abi_va_list (cfun->decl);
4342
4343 if (TREE_CODE (vatype) == ARRAY_TYPE)
4344 {
4345 if (TREE_SIDE_EFFECTS (valist))
4346 valist = save_expr (valist);
4347
4348 /* For this case, the backends will be expecting a pointer to
4349 vatype, but it's possible we've actually been given an array
4350 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4351 So fix it. */
4352 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4353 {
4354 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4355 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4356 }
4357 }
4358 else
4359 {
4360 tree pt = build_pointer_type (vatype);
4361
4362 if (! needs_lvalue)
4363 {
4364 if (! TREE_SIDE_EFFECTS (valist))
4365 return valist;
4366
4367 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4368 TREE_SIDE_EFFECTS (valist) = 1;
4369 }
4370
4371 if (TREE_SIDE_EFFECTS (valist))
4372 valist = save_expr (valist);
4373 valist = fold_build2_loc (loc, MEM_REF,
4374 vatype, valist, build_int_cst (pt, 0));
4375 }
4376
4377 return valist;
4378 }
4379
4380 /* The "standard" definition of va_list is void*. */
4381
4382 tree
4383 std_build_builtin_va_list (void)
4384 {
4385 return ptr_type_node;
4386 }
4387
4388 /* The "standard" abi va_list is va_list_type_node. */
4389
4390 tree
4391 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4392 {
4393 return va_list_type_node;
4394 }
4395
4396 /* The "standard" type of va_list is va_list_type_node. */
4397
4398 tree
4399 std_canonical_va_list_type (tree type)
4400 {
4401 tree wtype, htype;
4402
4403 if (INDIRECT_REF_P (type))
4404 type = TREE_TYPE (type);
4405 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4406 type = TREE_TYPE (type);
4407 wtype = va_list_type_node;
4408 htype = type;
4409 /* If va_list is a record and HTYPE points to it, look through the pointer. */
4410 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4411 htype = TREE_TYPE (htype);
4412 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4413 {
4414 /* If va_list is an array type, the argument may have decayed
4415 to a pointer type, e.g. by being passed to another function.
4416 In that case, unwrap both types so that we can compare the
4417 underlying records. */
4418 if (TREE_CODE (htype) == ARRAY_TYPE
4419 || POINTER_TYPE_P (htype))
4420 {
4421 wtype = TREE_TYPE (wtype);
4422 htype = TREE_TYPE (htype);
4423 }
4424 }
4425 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4426 return va_list_type_node;
4427
4428 return NULL_TREE;
4429 }
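/* For example (illustrative): on a target whose va_list is a
   one-element array of a record, a va_list object that has been
   passed to another function decays to a pointer to the record; the
   unwrapping above strips the array from WTYPE and the pointer from
   HTYPE so that the two underlying records compare equal.  */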
4430
4431 /* The "standard" implementation of va_start: just assign `nextarg' to
4432 the variable. */
4433
4434 void
4435 std_expand_builtin_va_start (tree valist, rtx nextarg)
4436 {
4437 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4438 convert_move (va_r, nextarg, 0);
4439
4440 /* We do not have any valid bounds for the pointer, so
4441 just store zero bounds for it. */
4442 if (chkp_function_instrumented_p (current_function_decl))
4443 chkp_expand_bounds_reset_for_mem (valist,
4444 make_tree (TREE_TYPE (valist),
4445 nextarg));
4446 }
4447
4448 /* Expand EXP, a call to __builtin_va_start. */
4449
4450 static rtx
4451 expand_builtin_va_start (tree exp)
4452 {
4453 rtx nextarg;
4454 tree valist;
4455 location_t loc = EXPR_LOCATION (exp);
4456
4457 if (call_expr_nargs (exp) < 2)
4458 {
4459 error_at (loc, "too few arguments to function %<va_start%>");
4460 return const0_rtx;
4461 }
4462
4463 if (fold_builtin_next_arg (exp, true))
4464 return const0_rtx;
4465
4466 nextarg = expand_builtin_next_arg ();
4467 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4468
4469 if (targetm.expand_builtin_va_start)
4470 targetm.expand_builtin_va_start (valist, nextarg);
4471 else
4472 std_expand_builtin_va_start (valist, nextarg);
4473
4474 return const0_rtx;
4475 }
4476
4477 /* Expand EXP, a call to __builtin_va_end. */
4478
4479 static rtx
4480 expand_builtin_va_end (tree exp)
4481 {
4482 tree valist = CALL_EXPR_ARG (exp, 0);
4483
4484 /* Evaluate for side effects, if needed. I hate macros that don't
4485 do that. */
4486 if (TREE_SIDE_EFFECTS (valist))
4487 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4488
4489 return const0_rtx;
4490 }
4491
4492 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4493 builtin rather than just as an assignment in stdarg.h because of the
4494 nastiness of array-type va_list types. */
4495
4496 static rtx
4497 expand_builtin_va_copy (tree exp)
4498 {
4499 tree dst, src, t;
4500 location_t loc = EXPR_LOCATION (exp);
4501
4502 dst = CALL_EXPR_ARG (exp, 0);
4503 src = CALL_EXPR_ARG (exp, 1);
4504
4505 dst = stabilize_va_list_loc (loc, dst, 1);
4506 src = stabilize_va_list_loc (loc, src, 0);
4507
4508 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4509
4510 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4511 {
4512 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4513 TREE_SIDE_EFFECTS (t) = 1;
4514 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4515 }
4516 else
4517 {
4518 rtx dstb, srcb, size;
4519
4520 /* Evaluate to pointers. */
4521 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4522 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4523 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4524 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4525
4526 dstb = convert_memory_address (Pmode, dstb);
4527 srcb = convert_memory_address (Pmode, srcb);
4528
4529 /* "Dereference" to BLKmode memories. */
4530 dstb = gen_rtx_MEM (BLKmode, dstb);
4531 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4532 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4533 srcb = gen_rtx_MEM (BLKmode, srcb);
4534 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4535 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4536
4537 /* Copy. */
4538 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4539 }
4540
4541 return const0_rtx;
4542 }
4543
4544 /* Expand a call to one of the builtin functions __builtin_frame_address or
4545 __builtin_return_address. */
4546
4547 static rtx
4548 expand_builtin_frame_address (tree fndecl, tree exp)
4549 {
4550 /* The argument must be a nonnegative integer constant.
4551 It counts the number of frames to scan up the stack.
4552 The value is either the frame pointer value or the return
4553 address saved in that frame. */
4554 if (call_expr_nargs (exp) == 0)
4555 /* Warning about missing arg was already issued. */
4556 return const0_rtx;
4557 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4558 {
4559 error ("invalid argument to %qD", fndecl);
4560 return const0_rtx;
4561 }
4562 else
4563 {
4564 /* Number of frames to scan up the stack. */
4565 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4566
4567 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4568
4569 /* Some ports cannot access arbitrary stack frames. */
4570 if (tem == NULL)
4571 {
4572 warning (0, "unsupported argument to %qD", fndecl);
4573 return const0_rtx;
4574 }
4575
4576 if (count)
4577 {
4578 /* Warn since no effort is made to ensure that any frame
4579 beyond the current one exists or can be safely reached. */
4580 warning (OPT_Wframe_address, "calling %qD with "
4581 "a nonzero argument is unsafe", fndecl);
4582 }
4583
4584 /* For __builtin_frame_address, return what we've got. */
4585 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4586 return tem;
4587
4588 if (!REG_P (tem)
4589 && ! CONSTANT_P (tem))
4590 tem = copy_addr_to_reg (tem);
4591 return tem;
4592 }
4593 }
4594
4595 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4596 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4597 is the same as for allocate_dynamic_stack_space. */
4598
4599 static rtx
4600 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4601 {
4602 rtx op0;
4603 rtx result;
4604 bool valid_arglist;
4605 unsigned int align;
4606 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4607 == BUILT_IN_ALLOCA_WITH_ALIGN);
4608
4609 valid_arglist
4610 = (alloca_with_align
4611 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4612 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4613
4614 if (!valid_arglist)
4615 return NULL_RTX;
4616
4617 /* Compute the argument. */
4618 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4619
4620 /* Compute the alignment. */
4621 align = (alloca_with_align
4622 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4623 : BIGGEST_ALIGNMENT);
4624
4625 /* Allocate the desired space. */
4626 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4627 result = convert_memory_address (ptr_mode, result);
4628
4629 return result;
4630 }
4631
4632 /* Expand a call to bswap builtin in EXP.
4633 Return NULL_RTX if a normal call should be emitted rather than expanding the
4634 function in-line. If convenient, the result should be placed in TARGET.
4635 SUBTARGET may be used as the target for computing one of EXP's operands. */
4636
4637 static rtx
4638 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4639 rtx subtarget)
4640 {
4641 tree arg;
4642 rtx op0;
4643
4644 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4645 return NULL_RTX;
4646
4647 arg = CALL_EXPR_ARG (exp, 0);
4648 op0 = expand_expr (arg,
4649 subtarget && GET_MODE (subtarget) == target_mode
4650 ? subtarget : NULL_RTX,
4651 target_mode, EXPAND_NORMAL);
4652 if (GET_MODE (op0) != target_mode)
4653 op0 = convert_to_mode (target_mode, op0, 1);
4654
4655 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4656
4657 gcc_assert (target);
4658
4659 return convert_to_mode (target_mode, target, 1);
4660 }
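/* For instance (illustrative): expanding __builtin_bswap32 on the
   SImode constant 0x12345678 yields 0x78563412, the same four bytes
   in reverse order.  */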
4661
4662 /* Expand a call to a unary builtin in EXP.
4663 Return NULL_RTX if a normal call should be emitted rather than expanding the
4664 function in-line. If convenient, the result should be placed in TARGET.
4665 SUBTARGET may be used as the target for computing one of EXP's operands. */
4666
4667 static rtx
4668 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4669 rtx subtarget, optab op_optab)
4670 {
4671 rtx op0;
4672
4673 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4674 return NULL_RTX;
4675
4676 /* Compute the argument. */
4677 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4678 (subtarget
4679 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4680 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4681 VOIDmode, EXPAND_NORMAL);
4682 /* Compute op, into TARGET if possible.
4683 Set TARGET to wherever the result comes back. */
4684 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4685 op_optab, op0, target, op_optab != clrsb_optab);
4686 gcc_assert (target);
4687
4688 return convert_to_mode (target_mode, target, 0);
4689 }
4690
4691 /* Expand a call to __builtin_expect. We just return our argument
4692 as the builtin_expect semantics should already have been applied by
4693 the tree branch prediction pass. */
4694
4695 static rtx
4696 expand_builtin_expect (tree exp, rtx target)
4697 {
4698 tree arg;
4699
4700 if (call_expr_nargs (exp) < 2)
4701 return const0_rtx;
4702 arg = CALL_EXPR_ARG (exp, 0);
4703
4704 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4705 /* When guessing was done, the hints should be already stripped away. */
4706 gcc_assert (!flag_guess_branch_prob
4707 || optimize == 0 || seen_error ());
4708 return target;
4709 }
4710
4711 /* Expand a call to __builtin_assume_aligned. We just return our first
4712 argument, as the builtin_assume_aligned semantics should already
4713 have been applied by CCP. */
4714
4715 static rtx
4716 expand_builtin_assume_aligned (tree exp, rtx target)
4717 {
4718 if (call_expr_nargs (exp) < 2)
4719 return const0_rtx;
4720 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4721 EXPAND_NORMAL);
4722 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4723 && (call_expr_nargs (exp) < 3
4724 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4725 return target;
4726 }
4727
4728 void
4729 expand_builtin_trap (void)
4730 {
4731 if (targetm.have_trap ())
4732 {
4733 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4734 /* For trap insns when not accumulating outgoing args force
4735 REG_ARGS_SIZE note to prevent crossjumping of calls with
4736 different args sizes. */
4737 if (!ACCUMULATE_OUTGOING_ARGS)
4738 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4739 }
4740 else
4741 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4742 emit_barrier ();
4743 }
4744
4745 /* Expand a call to __builtin_unreachable. We do nothing except emit
4746 a barrier saying that control flow will not pass here.
4747
4748 It is the responsibility of the program being compiled to ensure
4749 that control flow never reaches __builtin_unreachable. */
4750 static void
4751 expand_builtin_unreachable (void)
4752 {
4753 emit_barrier ();
4754 }
4755
4756 /* Expand EXP, a call to fabs, fabsf or fabsl.
4757 Return NULL_RTX if a normal call should be emitted rather than expanding
4758 the function inline. If convenient, the result should be placed
4759 in TARGET. SUBTARGET may be used as the target for computing
4760 the operand. */
4761
4762 static rtx
4763 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4764 {
4765 machine_mode mode;
4766 tree arg;
4767 rtx op0;
4768
4769 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4770 return NULL_RTX;
4771
4772 arg = CALL_EXPR_ARG (exp, 0);
4773 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4774 mode = TYPE_MODE (TREE_TYPE (arg));
4775 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4776 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4777 }
4778
4779 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4780 Return NULL if a normal call should be emitted rather than expanding the
4781 function inline. If convenient, the result should be placed in TARGET.
4782 SUBTARGET may be used as the target for computing the operand. */
4783
4784 static rtx
4785 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4786 {
4787 rtx op0, op1;
4788 tree arg;
4789
4790 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4791 return NULL_RTX;
4792
4793 arg = CALL_EXPR_ARG (exp, 0);
4794 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4795
4796 arg = CALL_EXPR_ARG (exp, 1);
4797 op1 = expand_normal (arg);
4798
4799 return expand_copysign (op0, op1, target);
4800 }
4801
4802 /* Expand a call to __builtin___clear_cache. */
4803
4804 static rtx
4805 expand_builtin___clear_cache (tree exp)
4806 {
4807 if (!targetm.code_for_clear_cache)
4808 {
4809 #ifdef CLEAR_INSN_CACHE
4810 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4811 does something. Just do the default expansion to a call to
4812 __clear_cache(). */
4813 return NULL_RTX;
4814 #else
4815 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4816 does nothing. There is no need to call it. Do nothing. */
4817 return const0_rtx;
4818 #endif /* CLEAR_INSN_CACHE */
4819 }
4820
4821 /* We have a "clear_cache" insn, and it will handle everything. */
4822 tree begin, end;
4823 rtx begin_rtx, end_rtx;
4824
4825 /* We must not expand to a library call. If we did, any
4826 fallback library function in libgcc that might contain a call to
4827 __builtin___clear_cache() would recurse infinitely. */
4828 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4829 {
4830 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4831 return const0_rtx;
4832 }
4833
4834 if (targetm.have_clear_cache ())
4835 {
4836 struct expand_operand ops[2];
4837
4838 begin = CALL_EXPR_ARG (exp, 0);
4839 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4840
4841 end = CALL_EXPR_ARG (exp, 1);
4842 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4843
4844 create_address_operand (&ops[0], begin_rtx);
4845 create_address_operand (&ops[1], end_rtx);
4846 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4847 return const0_rtx;
4848 }
4849 return const0_rtx;
4850 }
4851
4852 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4853
4854 static rtx
4855 round_trampoline_addr (rtx tramp)
4856 {
4857 rtx temp, addend, mask;
4858
4859 /* If we don't need too much alignment, we'll have been guaranteed
4860 proper alignment by get_trampoline_type. */
4861 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4862 return tramp;
4863
4864 /* Round address up to desired boundary. */
4865 temp = gen_reg_rtx (Pmode);
4866 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4867 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4868
4869 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4870 temp, 0, OPTAB_LIB_WIDEN);
4871 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4872 temp, 0, OPTAB_LIB_WIDEN);
4873
4874 return tramp;
4875 }
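/* A worked instance of the rounding above (illustrative): with
   TRAMPOLINE_ALIGNMENT == 128 bits, ADDEND is 15 and MASK is -16, so
   TRAMP becomes (tramp + 15) & -16, the address rounded up to the
   next 16-byte boundary.  */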
4876
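/* Expand a call EXP to __builtin_init_trampoline (ONSTACK true) or
   __builtin_init_heap_trampoline (ONSTACK false): initialize the
   trampoline at the address given by the first argument with the
   nested function and static chain given by the other two
   arguments.  */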
4877 static rtx
4878 expand_builtin_init_trampoline (tree exp, bool onstack)
4879 {
4880 tree t_tramp, t_func, t_chain;
4881 rtx m_tramp, r_tramp, r_chain, tmp;
4882
4883 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4884 POINTER_TYPE, VOID_TYPE))
4885 return NULL_RTX;
4886
4887 t_tramp = CALL_EXPR_ARG (exp, 0);
4888 t_func = CALL_EXPR_ARG (exp, 1);
4889 t_chain = CALL_EXPR_ARG (exp, 2);
4890
4891 r_tramp = expand_normal (t_tramp);
4892 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4893 MEM_NOTRAP_P (m_tramp) = 1;
4894
4895 /* If ONSTACK, the TRAMP argument should be the address of a field
4896 within the local function's FRAME decl. Either way, let's see if
4897 we can fill in the MEM_ATTRs for this memory. */
4898 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4899 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4900
4901 /* Creator of a heap trampoline is responsible for making sure the
4902 address is aligned to at least STACK_BOUNDARY. Normally malloc
4903 will ensure this anyhow. */
4904 tmp = round_trampoline_addr (r_tramp);
4905 if (tmp != r_tramp)
4906 {
4907 m_tramp = change_address (m_tramp, BLKmode, tmp);
4908 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4909 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4910 }
4911
4912 /* The FUNC argument should be the address of the nested function.
4913 Extract the actual function decl to pass to the hook. */
4914 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4915 t_func = TREE_OPERAND (t_func, 0);
4916 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4917
4918 r_chain = expand_normal (t_chain);
4919
4920 /* Generate insns to initialize the trampoline. */
4921 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4922
4923 if (onstack)
4924 {
4925 trampolines_created = 1;
4926
4927 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4928 "trampoline generated for nested function %qD", t_func);
4929 }
4930
4931 return const0_rtx;
4932 }
4933
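/* Expand a call EXP to __builtin_adjust_trampoline: round the
   trampoline address to TRAMPOLINE_ALIGNMENT and let the target
   adjust it into a callable function address.  */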
4934 static rtx
4935 expand_builtin_adjust_trampoline (tree exp)
4936 {
4937 rtx tramp;
4938
4939 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4940 return NULL_RTX;
4941
4942 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4943 tramp = round_trampoline_addr (tramp);
4944 if (targetm.calls.trampoline_adjust_address)
4945 tramp = targetm.calls.trampoline_adjust_address (tramp);
4946
4947 return tramp;
4948 }
4949
4950 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4951 function. The function first checks whether the back end provides
4952 an insn to implement signbit for the respective mode. If not, it
4953 checks whether the floating point format of the value is such that
4954 the sign bit can be extracted. If that is not the case, error out.
4955 EXP is the expression that is a call to the builtin function; if
4956 convenient, the result should be placed in TARGET. */
4957 static rtx
4958 expand_builtin_signbit (tree exp, rtx target)
4959 {
4960 const struct real_format *fmt;
4961 machine_mode fmode, imode, rmode;
4962 tree arg;
4963 int word, bitpos;
4964 enum insn_code icode;
4965 rtx temp;
4966 location_t loc = EXPR_LOCATION (exp);
4967
4968 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4969 return NULL_RTX;
4970
4971 arg = CALL_EXPR_ARG (exp, 0);
4972 fmode = TYPE_MODE (TREE_TYPE (arg));
4973 rmode = TYPE_MODE (TREE_TYPE (exp));
4974 fmt = REAL_MODE_FORMAT (fmode);
4975
4976 arg = builtin_save_expr (arg);
4977
4978 /* Expand the argument yielding a RTX expression. */
4979 temp = expand_normal (arg);
4980
4981 /* Check if the back end provides an insn that handles signbit for the
4982 argument's mode. */
4983 icode = optab_handler (signbit_optab, fmode);
4984 if (icode != CODE_FOR_nothing)
4985 {
4986 rtx_insn *last = get_last_insn ();
4987 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4988 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4989 return target;
4990 delete_insns_since (last);
4991 }
4992
4993 /* For floating point formats without a sign bit, implement signbit
4994 as "ARG < 0.0". */
4995 bitpos = fmt->signbit_ro;
4996 if (bitpos < 0)
4997 {
4998 /* But we can't do this if the format supports signed zero. */
4999 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5000
5001 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5002 build_real (TREE_TYPE (arg), dconst0));
5003 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5004 }
5005
5006 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5007 {
5008 imode = int_mode_for_mode (fmode);
5009 gcc_assert (imode != BLKmode);
5010 temp = gen_lowpart (imode, temp);
5011 }
5012 else
5013 {
5014 imode = word_mode;
5015 /* Handle targets with different FP word orders. */
5016 if (FLOAT_WORDS_BIG_ENDIAN)
5017 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5018 else
5019 word = bitpos / BITS_PER_WORD;
5020 temp = operand_subword_force (temp, word, fmode);
5021 bitpos = bitpos % BITS_PER_WORD;
5022 }
5023
5024 /* Force the intermediate word_mode (or narrower) result into a
5025 register. This avoids attempting to create paradoxical SUBREGs
5026 of floating point modes below. */
5027 temp = force_reg (imode, temp);
5028
5029 /* If the bitpos is within the "result mode" lowpart, the operation
5030 can be implemented with a single bitwise AND. Otherwise, we need
5031 a right shift and an AND. */
5032
5033 if (bitpos < GET_MODE_BITSIZE (rmode))
5034 {
5035 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5036
5037 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5038 temp = gen_lowpart (rmode, temp);
5039 temp = expand_binop (rmode, and_optab, temp,
5040 immed_wide_int_const (mask, rmode),
5041 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5042 }
5043 else
5044 {
5045 /* Perform a logical right shift to place the signbit in the least
5046 significant bit, then truncate the result to the desired mode
5047 and mask just this bit. */
5048 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5049 temp = gen_lowpart (rmode, temp);
5050 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5051 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5052 }
5053
5054 return temp;
5055 }
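
/* Illustrative sketch (an editor's example, not part of GCC itself): for
   IEEE single precision, signbit_ro is 31, so with a 32-bit result mode
   the expansion above is equivalent to the source-level computation

       unsigned int u;
       memcpy (&u, &f, sizeof u);      // the gen_lowpart step
       int s = u & 0x80000000u;        // bitpos fits in rmode: mask only

   and, when the bit does not fit in the result mode's lowpart, to

       int s = (u >> 31) & 1;          // shift right, then mask bit 0

   either way the result is nonzero iff the sign bit is set, which is all
   that __builtin_signbit guarantees.  */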
5056
5057 /* Expand fork or exec calls. TARGET is the desired target of the
5058 call. EXP is the call. FN is the
5059    identifier of the actual function.  IGNORE is nonzero if the
5060 value is to be ignored. */
5061
5062 static rtx
5063 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5064 {
5065 tree id, decl;
5066 tree call;
5067
5068 /* If we are not profiling, just call the function. */
5069 if (!profile_arc_flag)
5070 return NULL_RTX;
5071
5072 /* Otherwise call the wrapper. This should be equivalent for the rest of
5073      the compiler, so the code does not diverge, and the wrapper may run the
5074 code necessary for keeping the profiling sane. */
5075
5076 switch (DECL_FUNCTION_CODE (fn))
5077 {
5078 case BUILT_IN_FORK:
5079 id = get_identifier ("__gcov_fork");
5080 break;
5081
5082 case BUILT_IN_EXECL:
5083 id = get_identifier ("__gcov_execl");
5084 break;
5085
5086 case BUILT_IN_EXECV:
5087 id = get_identifier ("__gcov_execv");
5088 break;
5089
5090 case BUILT_IN_EXECLP:
5091 id = get_identifier ("__gcov_execlp");
5092 break;
5093
5094 case BUILT_IN_EXECLE:
5095 id = get_identifier ("__gcov_execle");
5096 break;
5097
5098 case BUILT_IN_EXECVP:
5099 id = get_identifier ("__gcov_execvp");
5100 break;
5101
5102 case BUILT_IN_EXECVE:
5103 id = get_identifier ("__gcov_execve");
5104 break;
5105
5106 default:
5107 gcc_unreachable ();
5108 }
5109
5110 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5111 FUNCTION_DECL, id, TREE_TYPE (fn));
5112 DECL_EXTERNAL (decl) = 1;
5113 TREE_PUBLIC (decl) = 1;
5114 DECL_ARTIFICIAL (decl) = 1;
5115 TREE_NOTHROW (decl) = 1;
5116 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5117 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5118 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5119 return expand_call (call, target, ignore);
5120 }
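
/* Behavioural sketch (editor's illustration, not part of this file): when
   compiling with -fprofile-arcs, a call such as

       pid_t pid = fork ();

   is rewritten to call the libgcov wrapper __gcov_fork (), which has the
   same signature and keeps the profile counters consistent around the
   fork before delegating to the real function; the exec* builtins are
   redirected to their __gcov_exec* wrappers analogously.  */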
5121
5122
5123 \f
5124 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5125 the pointer in these functions is void*, the tree optimizers may remove
5126 casts. The mode computed in expand_builtin isn't reliable either, due
5127 to __sync_bool_compare_and_swap.
5128
5129 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5130 group of builtins. This gives us log2 of the mode size. */
5131
5132 static inline machine_mode
5133 get_builtin_sync_mode (int fcode_diff)
5134 {
5135 /* The size is not negotiable, so ask not to get BLKmode in return
5136 if the target indicates that a smaller size would be better. */
5137 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5138 }
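
/* Worked example (illustration only): for BUILT_IN_SYNC_FETCH_AND_ADD_4,
   FCODE_DIFF is BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1
   == 2, so we ask for a mode of BITS_PER_UNIT << 2 == 32 bits, i.e. SImode
   on the usual 8-bit-byte target; a diff of 3 likewise selects the 64-bit
   DImode.  */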
5139
5140 /* Expand the memory expression LOC and return the appropriate memory operand
5141 for the builtin_sync operations. */
5142
5143 static rtx
5144 get_builtin_sync_mem (tree loc, machine_mode mode)
5145 {
5146 rtx addr, mem;
5147
5148 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5149 addr = convert_memory_address (Pmode, addr);
5150
5151 /* Note that we explicitly do not want any alias information for this
5152 memory, so that we kill all other live memories. Otherwise we don't
5153 satisfy the full barrier semantics of the intrinsic. */
5154 mem = validize_mem (gen_rtx_MEM (mode, addr));
5155
5156   /* The memory must be aligned at least as strictly as the mode requires. */
5157 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5158 get_pointer_alignment (loc)));
5159 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5160 MEM_VOLATILE_P (mem) = 1;
5161
5162 return mem;
5163 }
5164
5165 /* Make sure an argument is in the right mode.
5166 EXP is the tree argument.
5167 MODE is the mode it should be in. */
5168
5169 static rtx
5170 expand_expr_force_mode (tree exp, machine_mode mode)
5171 {
5172 rtx val;
5173 machine_mode old_mode;
5174
5175 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5176 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5177 of CONST_INTs, where we know the old_mode only from the call argument. */
5178
5179 old_mode = GET_MODE (val);
5180 if (old_mode == VOIDmode)
5181 old_mode = TYPE_MODE (TREE_TYPE (exp));
5182 val = convert_modes (mode, old_mode, val, 1);
5183 return val;
5184 }
5185
5186
5187 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5188 EXP is the CALL_EXPR. CODE is the rtx code
5189 that corresponds to the arithmetic or logical operation from the name;
5190 an exception here is that NOT actually means NAND. TARGET is an optional
5191 place for us to store the results; AFTER is true if this is the
5192    xxx_and_fetch form, i.e. if the value after the operation is returned. */
5193
5194 static rtx
5195 expand_builtin_sync_operation (machine_mode mode, tree exp,
5196 enum rtx_code code, bool after,
5197 rtx target)
5198 {
5199 rtx val, mem;
5200 location_t loc = EXPR_LOCATION (exp);
5201
5202 if (code == NOT && warn_sync_nand)
5203 {
5204 tree fndecl = get_callee_fndecl (exp);
5205 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5206
5207 static bool warned_f_a_n, warned_n_a_f;
5208
5209 switch (fcode)
5210 {
5211 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5212 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5213 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5214 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5215 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5216 if (warned_f_a_n)
5217 break;
5218
5219 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5220 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5221 warned_f_a_n = true;
5222 break;
5223
5224 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5225 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5226 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5227 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5228 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5229 if (warned_n_a_f)
5230 break;
5231
5232 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5233 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5234 warned_n_a_f = true;
5235 break;
5236
5237 default:
5238 gcc_unreachable ();
5239 }
5240 }
5241
5242 /* Expand the operands. */
5243 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5244 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5245
5246 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5247 after);
5248 }
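
/* Source-level illustration of the NAND semantics the warning above is
   about (per the GCC manual; an editor's sketch, not part of the
   expander):

       // GCC 4.4 and later:
       //   tmp = *ptr; *ptr = ~(tmp & value); return tmp;   (fetch_and_nand)
       // GCC 4.3 and earlier:
       //   tmp = *ptr; *ptr = ~tmp & value;  return tmp;

   which is why code relying on the old behaviour gets the inform ()
   above.  */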
5249
5250 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5251 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5252 true if this is the boolean form. TARGET is a place for us to store the
5253 results; this is NOT optional if IS_BOOL is true. */
5254
5255 static rtx
5256 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5257 bool is_bool, rtx target)
5258 {
5259 rtx old_val, new_val, mem;
5260 rtx *pbool, *poval;
5261
5262 /* Expand the operands. */
5263 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5264 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5265 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5266
5267 pbool = poval = NULL;
5268 if (target != const0_rtx)
5269 {
5270 if (is_bool)
5271 pbool = &target;
5272 else
5273 poval = &target;
5274 }
5275 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5276 false, MEMMODEL_SYNC_SEQ_CST,
5277 MEMMODEL_SYNC_SEQ_CST))
5278 return NULL_RTX;
5279
5280 return target;
5281 }
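
/* Usage sketch (illustrative only) of the two forms handled here:

       bool __sync_bool_compare_and_swap (T *p, T oldv, T newv);
       //   true iff *p equalled OLDV and was replaced by NEWV
       T    __sync_val_compare_and_swap  (T *p, T oldv, T newv);
       //   returns the previous contents of *p

   both perform a full barrier, matching the MEMMODEL_SYNC_SEQ_CST models
   passed above.  */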
5282
5283 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5284 general form is actually an atomic exchange, and some targets only
5285 support a reduced form with the second argument being a constant 1.
5286 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5287 the results. */
5288
5289 static rtx
5290 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5291 rtx target)
5292 {
5293 rtx val, mem;
5294
5295 /* Expand the operands. */
5296 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5297 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5298
5299 return expand_sync_lock_test_and_set (target, mem, val);
5300 }
5301
5302 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5303
5304 static void
5305 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5306 {
5307 rtx mem;
5308
5309 /* Expand the operands. */
5310 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5311
5312 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5313 }
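
/* Illustrative pairing of the two builtins above (editor's sketch):

       static volatile int lock;

       void enter (void)
       {
         while (__sync_lock_test_and_set (&lock, 1))   // acquire barrier
           ;                                           // spin while held
       }

       void leave (void)
       {
         __sync_lock_release (&lock);   // release barrier; writes 0
       }

   some targets only implement the exchange-with-1 form, which is exactly
   what this usage needs.  */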
5314
5315 /* Given an integer representing an ``enum memmodel'', verify its
5316 correctness and return the memory model enum. */
5317
5318 static enum memmodel
5319 get_memmodel (tree exp)
5320 {
5321 rtx op;
5322 unsigned HOST_WIDE_INT val;
5323
5324   /* If the parameter is not a constant, it's a run-time value so we'll just
5325 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5326 if (TREE_CODE (exp) != INTEGER_CST)
5327 return MEMMODEL_SEQ_CST;
5328
5329 op = expand_normal (exp);
5330
5331 val = INTVAL (op);
5332 if (targetm.memmodel_check)
5333 val = targetm.memmodel_check (val);
5334 else if (val & ~MEMMODEL_MASK)
5335 {
5336 warning (OPT_Winvalid_memory_model,
5337 "Unknown architecture specifier in memory model to builtin.");
5338 return MEMMODEL_SEQ_CST;
5339 }
5340
5341   /* We should never see a user explicit SYNC memory model, so >= LAST works. */
5342 if (memmodel_base (val) >= MEMMODEL_LAST)
5343 {
5344 warning (OPT_Winvalid_memory_model,
5345 "invalid memory model argument to builtin");
5346 return MEMMODEL_SEQ_CST;
5347 }
5348
5349 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5350 be conservative and promote consume to acquire. */
5351 if (val == MEMMODEL_CONSUME)
5352 val = MEMMODEL_ACQUIRE;
5353
5354 return (enum memmodel) val;
5355 }
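
/* For example (illustration only), a call such as

       __atomic_load_n (p, __ATOMIC_CONSUME);

   arrives here with an INTEGER_CST equal to MEMMODEL_CONSUME and, per the
   Bugzilla 59448 workaround above, is treated as MEMMODEL_ACQUIRE; a model
   held in a run-time variable is conservatively treated as
   MEMMODEL_SEQ_CST.  */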
5356
5357 /* Expand the __atomic_exchange intrinsic:
5358 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5359 EXP is the CALL_EXPR.
5360 TARGET is an optional place for us to store the results. */
5361
5362 static rtx
5363 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5364 {
5365 rtx val, mem;
5366 enum memmodel model;
5367
5368 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5369
5370 if (!flag_inline_atomics)
5371 return NULL_RTX;
5372
5373 /* Expand the operands. */
5374 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5375 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5376
5377 return expand_atomic_exchange (target, mem, val, model);
5378 }
5379
5380 /* Expand the __atomic_compare_exchange intrinsic:
5381 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5382 TYPE desired, BOOL weak,
5383 enum memmodel success,
5384 enum memmodel failure)
5385 EXP is the CALL_EXPR.
5386 TARGET is an optional place for us to store the results. */
5387
5388 static rtx
5389 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5390 rtx target)
5391 {
5392 rtx expect, desired, mem, oldval;
5393 rtx_code_label *label;
5394 enum memmodel success, failure;
5395 tree weak;
5396 bool is_weak;
5397
5398 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5399 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5400
5401 if (failure > success)
5402 {
5403 warning (OPT_Winvalid_memory_model,
5404 "failure memory model cannot be stronger than success memory "
5405 "model for %<__atomic_compare_exchange%>");
5406 success = MEMMODEL_SEQ_CST;
5407 }
5408
5409 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5410 {
5411 warning (OPT_Winvalid_memory_model,
5412 "invalid failure memory model for "
5413 "%<__atomic_compare_exchange%>");
5414 failure = MEMMODEL_SEQ_CST;
5415 success = MEMMODEL_SEQ_CST;
5416 }
5417
5418
5419 if (!flag_inline_atomics)
5420 return NULL_RTX;
5421
5422 /* Expand the operands. */
5423 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5424
5425 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5426 expect = convert_memory_address (Pmode, expect);
5427 expect = gen_rtx_MEM (mode, expect);
5428 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5429
5430 weak = CALL_EXPR_ARG (exp, 3);
5431 is_weak = false;
5432 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5433 is_weak = true;
5434
5435 if (target == const0_rtx)
5436 target = NULL;
5437
5438   /* Lest the rtl backend create a race condition with an improper store
5439 to memory, always create a new pseudo for OLDVAL. */
5440 oldval = NULL;
5441
5442 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5443 is_weak, success, failure))
5444 return NULL_RTX;
5445
5446 /* Conditionally store back to EXPECT, lest we create a race condition
5447 with an improper store to memory. */
5448 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5449 the normal case where EXPECT is totally private, i.e. a register. At
5450 which point the store can be unconditional. */
5451 label = gen_label_rtx ();
5452 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5453 GET_MODE (target), 1, label);
5454 emit_move_insn (expect, oldval);
5455 emit_label (label);
5456
5457 return target;
5458 }
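
/* Source-level sketch of the conditional store-back above (an editor's
   illustration of the documented interface, not a definitive trace):

       T expected = ...;
       bool ok = __atomic_compare_exchange_n (&obj, &expected, desired,
                                              false,   // weak
                                              __ATOMIC_SEQ_CST,
                                              __ATOMIC_SEQ_CST);
       // on failure, EXPECTED now holds the value observed in OBJ --
       // that update is the emit_move_insn (expect, oldval) path above.  */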
5459
5460 /* Expand the __atomic_load intrinsic:
5461 TYPE __atomic_load (TYPE *object, enum memmodel)
5462 EXP is the CALL_EXPR.
5463 TARGET is an optional place for us to store the results. */
5464
5465 static rtx
5466 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5467 {
5468 rtx mem;
5469 enum memmodel model;
5470
5471 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5472 if (is_mm_release (model) || is_mm_acq_rel (model))
5473 {
5474 warning (OPT_Winvalid_memory_model,
5475 "invalid memory model for %<__atomic_load%>");
5476 model = MEMMODEL_SEQ_CST;
5477 }
5478
5479 if (!flag_inline_atomics)
5480 return NULL_RTX;
5481
5482 /* Expand the operand. */
5483 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5484
5485 return expand_atomic_load (target, mem, model);
5486 }
5487
5488
5489 /* Expand the __atomic_store intrinsic:
5490 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5491 EXP is the CALL_EXPR.
5492 TARGET is an optional place for us to store the results. */
5493
5494 static rtx
5495 expand_builtin_atomic_store (machine_mode mode, tree exp)
5496 {
5497 rtx mem, val;
5498 enum memmodel model;
5499
5500 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5501 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5502 || is_mm_release (model)))
5503 {
5504 warning (OPT_Winvalid_memory_model,
5505 "invalid memory model for %<__atomic_store%>");
5506 model = MEMMODEL_SEQ_CST;
5507 }
5508
5509 if (!flag_inline_atomics)
5510 return NULL_RTX;
5511
5512 /* Expand the operands. */
5513 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5514 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5515
5516 return expand_atomic_store (mem, val, model, false);
5517 }
5518
5519 /* Expand the __atomic_fetch_XXX intrinsic:
5520 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5521 EXP is the CALL_EXPR.
5522 TARGET is an optional place for us to store the results.
5523    CODE is the operation, PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
5524 FETCH_AFTER is true if returning the result of the operation.
5525 FETCH_AFTER is false if returning the value before the operation.
5526 IGNORE is true if the result is not used.
5527 EXT_CALL is the correct builtin for an external call if this cannot be
5528 resolved to an instruction sequence. */
5529
5530 static rtx
5531 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5532 enum rtx_code code, bool fetch_after,
5533 bool ignore, enum built_in_function ext_call)
5534 {
5535 rtx val, mem, ret;
5536 enum memmodel model;
5537 tree fndecl;
5538 tree addr;
5539
5540 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5541
5542 /* Expand the operands. */
5543 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5544 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5545
5546 /* Only try generating instructions if inlining is turned on. */
5547 if (flag_inline_atomics)
5548 {
5549 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5550 if (ret)
5551 return ret;
5552 }
5553
5554   /* Return NULL_RTX if no separate library routine is needed for this call. */
5555 if (ext_call == BUILT_IN_NONE)
5556 return NULL_RTX;
5557
5558 /* Change the call to the specified function. */
5559 fndecl = get_callee_fndecl (exp);
5560 addr = CALL_EXPR_FN (exp);
5561 STRIP_NOPS (addr);
5562
5563 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5564 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5565
5566 /* Expand the call here so we can emit trailing code. */
5567 ret = expand_call (exp, target, ignore);
5568
5569   /* Restore the original function just in case it matters. */
5570 TREE_OPERAND (addr, 0) = fndecl;
5571
5572 /* Then issue the arithmetic correction to return the right result. */
5573 if (!ignore)
5574 {
5575 if (code == NOT)
5576 {
5577 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5578 OPTAB_LIB_WIDEN);
5579 ret = expand_simple_unop (mode, NOT, ret, target, true);
5580 }
5581 else
5582 ret = expand_simple_binop (mode, code, ret, val, target, true,
5583 OPTAB_LIB_WIDEN);
5584 }
5585 return ret;
5586 }
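
/* Worked example of the correction above (editor's illustration): if
   __atomic_add_fetch_4 cannot be inlined, the call is redirected to
   __atomic_fetch_add_4, which returns the old value, and we then emit
   the equivalent of

       ret = ret + val;        // CODE == PLUS, FETCH_AFTER form

   for NAND (CODE == NOT) the recovered result is

       ret = ~(ret & val);

   matching the operation actually applied to memory.  */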
5587
5588 /* Expand an atomic clear operation.
5589    void __atomic_clear (BOOL *obj, enum memmodel)
5590 EXP is the call expression. */
5591
5592 static rtx
5593 expand_builtin_atomic_clear (tree exp)
5594 {
5595 machine_mode mode;
5596 rtx mem, ret;
5597 enum memmodel model;
5598
5599 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5600 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5601 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5602
5603 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5604 {
5605 warning (OPT_Winvalid_memory_model,
5606 "invalid memory model for %<__atomic_store%>");
5607 model = MEMMODEL_SEQ_CST;
5608 }
5609
5610   /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5611      Failing both, emit a plain store.  The only way the atomic forms can
5612      fail is if the bool type is larger than a word size.  Unlikely, but
5613      handle it anyway for completeness.  Assume a single-threaded model since
5614      there is no atomic support in this case, and no barriers are required. */
5615 ret = expand_atomic_store (mem, const0_rtx, model, true);
5616 if (!ret)
5617 emit_move_insn (mem, const0_rtx);
5618 return const0_rtx;
5619 }
5620
5621 /* Expand an atomic test_and_set operation.
5622    bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5623 EXP is the call expression. */
5624
5625 static rtx
5626 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5627 {
5628 rtx mem;
5629 enum memmodel model;
5630 machine_mode mode;
5631
5632 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5633 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5634 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5635
5636 return expand_atomic_test_and_set (target, mem, model);
5637 }
5638
5639
5640 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5641 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5642
5643 static tree
5644 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5645 {
5646 int size;
5647 machine_mode mode;
5648 unsigned int mode_align, type_align;
5649
5650 if (TREE_CODE (arg0) != INTEGER_CST)
5651 return NULL_TREE;
5652
5653 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5654 mode = mode_for_size (size, MODE_INT, 0);
5655 mode_align = GET_MODE_ALIGNMENT (mode);
5656
5657 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5658 type_align = mode_align;
5659 else
5660 {
5661 tree ttype = TREE_TYPE (arg1);
5662
5663 /* This function is usually invoked and folded immediately by the front
5664 end before anything else has a chance to look at it. The pointer
5665 parameter at this point is usually cast to a void *, so check for that
5666 and look past the cast. */
5667 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5668 && VOID_TYPE_P (TREE_TYPE (ttype)))
5669 arg1 = TREE_OPERAND (arg1, 0);
5670
5671 ttype = TREE_TYPE (arg1);
5672 gcc_assert (POINTER_TYPE_P (ttype));
5673
5674 /* Get the underlying type of the object. */
5675 ttype = TREE_TYPE (ttype);
5676 type_align = TYPE_ALIGN (ttype);
5677 }
5678
5679 /* If the object has smaller alignment, the lock free routines cannot
5680 be used. */
5681 if (type_align < mode_align)
5682 return boolean_false_node;
5683
5684 /* Check if a compare_and_swap pattern exists for the mode which represents
5685 the required size. The pattern is not allowed to fail, so the existence
5686 of the pattern indicates support is present. */
5687 if (can_compare_and_swap_p (mode, true))
5688 return boolean_true_node;
5689 else
5690 return boolean_false_node;
5691 }
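
/* Example (illustration only): on a typical target where the 32-bit
   integer mode is 32-bit aligned and has a compare-and-swap pattern,

       __atomic_always_lock_free (4, 0);   // folds to true

   while for a deliberately under-aligned object,

       struct s { char c[4]; } __attribute__ ((packed, aligned (1)));
       struct s *p = ...;
       __atomic_always_lock_free (sizeof (struct s), p);   // folds to false

   the second call fails the type_align < mode_align check above.  */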
5692
5693 /* Return true if the parameters to call EXP represent an object which will
5694 always generate lock free instructions. The first argument represents the
5695 size of the object, and the second parameter is a pointer to the object
5696 itself. If NULL is passed for the object, then the result is based on
5697 typical alignment for an object of the specified size. Otherwise return
5698 false. */
5699
5700 static rtx
5701 expand_builtin_atomic_always_lock_free (tree exp)
5702 {
5703 tree size;
5704 tree arg0 = CALL_EXPR_ARG (exp, 0);
5705 tree arg1 = CALL_EXPR_ARG (exp, 1);
5706
5707 if (TREE_CODE (arg0) != INTEGER_CST)
5708 {
5709 error ("non-constant argument 1 to __atomic_always_lock_free");
5710 return const0_rtx;
5711 }
5712
5713 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5714 if (size == boolean_true_node)
5715 return const1_rtx;
5716 return const0_rtx;
5717 }
5718
5719 /* Return true if it can be determined that the object ARG1 of size ARG0
5720    is lock free on this architecture; otherwise return NULL_TREE. */
5721
5722 static tree
5723 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5724 {
5725 if (!flag_inline_atomics)
5726 return NULL_TREE;
5727
5728 /* If it isn't always lock free, don't generate a result. */
5729 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5730 return boolean_true_node;
5731
5732 return NULL_TREE;
5733 }
5734
5735 /* Return true if the parameters to call EXP represent an object which will
5736 always generate lock free instructions. The first argument represents the
5737 size of the object, and the second parameter is a pointer to the object
5738 itself. If NULL is passed for the object, then the result is based on
5739 typical alignment for an object of the specified size. Otherwise return
5740    NULL.  */
5741
5742 static rtx
5743 expand_builtin_atomic_is_lock_free (tree exp)
5744 {
5745 tree size;
5746 tree arg0 = CALL_EXPR_ARG (exp, 0);
5747 tree arg1 = CALL_EXPR_ARG (exp, 1);
5748
5749 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5750 {
5751 error ("non-integer argument 1 to __atomic_is_lock_free");
5752 return NULL_RTX;
5753 }
5754
5755 if (!flag_inline_atomics)
5756 return NULL_RTX;
5757
5758 /* If the value is known at compile time, return the RTX for it. */
5759 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5760 if (size == boolean_true_node)
5761 return const1_rtx;
5762
5763 return NULL_RTX;
5764 }
5765
5766 /* Expand the __atomic_thread_fence intrinsic:
5767 void __atomic_thread_fence (enum memmodel)
5768 EXP is the CALL_EXPR. */
5769
5770 static void
5771 expand_builtin_atomic_thread_fence (tree exp)
5772 {
5773 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5774 expand_mem_thread_fence (model);
5775 }
5776
5777 /* Expand the __atomic_signal_fence intrinsic:
5778 void __atomic_signal_fence (enum memmodel)
5779 EXP is the CALL_EXPR. */
5780
5781 static void
5782 expand_builtin_atomic_signal_fence (tree exp)
5783 {
5784 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5785 expand_mem_signal_fence (model);
5786 }
5787
5788 /* Expand the __sync_synchronize intrinsic. */
5789
5790 static void
5791 expand_builtin_sync_synchronize (void)
5792 {
5793 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5794 }
5795
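/* Expand the __builtin_thread_pointer intrinsic:
     void *__builtin_thread_pointer (void)
   EXP is the CALL_EXPR; TARGET is an optional place for the result.  On
   targets providing get_thread_pointer_optab (illustratively, those with
   a dedicated TLS base register) this becomes a single register read;
   otherwise we error out below.  */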
5796 static rtx
5797 expand_builtin_thread_pointer (tree exp, rtx target)
5798 {
5799 enum insn_code icode;
5800 if (!validate_arglist (exp, VOID_TYPE))
5801 return const0_rtx;
5802 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5803 if (icode != CODE_FOR_nothing)
5804 {
5805 struct expand_operand op;
5806       /* If the target is not suitable then create a new target. */
5807 if (target == NULL_RTX
5808 || !REG_P (target)
5809 || GET_MODE (target) != Pmode)
5810 target = gen_reg_rtx (Pmode);
5811 create_output_operand (&op, target, Pmode);
5812 expand_insn (icode, 1, &op);
5813 return target;
5814 }
5815 error ("__builtin_thread_pointer is not supported on this target");
5816 return const0_rtx;
5817 }
5818
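/* Expand the __builtin_set_thread_pointer intrinsic:
     void __builtin_set_thread_pointer (void *ptr)
   EXP is the CALL_EXPR.  The value is written to the thread-pointer
   register when set_thread_pointer_optab is provided; otherwise we
   error out below.  */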
5819 static void
5820 expand_builtin_set_thread_pointer (tree exp)
5821 {
5822 enum insn_code icode;
5823 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5824 return;
5825 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5826 if (icode != CODE_FOR_nothing)
5827 {
5828 struct expand_operand op;
5829 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5830 Pmode, EXPAND_NORMAL);
5831 create_input_operand (&op, val, Pmode);
5832 expand_insn (icode, 1, &op);
5833 return;
5834 }
5835 error ("__builtin_set_thread_pointer is not supported on this target");
5836 }
5837
5838 \f
5839 /* Emit code to restore a previously saved value of the stack pointer. */
5840
5841 static void
5842 expand_stack_restore (tree var)
5843 {
5844 rtx_insn *prev;
5845 rtx sa = expand_normal (var);
5846
5847 sa = convert_memory_address (Pmode, sa);
5848
5849 prev = get_last_insn ();
5850 emit_stack_restore (SAVE_BLOCK, sa);
5851
5852 record_new_stack_level ();
5853
5854 fixup_args_size_notes (prev, get_last_insn (), 0);
5855 }
5856
5857 /* Emit code to save the current value of the stack pointer. */
5858
5859 static rtx
5860 expand_stack_save (void)
5861 {
5862 rtx ret = NULL_RTX;
5863
5864 emit_stack_save (SAVE_BLOCK, &ret);
5865 return ret;
5866 }
5867
5868
5869 /* Expand OpenACC acc_on_device.
5870
5871 This has to happen late (that is, not in early folding; expand_builtin_*,
5872 rather than fold_builtin_*), as we have to act differently for host and
5873 acceleration device (ACCEL_COMPILER conditional). */
5874
5875 static rtx
5876 expand_builtin_acc_on_device (tree exp, rtx target)
5877 {
5878 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5879 return NULL_RTX;
5880
5881 tree arg = CALL_EXPR_ARG (exp, 0);
5882
5883 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5884 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5885 rtx v = expand_normal (arg), v1, v2;
5886 #ifdef ACCEL_COMPILER
5887 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5888 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
5889 #else
5890 v1 = GEN_INT (GOMP_DEVICE_NONE);
5891 v2 = GEN_INT (GOMP_DEVICE_HOST);
5892 #endif
5893 machine_mode target_mode = TYPE_MODE (integer_type_node);
5894 if (!target || !register_operand (target, target_mode))
5895 target = gen_reg_rtx (target_mode);
5896 emit_move_insn (target, const1_rtx);
5897 rtx_code_label *done_label = gen_label_rtx ();
5898 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
5899 NULL, done_label, PROB_EVEN);
5900 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
5901 NULL, done_label, PROB_EVEN);
5902 emit_move_insn (target, const0_rtx);
5903 emit_label (done_label);
5904
5905 return target;
5906 }
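
/* Host-side illustration (editor's sketch, not a definitive trace): when
   ACCEL_COMPILER is not defined, the comparison above is effectively

       arg == GOMP_DEVICE_NONE || arg == GOMP_DEVICE_HOST

   so a source-level acc_on_device (acc_device_host) evaluates to 1 on the
   host, while code compiled for an accelerator instead tests
   GOMP_DEVICE_NOT_HOST and the device's own code via the ACCEL_COMPILER
   arm.  */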
5907
5908
5909 /* Expand an expression EXP that calls a built-in function,
5910 with result going to TARGET if that's convenient
5911 (and in mode MODE if that's convenient).
5912 SUBTARGET may be used as the target for computing one of EXP's operands.
5913 IGNORE is nonzero if the value is to be ignored. */
5914
5915 rtx
5916 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5917 int ignore)
5918 {
5919 tree fndecl = get_callee_fndecl (exp);
5920 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5921 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5922 int flags;
5923
5924 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5925 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5926
5927 /* When ASan is enabled, we don't want to expand some memory/string
5928 builtins and rely on libsanitizer's hooks. This allows us to avoid
5929    redundant checks and be sure that a possible overflow will be detected
5930 by ASan. */
5931
5932 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5933 return expand_call (exp, target, ignore);
5934
5935 /* When not optimizing, generate calls to library functions for a certain
5936 set of builtins. */
5937 if (!optimize
5938 && !called_as_built_in (fndecl)
5939 && fcode != BUILT_IN_FORK
5940 && fcode != BUILT_IN_EXECL
5941 && fcode != BUILT_IN_EXECV
5942 && fcode != BUILT_IN_EXECLP
5943 && fcode != BUILT_IN_EXECLE
5944 && fcode != BUILT_IN_EXECVP
5945 && fcode != BUILT_IN_EXECVE
5946 && fcode != BUILT_IN_ALLOCA
5947 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5948 && fcode != BUILT_IN_FREE
5949 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5950 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5951 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5952 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5953 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5954 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5955 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5956 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5957 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5958 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5959 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5960 && fcode != BUILT_IN_CHKP_BNDRET)
5961 return expand_call (exp, target, ignore);
5962
5963 /* The built-in function expanders test for target == const0_rtx
5964 to determine whether the function's result will be ignored. */
5965 if (ignore)
5966 target = const0_rtx;
5967
5968 /* If the result of a pure or const built-in function is ignored, and
5969 none of its arguments are volatile, we can avoid expanding the
5970 built-in call and just evaluate the arguments for side-effects. */
5971 if (target == const0_rtx
5972 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5973 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5974 {
5975 bool volatilep = false;
5976 tree arg;
5977 call_expr_arg_iterator iter;
5978
5979 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5980 if (TREE_THIS_VOLATILE (arg))
5981 {
5982 volatilep = true;
5983 break;
5984 }
5985
5986 if (! volatilep)
5987 {
5988 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5989 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5990 return const0_rtx;
5991 }
5992 }
5993
5994 /* expand_builtin_with_bounds is supposed to be used for
5995 instrumented builtin calls. */
5996 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5997
5998 switch (fcode)
5999 {
6000 CASE_FLT_FN (BUILT_IN_FABS):
6001 case BUILT_IN_FABSD32:
6002 case BUILT_IN_FABSD64:
6003 case BUILT_IN_FABSD128:
6004 target = expand_builtin_fabs (exp, target, subtarget);
6005 if (target)
6006 return target;
6007 break;
6008
6009 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6010 target = expand_builtin_copysign (exp, target, subtarget);
6011 if (target)
6012 return target;
6013 break;
6014
6015 /* Just do a normal library call if we were unable to fold
6016 the values. */
6017 CASE_FLT_FN (BUILT_IN_CABS):
6018 break;
6019
6020 CASE_FLT_FN (BUILT_IN_EXP):
6021 CASE_FLT_FN (BUILT_IN_EXP10):
6022 CASE_FLT_FN (BUILT_IN_POW10):
6023 CASE_FLT_FN (BUILT_IN_EXP2):
6024 CASE_FLT_FN (BUILT_IN_EXPM1):
6025 CASE_FLT_FN (BUILT_IN_LOGB):
6026 CASE_FLT_FN (BUILT_IN_LOG):
6027 CASE_FLT_FN (BUILT_IN_LOG10):
6028 CASE_FLT_FN (BUILT_IN_LOG2):
6029 CASE_FLT_FN (BUILT_IN_LOG1P):
6030 CASE_FLT_FN (BUILT_IN_TAN):
6031 CASE_FLT_FN (BUILT_IN_ASIN):
6032 CASE_FLT_FN (BUILT_IN_ACOS):
6033 CASE_FLT_FN (BUILT_IN_ATAN):
6034 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6035 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6036 because of possible accuracy problems. */
6037 if (! flag_unsafe_math_optimizations)
6038 break;
6039 CASE_FLT_FN (BUILT_IN_SQRT):
6040 CASE_FLT_FN (BUILT_IN_FLOOR):
6041 CASE_FLT_FN (BUILT_IN_CEIL):
6042 CASE_FLT_FN (BUILT_IN_TRUNC):
6043 CASE_FLT_FN (BUILT_IN_ROUND):
6044 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6045 CASE_FLT_FN (BUILT_IN_RINT):
6046 target = expand_builtin_mathfn (exp, target, subtarget);
6047 if (target)
6048 return target;
6049 break;
6050
6051 CASE_FLT_FN (BUILT_IN_FMA):
6052 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6053 if (target)
6054 return target;
6055 break;
6056
6057 CASE_FLT_FN (BUILT_IN_ILOGB):
6058 if (! flag_unsafe_math_optimizations)
6059 break;
6060 CASE_FLT_FN (BUILT_IN_ISINF):
6061 CASE_FLT_FN (BUILT_IN_FINITE):
6062 case BUILT_IN_ISFINITE:
6063 case BUILT_IN_ISNORMAL:
6064 target = expand_builtin_interclass_mathfn (exp, target);
6065 if (target)
6066 return target;
6067 break;
6068
6069 CASE_FLT_FN (BUILT_IN_ICEIL):
6070 CASE_FLT_FN (BUILT_IN_LCEIL):
6071 CASE_FLT_FN (BUILT_IN_LLCEIL):
6072 CASE_FLT_FN (BUILT_IN_LFLOOR):
6073 CASE_FLT_FN (BUILT_IN_IFLOOR):
6074 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6075 target = expand_builtin_int_roundingfn (exp, target);
6076 if (target)
6077 return target;
6078 break;
6079
6080 CASE_FLT_FN (BUILT_IN_IRINT):
6081 CASE_FLT_FN (BUILT_IN_LRINT):
6082 CASE_FLT_FN (BUILT_IN_LLRINT):
6083 CASE_FLT_FN (BUILT_IN_IROUND):
6084 CASE_FLT_FN (BUILT_IN_LROUND):
6085 CASE_FLT_FN (BUILT_IN_LLROUND):
6086 target = expand_builtin_int_roundingfn_2 (exp, target);
6087 if (target)
6088 return target;
6089 break;
6090
6091 CASE_FLT_FN (BUILT_IN_POWI):
6092 target = expand_builtin_powi (exp, target);
6093 if (target)
6094 return target;
6095 break;
6096
6097 CASE_FLT_FN (BUILT_IN_ATAN2):
6098 CASE_FLT_FN (BUILT_IN_LDEXP):
6099 CASE_FLT_FN (BUILT_IN_SCALB):
6100 CASE_FLT_FN (BUILT_IN_SCALBN):
6101 CASE_FLT_FN (BUILT_IN_SCALBLN):
6102 if (! flag_unsafe_math_optimizations)
6103 break;
6104
6105 CASE_FLT_FN (BUILT_IN_FMOD):
6106 CASE_FLT_FN (BUILT_IN_REMAINDER):
6107 CASE_FLT_FN (BUILT_IN_DREM):
6108 CASE_FLT_FN (BUILT_IN_POW):
6109 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6110 if (target)
6111 return target;
6112 break;
6113
6114 CASE_FLT_FN (BUILT_IN_CEXPI):
6115 target = expand_builtin_cexpi (exp, target);
6116 gcc_assert (target);
6117 return target;
6118
6119 CASE_FLT_FN (BUILT_IN_SIN):
6120 CASE_FLT_FN (BUILT_IN_COS):
6121 if (! flag_unsafe_math_optimizations)
6122 break;
6123 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6124 if (target)
6125 return target;
6126 break;
6127
6128 CASE_FLT_FN (BUILT_IN_SINCOS):
6129 if (! flag_unsafe_math_optimizations)
6130 break;
6131 target = expand_builtin_sincos (exp);
6132 if (target)
6133 return target;
6134 break;
6135
6136 case BUILT_IN_APPLY_ARGS:
6137 return expand_builtin_apply_args ();
6138
6139 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6140 FUNCTION with a copy of the parameters described by
6141 ARGUMENTS, and ARGSIZE. It returns a block of memory
6142 allocated on the stack into which is stored all the registers
6143 that might possibly be used for returning the result of a
6144 function. ARGUMENTS is the value returned by
6145 __builtin_apply_args. ARGSIZE is the number of bytes of
6146 arguments that must be copied. ??? How should this value be
6147 computed? We'll also need a safe worst case value for varargs
6148 functions. */
6149 case BUILT_IN_APPLY:
6150 if (!validate_arglist (exp, POINTER_TYPE,
6151 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6152 && !validate_arglist (exp, REFERENCE_TYPE,
6153 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6154 return const0_rtx;
6155 else
6156 {
6157 rtx ops[3];
6158
6159 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6160 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6161 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6162
6163 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6164 }
6165
6166 /* __builtin_return (RESULT) causes the function to return the
6167 value described by RESULT. RESULT is address of the block of
6168 memory returned by __builtin_apply. */
6169 case BUILT_IN_RETURN:
6170 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6171 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6172 return const0_rtx;
6173
6174 case BUILT_IN_SAVEREGS:
6175 return expand_builtin_saveregs ();
6176
6177 case BUILT_IN_VA_ARG_PACK:
6178 /* All valid uses of __builtin_va_arg_pack () are removed during
6179 inlining. */
6180 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6181 return const0_rtx;
6182
6183 case BUILT_IN_VA_ARG_PACK_LEN:
6184 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6185 inlining. */
6186 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6187 return const0_rtx;
6188
6189 /* Return the address of the first anonymous stack arg. */
6190 case BUILT_IN_NEXT_ARG:
6191 if (fold_builtin_next_arg (exp, false))
6192 return const0_rtx;
6193 return expand_builtin_next_arg ();
6194
6195 case BUILT_IN_CLEAR_CACHE:
6196 target = expand_builtin___clear_cache (exp);
6197 if (target)
6198 return target;
6199 break;
6200
6201 case BUILT_IN_CLASSIFY_TYPE:
6202 return expand_builtin_classify_type (exp);
6203
6204 case BUILT_IN_CONSTANT_P:
6205 return const0_rtx;
6206
6207 case BUILT_IN_FRAME_ADDRESS:
6208 case BUILT_IN_RETURN_ADDRESS:
6209 return expand_builtin_frame_address (fndecl, exp);
6210
6211 /* Returns the address of the area where the structure is returned.
6212 0 otherwise. */
6213 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6214 if (call_expr_nargs (exp) != 0
6215 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6216 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6217 return const0_rtx;
6218 else
6219 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6220
6221 case BUILT_IN_ALLOCA:
6222 case BUILT_IN_ALLOCA_WITH_ALIGN:
6223 /* If the allocation stems from the declaration of a variable-sized
6224 object, it cannot accumulate. */
6225 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6226 if (target)
6227 return target;
6228 break;
6229
6230 case BUILT_IN_STACK_SAVE:
6231 return expand_stack_save ();
6232
6233 case BUILT_IN_STACK_RESTORE:
6234 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6235 return const0_rtx;
6236
6237 case BUILT_IN_BSWAP16:
6238 case BUILT_IN_BSWAP32:
6239 case BUILT_IN_BSWAP64:
6240 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6241 if (target)
6242 return target;
6243 break;
6244
6245 CASE_INT_FN (BUILT_IN_FFS):
6246 target = expand_builtin_unop (target_mode, exp, target,
6247 subtarget, ffs_optab);
6248 if (target)
6249 return target;
6250 break;
6251
6252 CASE_INT_FN (BUILT_IN_CLZ):
6253 target = expand_builtin_unop (target_mode, exp, target,
6254 subtarget, clz_optab);
6255 if (target)
6256 return target;
6257 break;
6258
6259 CASE_INT_FN (BUILT_IN_CTZ):
6260 target = expand_builtin_unop (target_mode, exp, target,
6261 subtarget, ctz_optab);
6262 if (target)
6263 return target;
6264 break;
6265
6266 CASE_INT_FN (BUILT_IN_CLRSB):
6267 target = expand_builtin_unop (target_mode, exp, target,
6268 subtarget, clrsb_optab);
6269 if (target)
6270 return target;
6271 break;
6272
6273 CASE_INT_FN (BUILT_IN_POPCOUNT):
6274 target = expand_builtin_unop (target_mode, exp, target,
6275 subtarget, popcount_optab);
6276 if (target)
6277 return target;
6278 break;
6279
6280 CASE_INT_FN (BUILT_IN_PARITY):
6281 target = expand_builtin_unop (target_mode, exp, target,
6282 subtarget, parity_optab);
6283 if (target)
6284 return target;
6285 break;
6286
6287 case BUILT_IN_STRLEN:
6288 target = expand_builtin_strlen (exp, target, target_mode);
6289 if (target)
6290 return target;
6291 break;
6292
6293 case BUILT_IN_STRCPY:
6294 target = expand_builtin_strcpy (exp, target);
6295 if (target)
6296 return target;
6297 break;
6298
6299 case BUILT_IN_STRNCPY:
6300 target = expand_builtin_strncpy (exp, target);
6301 if (target)
6302 return target;
6303 break;
6304
6305 case BUILT_IN_STPCPY:
6306 target = expand_builtin_stpcpy (exp, target, mode);
6307 if (target)
6308 return target;
6309 break;
6310
6311 case BUILT_IN_MEMCPY:
6312 target = expand_builtin_memcpy (exp, target);
6313 if (target)
6314 return target;
6315 break;
6316
6317 case BUILT_IN_MEMPCPY:
6318 target = expand_builtin_mempcpy (exp, target, mode);
6319 if (target)
6320 return target;
6321 break;
6322
6323 case BUILT_IN_MEMSET:
6324 target = expand_builtin_memset (exp, target, mode);
6325 if (target)
6326 return target;
6327 break;
6328
6329 case BUILT_IN_BZERO:
6330 target = expand_builtin_bzero (exp);
6331 if (target)
6332 return target;
6333 break;
6334
6335 case BUILT_IN_STRCMP:
6336 target = expand_builtin_strcmp (exp, target);
6337 if (target)
6338 return target;
6339 break;
6340
6341 case BUILT_IN_STRNCMP:
6342 target = expand_builtin_strncmp (exp, target, mode);
6343 if (target)
6344 return target;
6345 break;
6346
6347 case BUILT_IN_BCMP:
6348 case BUILT_IN_MEMCMP:
6349 target = expand_builtin_memcmp (exp, target, mode);
6350 if (target)
6351 return target;
6352 break;
6353
6354 case BUILT_IN_SETJMP:
6355 /* This should have been lowered to the builtins below. */
6356 gcc_unreachable ();
6357
6358 case BUILT_IN_SETJMP_SETUP:
6359 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6360 and the receiver label. */
6361 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6362 {
6363 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6364 VOIDmode, EXPAND_NORMAL);
6365 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6366 rtx_insn *label_r = label_rtx (label);
6367
6368 /* This is copied from the handling of non-local gotos. */
6369 expand_builtin_setjmp_setup (buf_addr, label_r);
6370 nonlocal_goto_handler_labels
6371 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6372 nonlocal_goto_handler_labels);
6373 /* ??? Do not let expand_label treat us as such since we would
6374 not want to be both on the list of non-local labels and on
6375 the list of forced labels. */
6376 FORCED_LABEL (label) = 0;
6377 return const0_rtx;
6378 }
6379 break;
6380
6381 case BUILT_IN_SETJMP_RECEIVER:
6382 /* __builtin_setjmp_receiver is passed the receiver label. */
6383 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6384 {
6385 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6386 rtx_insn *label_r = label_rtx (label);
6387
6388 expand_builtin_setjmp_receiver (label_r);
6389 return const0_rtx;
6390 }
6391 break;
6392
6393 /* __builtin_longjmp is passed a pointer to an array of five words.
6394 It's similar to the C library longjmp function but works with
6395 __builtin_setjmp above. */
6396 case BUILT_IN_LONGJMP:
6397 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6398 {
6399 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6400 VOIDmode, EXPAND_NORMAL);
6401 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6402
6403 if (value != const1_rtx)
6404 {
6405 error ("%<__builtin_longjmp%> second argument must be 1");
6406 return const0_rtx;
6407 }
6408
6409 expand_builtin_longjmp (buf_addr, value);
6410 return const0_rtx;
6411 }
6412 break;
6413
6414 case BUILT_IN_NONLOCAL_GOTO:
6415 target = expand_builtin_nonlocal_goto (exp);
6416 if (target)
6417 return target;
6418 break;
6419
6420 /* This updates the setjmp buffer that is its argument with the value
6421 of the current stack pointer. */
6422 case BUILT_IN_UPDATE_SETJMP_BUF:
6423 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6424 {
6425 rtx buf_addr
6426 = expand_normal (CALL_EXPR_ARG (exp, 0));
6427
6428 expand_builtin_update_setjmp_buf (buf_addr);
6429 return const0_rtx;
6430 }
6431 break;
6432
6433 case BUILT_IN_TRAP:
6434 expand_builtin_trap ();
6435 return const0_rtx;
6436
6437 case BUILT_IN_UNREACHABLE:
6438 expand_builtin_unreachable ();
6439 return const0_rtx;
6440
6441 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6442 case BUILT_IN_SIGNBITD32:
6443 case BUILT_IN_SIGNBITD64:
6444 case BUILT_IN_SIGNBITD128:
6445 target = expand_builtin_signbit (exp, target);
6446 if (target)
6447 return target;
6448 break;
6449
6450 /* Various hooks for the DWARF 2 __throw routine. */
6451 case BUILT_IN_UNWIND_INIT:
6452 expand_builtin_unwind_init ();
6453 return const0_rtx;
6454 case BUILT_IN_DWARF_CFA:
6455 return virtual_cfa_rtx;
6456 #ifdef DWARF2_UNWIND_INFO
6457 case BUILT_IN_DWARF_SP_COLUMN:
6458 return expand_builtin_dwarf_sp_column ();
6459 case BUILT_IN_INIT_DWARF_REG_SIZES:
6460 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6461 return const0_rtx;
6462 #endif
6463 case BUILT_IN_FROB_RETURN_ADDR:
6464 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6465 case BUILT_IN_EXTRACT_RETURN_ADDR:
6466 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6467 case BUILT_IN_EH_RETURN:
6468 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6469 CALL_EXPR_ARG (exp, 1));
6470 return const0_rtx;
6471 case BUILT_IN_EH_RETURN_DATA_REGNO:
6472 return expand_builtin_eh_return_data_regno (exp);
6473 case BUILT_IN_EXTEND_POINTER:
6474 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6475 case BUILT_IN_EH_POINTER:
6476 return expand_builtin_eh_pointer (exp);
6477 case BUILT_IN_EH_FILTER:
6478 return expand_builtin_eh_filter (exp);
6479 case BUILT_IN_EH_COPY_VALUES:
6480 return expand_builtin_eh_copy_values (exp);
6481
6482 case BUILT_IN_VA_START:
6483 return expand_builtin_va_start (exp);
6484 case BUILT_IN_VA_END:
6485 return expand_builtin_va_end (exp);
6486 case BUILT_IN_VA_COPY:
6487 return expand_builtin_va_copy (exp);
6488 case BUILT_IN_EXPECT:
6489 return expand_builtin_expect (exp, target);
6490 case BUILT_IN_ASSUME_ALIGNED:
6491 return expand_builtin_assume_aligned (exp, target);
6492 case BUILT_IN_PREFETCH:
6493 expand_builtin_prefetch (exp);
6494 return const0_rtx;
6495
6496 case BUILT_IN_INIT_TRAMPOLINE:
6497 return expand_builtin_init_trampoline (exp, true);
6498 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6499 return expand_builtin_init_trampoline (exp, false);
6500 case BUILT_IN_ADJUST_TRAMPOLINE:
6501 return expand_builtin_adjust_trampoline (exp);
6502
6503 case BUILT_IN_FORK:
6504 case BUILT_IN_EXECL:
6505 case BUILT_IN_EXECV:
6506 case BUILT_IN_EXECLP:
6507 case BUILT_IN_EXECLE:
6508 case BUILT_IN_EXECVP:
6509 case BUILT_IN_EXECVE:
6510 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6511 if (target)
6512 return target;
6513 break;
6514
6515 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6516 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6517 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6518 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6519 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6520 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6521 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6522 if (target)
6523 return target;
6524 break;
6525
6526 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6527 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6528 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6529 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6530 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6531 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6532 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6533 if (target)
6534 return target;
6535 break;
6536
6537 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6538 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6539 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6540 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6541 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6542 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6543 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6544 if (target)
6545 return target;
6546 break;
6547
6548 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6549 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6550 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6551 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6552 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6553 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6554 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6555 if (target)
6556 return target;
6557 break;
6558
6559 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6560 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6561 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6562 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6563 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6564 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6565 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6566 if (target)
6567 return target;
6568 break;
6569
6570 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6571 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6572 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6573 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6574 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6575 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6576 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6577 if (target)
6578 return target;
6579 break;
6580
6581 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6582 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6583 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6584 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6585 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6586 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6587 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6588 if (target)
6589 return target;
6590 break;
6591
6592 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6593 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6594 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6595 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6596 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6597 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6598 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6599 if (target)
6600 return target;
6601 break;
6602
6603 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6604 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6605 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6606 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6607 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6608 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6609 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6610 if (target)
6611 return target;
6612 break;
6613
6614 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6615 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6616 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6617 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6618 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6619 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6620 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6621 if (target)
6622 return target;
6623 break;
6624
6625 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6626 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6627 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6628 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6629 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6630 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6631 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6632 if (target)
6633 return target;
6634 break;
6635
6636 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6637 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6638 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6639 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6640 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6641 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6642 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6643 if (target)
6644 return target;
6645 break;
6646
6647 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6648 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6649 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6650 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6651 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6652 if (mode == VOIDmode)
6653 mode = TYPE_MODE (boolean_type_node);
6654 if (!target || !register_operand (target, mode))
6655 target = gen_reg_rtx (mode);
6656
6657 mode = get_builtin_sync_mode
6658 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6659 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6660 if (target)
6661 return target;
6662 break;
6663
6664 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6665 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6666 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6667 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6668 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6669 mode = get_builtin_sync_mode
6670 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6671 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6672 if (target)
6673 return target;
6674 break;
6675
6676 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6677 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6678 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6679 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6680 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6681 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6682 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6683 if (target)
6684 return target;
6685 break;
6686
6687 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6688 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6689 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6690 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6691 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6692 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6693 expand_builtin_sync_lock_release (mode, exp);
6694 return const0_rtx;
6695
6696 case BUILT_IN_SYNC_SYNCHRONIZE:
6697 expand_builtin_sync_synchronize ();
6698 return const0_rtx;
6699
6700 case BUILT_IN_ATOMIC_EXCHANGE_1:
6701 case BUILT_IN_ATOMIC_EXCHANGE_2:
6702 case BUILT_IN_ATOMIC_EXCHANGE_4:
6703 case BUILT_IN_ATOMIC_EXCHANGE_8:
6704 case BUILT_IN_ATOMIC_EXCHANGE_16:
6705 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6706 target = expand_builtin_atomic_exchange (mode, exp, target);
6707 if (target)
6708 return target;
6709 break;
6710
6711 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6712 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6713 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6714 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6715 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6716 {
6717 unsigned int nargs, z;
6718 vec<tree, va_gc> *vec;
6719
6720 mode =
6721 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6722 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6723 if (target)
6724 return target;
6725
6726 /* If this is turned into an external library call, the weak parameter
6727 must be dropped to match the expected parameter list. */
6728 nargs = call_expr_nargs (exp);
6729 vec_alloc (vec, nargs - 1);
6730 for (z = 0; z < 3; z++)
6731 vec->quick_push (CALL_EXPR_ARG (exp, z));
6732 /* Skip the boolean weak parameter. */
6733 for (z = 4; z < 6; z++)
6734 vec->quick_push (CALL_EXPR_ARG (exp, z));
6735 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6736 break;
6737 }
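
    /* Illustration of the argument reshuffle above (editor's sketch of the
       assumed external interface): the rebuilt CALL_EXPR passes arguments
       0, 1, 2, 4 and 5 of the original call, i.e.

           __atomic_compare_exchange_4 (ptr, expected, desired,
                                        success_order, failure_order);

       dropping the compile-time WEAK flag that the out-of-line routine
       does not take.  */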
6738
6739 case BUILT_IN_ATOMIC_LOAD_1:
6740 case BUILT_IN_ATOMIC_LOAD_2:
6741 case BUILT_IN_ATOMIC_LOAD_4:
6742 case BUILT_IN_ATOMIC_LOAD_8:
6743 case BUILT_IN_ATOMIC_LOAD_16:
6744 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6745 target = expand_builtin_atomic_load (mode, exp, target);
6746 if (target)
6747 return target;
6748 break;
6749
6750 case BUILT_IN_ATOMIC_STORE_1:
6751 case BUILT_IN_ATOMIC_STORE_2:
6752 case BUILT_IN_ATOMIC_STORE_4:
6753 case BUILT_IN_ATOMIC_STORE_8:
6754 case BUILT_IN_ATOMIC_STORE_16:
6755 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6756 target = expand_builtin_atomic_store (mode, exp);
6757 if (target)
6758 return const0_rtx;
6759 break;
6760
6761 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6762 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6763 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6764 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6765 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6766 {
6767 enum built_in_function lib;
6768 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6769 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6770 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6771 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6772 ignore, lib);
6773 if (target)
6774 return target;
6775 break;
6776 }
6777 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6778 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6779 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6780 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6781 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6782 {
6783 enum built_in_function lib;
6784 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6785 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6786 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6787 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6788 ignore, lib);
6789 if (target)
6790 return target;
6791 break;
6792 }
6793 case BUILT_IN_ATOMIC_AND_FETCH_1:
6794 case BUILT_IN_ATOMIC_AND_FETCH_2:
6795 case BUILT_IN_ATOMIC_AND_FETCH_4:
6796 case BUILT_IN_ATOMIC_AND_FETCH_8:
6797 case BUILT_IN_ATOMIC_AND_FETCH_16:
6798 {
6799 enum built_in_function lib;
6800 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6801 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6802 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6803 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6804 ignore, lib);
6805 if (target)
6806 return target;
6807 break;
6808 }
6809 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6810 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6811 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6812 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6813 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6814 {
6815 enum built_in_function lib;
6816 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6817 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6818 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6819 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6820 ignore, lib);
6821 if (target)
6822 return target;
6823 break;
6824 }
6825 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6826 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6827 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6828 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6829 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6830 {
6831 enum built_in_function lib;
6832 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6833 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6834 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6835 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6836 ignore, lib);
6837 if (target)
6838 return target;
6839 break;
6840 }
6841 case BUILT_IN_ATOMIC_OR_FETCH_1:
6842 case BUILT_IN_ATOMIC_OR_FETCH_2:
6843 case BUILT_IN_ATOMIC_OR_FETCH_4:
6844 case BUILT_IN_ATOMIC_OR_FETCH_8:
6845 case BUILT_IN_ATOMIC_OR_FETCH_16:
6846 {
6847 enum built_in_function lib;
6848 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6849 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6850 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6851 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6852 ignore, lib);
6853 if (target)
6854 return target;
6855 break;
6856 }
6857 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6858 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6859 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6860 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6861 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6862 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6863 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6864 ignore, BUILT_IN_NONE);
6865 if (target)
6866 return target;
6867 break;
6868
6869 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6870 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6871 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6872 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6873 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6874 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6875 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6876 ignore, BUILT_IN_NONE);
6877 if (target)
6878 return target;
6879 break;
6880
6881 case BUILT_IN_ATOMIC_FETCH_AND_1:
6882 case BUILT_IN_ATOMIC_FETCH_AND_2:
6883 case BUILT_IN_ATOMIC_FETCH_AND_4:
6884 case BUILT_IN_ATOMIC_FETCH_AND_8:
6885 case BUILT_IN_ATOMIC_FETCH_AND_16:
6886 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6887 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6888 ignore, BUILT_IN_NONE);
6889 if (target)
6890 return target;
6891 break;
6892
6893 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6894 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6895 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6896 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6897 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6898 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6899 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6900 ignore, BUILT_IN_NONE);
6901 if (target)
6902 return target;
6903 break;
6904
6905 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6906 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6907 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6908 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6909 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6910 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6911 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6912 ignore, BUILT_IN_NONE);
6913 if (target)
6914 return target;
6915 break;
6916
6917 case BUILT_IN_ATOMIC_FETCH_OR_1:
6918 case BUILT_IN_ATOMIC_FETCH_OR_2:
6919 case BUILT_IN_ATOMIC_FETCH_OR_4:
6920 case BUILT_IN_ATOMIC_FETCH_OR_8:
6921 case BUILT_IN_ATOMIC_FETCH_OR_16:
6922 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6923 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6924 ignore, BUILT_IN_NONE);
6925 if (target)
6926 return target;
6927 break;
6928
6929 case BUILT_IN_ATOMIC_TEST_AND_SET:
6930 return expand_builtin_atomic_test_and_set (exp, target);
6931
6932 case BUILT_IN_ATOMIC_CLEAR:
6933 return expand_builtin_atomic_clear (exp);
6934
6935 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6936 return expand_builtin_atomic_always_lock_free (exp);
6937
6938 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6939 target = expand_builtin_atomic_is_lock_free (exp);
6940 if (target)
6941 return target;
6942 break;
6943
6944 case BUILT_IN_ATOMIC_THREAD_FENCE:
6945 expand_builtin_atomic_thread_fence (exp);
6946 return const0_rtx;
6947
6948 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6949 expand_builtin_atomic_signal_fence (exp);
6950 return const0_rtx;
6951
6952 case BUILT_IN_OBJECT_SIZE:
6953 return expand_builtin_object_size (exp);
6954
6955 case BUILT_IN_MEMCPY_CHK:
6956 case BUILT_IN_MEMPCPY_CHK:
6957 case BUILT_IN_MEMMOVE_CHK:
6958 case BUILT_IN_MEMSET_CHK:
6959 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6960 if (target)
6961 return target;
6962 break;
6963
6964 case BUILT_IN_STRCPY_CHK:
6965 case BUILT_IN_STPCPY_CHK:
6966 case BUILT_IN_STRNCPY_CHK:
6967 case BUILT_IN_STPNCPY_CHK:
6968 case BUILT_IN_STRCAT_CHK:
6969 case BUILT_IN_STRNCAT_CHK:
6970 case BUILT_IN_SNPRINTF_CHK:
6971 case BUILT_IN_VSNPRINTF_CHK:
6972 maybe_emit_chk_warning (exp, fcode);
6973 break;
6974
6975 case BUILT_IN_SPRINTF_CHK:
6976 case BUILT_IN_VSPRINTF_CHK:
6977 maybe_emit_sprintf_chk_warning (exp, fcode);
6978 break;
6979
6980 case BUILT_IN_FREE:
6981 if (warn_free_nonheap_object)
6982 maybe_emit_free_warning (exp);
6983 break;
6984
6985 case BUILT_IN_THREAD_POINTER:
6986 return expand_builtin_thread_pointer (exp, target);
6987
6988 case BUILT_IN_SET_THREAD_POINTER:
6989 expand_builtin_set_thread_pointer (exp);
6990 return const0_rtx;
6991
6992 case BUILT_IN_CILK_DETACH:
6993 expand_builtin_cilk_detach (exp);
6994 return const0_rtx;
6995
6996 case BUILT_IN_CILK_POP_FRAME:
6997 expand_builtin_cilk_pop_frame (exp);
6998 return const0_rtx;
6999
7000 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7001 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7002 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7003 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7004 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7005 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7006 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7007 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7008 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7009 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7010 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7011 /* We allow user CHKP builtins if Pointer Bounds
7012 Checker is off. */
7013 if (!chkp_function_instrumented_p (current_function_decl))
7014 {
7015 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7016 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7017 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7018 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7019 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7020 return expand_normal (CALL_EXPR_ARG (exp, 0));
7021 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7022 return expand_normal (size_zero_node);
7023 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7024 return expand_normal (size_int (-1));
7025 else
7026 return const0_rtx;
7027 }
7028 /* FALLTHROUGH */
7029
7030 case BUILT_IN_CHKP_BNDMK:
7031 case BUILT_IN_CHKP_BNDSTX:
7032 case BUILT_IN_CHKP_BNDCL:
7033 case BUILT_IN_CHKP_BNDCU:
7034 case BUILT_IN_CHKP_BNDLDX:
7035 case BUILT_IN_CHKP_BNDRET:
7036 case BUILT_IN_CHKP_INTERSECT:
7037 case BUILT_IN_CHKP_NARROW:
7038 case BUILT_IN_CHKP_EXTRACT_LOWER:
7039 case BUILT_IN_CHKP_EXTRACT_UPPER:
7040 /* A software implementation of Pointer Bounds Checker is not yet
7041 implemented; target support is required. */
7042 error ("Your target platform does not support -fcheck-pointer-bounds");
7043 break;
7044
7045 case BUILT_IN_ACC_ON_DEVICE:
7046 target = expand_builtin_acc_on_device (exp, target);
7047 if (target)
7048 return target;
7049 break;
7050
7051 default: /* Just emit a library call for an unknown builtin. */
7052 break;
7053 }
7054
7055 /* The switch statement above can drop through to cause the function
7056 to be called normally. */
7057 return expand_call (exp, target, ignore);
7058 }
7059
7060 /* Similar to expand_builtin but is used for instrumented calls. */
7061
7062 rtx
7063 expand_builtin_with_bounds (tree exp, rtx target,
7064 rtx subtarget ATTRIBUTE_UNUSED,
7065 machine_mode mode, int ignore)
7066 {
7067 tree fndecl = get_callee_fndecl (exp);
7068 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7069
7070 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7071
7072 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7073 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7074
7075 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7076 && fcode < END_CHKP_BUILTINS);
7077
7078 switch (fcode)
7079 {
7080 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7081 target = expand_builtin_memcpy_with_bounds (exp, target);
7082 if (target)
7083 return target;
7084 break;
7085
7086 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7087 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7088 if (target)
7089 return target;
7090 break;
7091
7092 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7093 target = expand_builtin_memset_with_bounds (exp, target, mode);
7094 if (target)
7095 return target;
7096 break;
7097
7098 default:
7099 break;
7100 }
7101
7102 /* The switch statement above can drop through to cause the function
7103 to be called normally. */
7104 return expand_call (exp, target, ignore);
7105 }
7106
7107 /* Determine whether a tree node represents a call to a built-in
7108 function. If the tree T is a call to a built-in function with
7109 the right number of arguments of the appropriate types, return
7110 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7111 Otherwise the return value is END_BUILTINS. */
7112
7113 enum built_in_function
7114 builtin_mathfn_code (const_tree t)
7115 {
7116 const_tree fndecl, arg, parmlist;
7117 const_tree argtype, parmtype;
7118 const_call_expr_arg_iterator iter;
7119
7120 if (TREE_CODE (t) != CALL_EXPR
7121 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7122 return END_BUILTINS;
7123
7124 fndecl = get_callee_fndecl (t);
7125 if (fndecl == NULL_TREE
7126 || TREE_CODE (fndecl) != FUNCTION_DECL
7127 || ! DECL_BUILT_IN (fndecl)
7128 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7129 return END_BUILTINS;
7130
7131 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7132 init_const_call_expr_arg_iterator (t, &iter);
7133 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7134 {
7135 /* If a function doesn't take a variable number of arguments,
7136 the last element in the list will have type `void'. */
7137 parmtype = TREE_VALUE (parmlist);
7138 if (VOID_TYPE_P (parmtype))
7139 {
7140 if (more_const_call_expr_args_p (&iter))
7141 return END_BUILTINS;
7142 return DECL_FUNCTION_CODE (fndecl);
7143 }
7144
7145 if (! more_const_call_expr_args_p (&iter))
7146 return END_BUILTINS;
7147
7148 arg = next_const_call_expr_arg (&iter);
7149 argtype = TREE_TYPE (arg);
7150
7151 if (SCALAR_FLOAT_TYPE_P (parmtype))
7152 {
7153 if (! SCALAR_FLOAT_TYPE_P (argtype))
7154 return END_BUILTINS;
7155 }
7156 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7157 {
7158 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7159 return END_BUILTINS;
7160 }
7161 else if (POINTER_TYPE_P (parmtype))
7162 {
7163 if (! POINTER_TYPE_P (argtype))
7164 return END_BUILTINS;
7165 }
7166 else if (INTEGRAL_TYPE_P (parmtype))
7167 {
7168 if (! INTEGRAL_TYPE_P (argtype))
7169 return END_BUILTINS;
7170 }
7171 else
7172 return END_BUILTINS;
7173 }
7174
7175 /* Variable-length argument list. */
7176 return DECL_FUNCTION_CODE (fndecl);
7177 }
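/* Usage sketch: for a CALL_EXPR representing sqrt (3.0) this returns
   BUILT_IN_SQRT, while a call whose argument fails the class checks
   above (say, a pointer where a float parameter is expected) yields
   END_BUILTINS, just like a call to a non-builtin.  */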
7178
7179 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7180 evaluate to a constant. */
7181
7182 static tree
7183 fold_builtin_constant_p (tree arg)
7184 {
7185 /* We return 1 for a numeric type that's known to be a constant
7186 value at compile-time or for an aggregate type that's a
7187 literal constant. */
7188 STRIP_NOPS (arg);
7189
7190 /* If we know this is a constant, return the constant one. */
7191 if (CONSTANT_CLASS_P (arg)
7192 || (TREE_CODE (arg) == CONSTRUCTOR
7193 && TREE_CONSTANT (arg)))
7194 return integer_one_node;
7195 if (TREE_CODE (arg) == ADDR_EXPR)
7196 {
7197 tree op = TREE_OPERAND (arg, 0);
7198 if (TREE_CODE (op) == STRING_CST
7199 || (TREE_CODE (op) == ARRAY_REF
7200 && integer_zerop (TREE_OPERAND (op, 1))
7201 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7202 return integer_one_node;
7203 }
7204
7205 /* If this expression has side effects, show we don't know it to be a
7206 constant.  Likewise if it's a pointer or aggregate type, since in
7207 those cases we only want literals, which are only optimized
7208 when generating RTL, not later.
7209 And finally, if we are compiling an initializer, not code, we
7210 need to return a definite result now; there's not going to be any
7211 more optimization done. */
7212 if (TREE_SIDE_EFFECTS (arg)
7213 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7214 || POINTER_TYPE_P (TREE_TYPE (arg))
7215 || cfun == 0
7216 || folding_initializer
7217 || force_folding_builtin_constant_p)
7218 return integer_zero_node;
7219
7220 return NULL_TREE;
7221 }
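/* Examples: __builtin_constant_p (3) and __builtin_constant_p ("ab")
   fold to 1 here; __builtin_constant_p (i++) folds to 0 because of
   the side effect; __builtin_constant_p (i) for a plain variable
   yields NULL_TREE, leaving later passes a chance to prove I
   constant.  */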
7222
7223 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7224 return it as a truthvalue. */
7225
7226 static tree
7227 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7228 tree predictor)
7229 {
7230 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7231
7232 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7233 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7234 ret_type = TREE_TYPE (TREE_TYPE (fn));
7235 pred_type = TREE_VALUE (arg_types);
7236 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7237
7238 pred = fold_convert_loc (loc, pred_type, pred);
7239 expected = fold_convert_loc (loc, expected_type, expected);
7240 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7241 predictor);
7242
7243 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7244 build_int_cst (ret_type, 0));
7245 }
7246
7247 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
7248 Return NULL_TREE if no simplification is possible. */
7249
7250 tree
7251 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7252 {
7253 tree inner, fndecl, inner_arg0;
7254 enum tree_code code;
7255
7256 /* Distribute the expected value over short-circuiting operators.
7257 See through the cast from truthvalue_type_node to long. */
7258 inner_arg0 = arg0;
7259 while (CONVERT_EXPR_P (inner_arg0)
7260 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7261 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7262 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7263
7264 /* If this is a builtin_expect within a builtin_expect, keep the
7265 inner one.  See through a comparison against a constant.  It
7266 might have been added to create a truthvalue. */
7267 inner = inner_arg0;
7268
7269 if (COMPARISON_CLASS_P (inner)
7270 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7271 inner = TREE_OPERAND (inner, 0);
7272
7273 if (TREE_CODE (inner) == CALL_EXPR
7274 && (fndecl = get_callee_fndecl (inner))
7275 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7276 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7277 return arg0;
7278
7279 inner = inner_arg0;
7280 code = TREE_CODE (inner);
7281 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7282 {
7283 tree op0 = TREE_OPERAND (inner, 0);
7284 tree op1 = TREE_OPERAND (inner, 1);
7285
7286 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7287 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7288 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7289
7290 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7291 }
7292
7293 /* If the argument isn't invariant then there's nothing else we can do. */
7294 if (!TREE_CONSTANT (inner_arg0))
7295 return NULL_TREE;
7296
7297 /* If we expect that a comparison against the argument will fold to
7298 a constant return the constant. In practice, this means a true
7299 constant or the address of a non-weak symbol. */
7300 inner = inner_arg0;
7301 STRIP_NOPS (inner);
7302 if (TREE_CODE (inner) == ADDR_EXPR)
7303 {
7304 do
7305 {
7306 inner = TREE_OPERAND (inner, 0);
7307 }
7308 while (TREE_CODE (inner) == COMPONENT_REF
7309 || TREE_CODE (inner) == ARRAY_REF);
7310 if ((TREE_CODE (inner) == VAR_DECL
7311 || TREE_CODE (inner) == FUNCTION_DECL)
7312 && DECL_WEAK (inner))
7313 return NULL_TREE;
7314 }
7315
7316 /* Otherwise, ARG0 already has the proper type for the return value. */
7317 return arg0;
7318 }
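/* Example of the distribution above: __builtin_expect (a && b, 1)
   becomes __builtin_expect (a, 1) && __builtin_expect (b, 1)
   (converted back to the type of the original argument), so each
   short-circuit arm carries its own prediction.  */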
7319
7320 /* Fold a call to __builtin_classify_type with argument ARG. */
7321
7322 static tree
7323 fold_builtin_classify_type (tree arg)
7324 {
7325 if (arg == 0)
7326 return build_int_cst (integer_type_node, no_type_class);
7327
7328 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7329 }
7330
7331 /* Fold a call to __builtin_strlen with argument ARG. */
7332
7333 static tree
7334 fold_builtin_strlen (location_t loc, tree type, tree arg)
7335 {
7336 if (!validate_arg (arg, POINTER_TYPE))
7337 return NULL_TREE;
7338 else
7339 {
7340 tree len = c_strlen (arg, 0);
7341
7342 if (len)
7343 return fold_convert_loc (loc, type, len);
7344
7345 return NULL_TREE;
7346 }
7347 }
7348
7349 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7350
7351 static tree
7352 fold_builtin_inf (location_t loc, tree type, int warn)
7353 {
7354 REAL_VALUE_TYPE real;
7355
7356 /* __builtin_inff is intended to be usable to define INFINITY on all
7357 targets. If an infinity is not available, INFINITY expands "to a
7358 positive constant of type float that overflows at translation
7359 time", footnote "In this case, using INFINITY will violate the
7360 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7361 Thus we pedwarn to ensure this constraint violation is
7362 diagnosed. */
7363 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7364 pedwarn (loc, 0, "target format does not support infinity");
7365
7366 real_inf (&real);
7367 return build_real (type, real);
7368 }
7369
7370 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7371
7372 static tree
7373 fold_builtin_nan (tree arg, tree type, int quiet)
7374 {
7375 REAL_VALUE_TYPE real;
7376 const char *str;
7377
7378 if (!validate_arg (arg, POINTER_TYPE))
7379 return NULL_TREE;
7380 str = c_getstr (arg);
7381 if (!str)
7382 return NULL_TREE;
7383
7384 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7385 return NULL_TREE;
7386
7387 return build_real (type, real);
7388 }
7389
7390 /* Return true if the floating point expression T has an integer value.
7391 We also allow +Inf, -Inf and NaN to be considered integer values. */
7392
7393 static bool
7394 integer_valued_real_p (tree t)
7395 {
7396 switch (TREE_CODE (t))
7397 {
7398 case FLOAT_EXPR:
7399 return true;
7400
7401 case ABS_EXPR:
7402 case SAVE_EXPR:
7403 return integer_valued_real_p (TREE_OPERAND (t, 0));
7404
7405 case COMPOUND_EXPR:
7406 case MODIFY_EXPR:
7407 case BIND_EXPR:
7408 return integer_valued_real_p (TREE_OPERAND (t, 1));
7409
7410 case PLUS_EXPR:
7411 case MINUS_EXPR:
7412 case MULT_EXPR:
7413 case MIN_EXPR:
7414 case MAX_EXPR:
7415 return integer_valued_real_p (TREE_OPERAND (t, 0))
7416 && integer_valued_real_p (TREE_OPERAND (t, 1));
7417
7418 case COND_EXPR:
7419 return integer_valued_real_p (TREE_OPERAND (t, 1))
7420 && integer_valued_real_p (TREE_OPERAND (t, 2));
7421
7422 case REAL_CST:
7423 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7424
7425 CASE_CONVERT:
7426 {
7427 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7428 if (TREE_CODE (type) == INTEGER_TYPE)
7429 return true;
7430 if (TREE_CODE (type) == REAL_TYPE)
7431 return integer_valued_real_p (TREE_OPERAND (t, 0));
7432 break;
7433 }
7434
7435 case CALL_EXPR:
7436 switch (builtin_mathfn_code (t))
7437 {
7438 CASE_FLT_FN (BUILT_IN_CEIL):
7439 CASE_FLT_FN (BUILT_IN_FLOOR):
7440 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7441 CASE_FLT_FN (BUILT_IN_RINT):
7442 CASE_FLT_FN (BUILT_IN_ROUND):
7443 CASE_FLT_FN (BUILT_IN_TRUNC):
7444 return true;
7445
7446 CASE_FLT_FN (BUILT_IN_FMIN):
7447 CASE_FLT_FN (BUILT_IN_FMAX):
7448 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7449 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7450
7451 default:
7452 break;
7453 }
7454 break;
7455
7456 default:
7457 break;
7458 }
7459 return false;
7460 }
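/* E.g. this predicate holds for (double) i with integral I, for
   floor (x) or trunc (x), and for fmin (floor (a), trunc (b)); it is
   false for a plain REAL_TYPE variable, whose fractional part is
   unconstrained.  */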
7461
7462 /* FNDECL is assumed to be a builtin where truncation can be propagated
7463 across (for instance floor ((double) f) == (double) floorf (f)).
7464 Do the transformation for a call with argument ARG. */
7465
7466 static tree
7467 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7468 {
7469 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7470
7471 if (!validate_arg (arg, REAL_TYPE))
7472 return NULL_TREE;
7473
7474 /* Integer rounding functions are idempotent. */
7475 if (fcode == builtin_mathfn_code (arg))
7476 return arg;
7477
7478 /* If argument is already integer valued, and we don't need to worry
7479 about setting errno, there's no need to perform rounding. */
7480 if (! flag_errno_math && integer_valued_real_p (arg))
7481 return arg;
7482
7483 if (optimize)
7484 {
7485 tree arg0 = strip_float_extensions (arg);
7486 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7487 tree newtype = TREE_TYPE (arg0);
7488 tree decl;
7489
7490 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7491 && (decl = mathfn_built_in (newtype, fcode)))
7492 return fold_convert_loc (loc, ftype,
7493 build_call_expr_loc (loc, decl, 1,
7494 fold_convert_loc (loc,
7495 newtype,
7496 arg0)));
7497 }
7498 return NULL_TREE;
7499 }
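/* E.g. with ARG = (double) f for a float F, floor (ARG) becomes
   (double) floorf (f), assuming the target provides floorf and the
   narrower type really has less precision.  */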
7500
7501 /* FNDECL is assumed to be a builtin that can narrow the FP type of
7502 the argument, for instance lround((double)f) -> lroundf (f).
7503 Do the transformation for a call with argument ARG. */
7504
7505 static tree
7506 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7507 {
7508 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7509
7510 if (!validate_arg (arg, REAL_TYPE))
7511 return NULL_TREE;
7512
7513 /* If argument is already integer valued, and we don't need to worry
7514 about setting errno, there's no need to perform rounding. */
7515 if (! flag_errno_math && integer_valued_real_p (arg))
7516 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7517 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7518
7519 if (optimize)
7520 {
7521 tree ftype = TREE_TYPE (arg);
7522 tree arg0 = strip_float_extensions (arg);
7523 tree newtype = TREE_TYPE (arg0);
7524 tree decl;
7525
7526 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7527 && (decl = mathfn_built_in (newtype, fcode)))
7528 return build_call_expr_loc (loc, decl, 1,
7529 fold_convert_loc (loc, newtype, arg0));
7530 }
7531
7532 /* Canonicalize iceil, ifloor, iround and irint to lceil, lfloor,
7533 lround and lrint on ILP32 targets where sizeof (int) == sizeof (long). */
7534 if (TYPE_PRECISION (integer_type_node)
7535 == TYPE_PRECISION (long_integer_type_node))
7536 {
7537 tree newfn = NULL_TREE;
7538 switch (fcode)
7539 {
7540 CASE_FLT_FN (BUILT_IN_ICEIL):
7541 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7542 break;
7543
7544 CASE_FLT_FN (BUILT_IN_IFLOOR):
7545 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7546 break;
7547
7548 CASE_FLT_FN (BUILT_IN_IROUND):
7549 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7550 break;
7551
7552 CASE_FLT_FN (BUILT_IN_IRINT):
7553 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7554 break;
7555
7556 default:
7557 break;
7558 }
7559
7560 if (newfn)
7561 {
7562 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7563 return fold_convert_loc (loc,
7564 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7565 }
7566 }
7567
7568 /* Canonicalize llceil, llfloor, llround and llrint to lceil, lfloor,
7569 lround and lrint on LP64 targets where sizeof (long long) == sizeof (long). */
7570 if (TYPE_PRECISION (long_long_integer_type_node)
7571 == TYPE_PRECISION (long_integer_type_node))
7572 {
7573 tree newfn = NULL_TREE;
7574 switch (fcode)
7575 {
7576 CASE_FLT_FN (BUILT_IN_LLCEIL):
7577 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7578 break;
7579
7580 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7581 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7582 break;
7583
7584 CASE_FLT_FN (BUILT_IN_LLROUND):
7585 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7586 break;
7587
7588 CASE_FLT_FN (BUILT_IN_LLRINT):
7589 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7590 break;
7591
7592 default:
7593 break;
7594 }
7595
7596 if (newfn)
7597 {
7598 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7599 return fold_convert_loc (loc,
7600 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7601 }
7602 }
7603
7604 return NULL_TREE;
7605 }
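/* E.g. lround ((double) f) narrows to lroundf (f) when it exists;
   on LP64, llround (x) further canonicalizes to lround (x), and on
   ILP32 iround (x) canonicalizes to lround (x).  */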
7606
7607 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7608 return type. Return NULL_TREE if no simplification can be made. */
7609
7610 static tree
7611 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7612 {
7613 tree res;
7614
7615 if (!validate_arg (arg, COMPLEX_TYPE)
7616 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7617 return NULL_TREE;
7618
7619 /* Calculate the result when the argument is a constant. */
7620 if (TREE_CODE (arg) == COMPLEX_CST
7621 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7622 type, mpfr_hypot)))
7623 return res;
7624
7625 if (TREE_CODE (arg) == COMPLEX_EXPR)
7626 {
7627 tree real = TREE_OPERAND (arg, 0);
7628 tree imag = TREE_OPERAND (arg, 1);
7629
7630 /* If either part is zero, cabs is fabs of the other. */
7631 if (real_zerop (real))
7632 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7633 if (real_zerop (imag))
7634 return fold_build1_loc (loc, ABS_EXPR, type, real);
7635
7636 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7637 if (flag_unsafe_math_optimizations
7638 && operand_equal_p (real, imag, OEP_PURE_SAME))
7639 {
7640 const REAL_VALUE_TYPE sqrt2_trunc
7641 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7642 STRIP_NOPS (real);
7643 return fold_build2_loc (loc, MULT_EXPR, type,
7644 fold_build1_loc (loc, ABS_EXPR, type, real),
7645 build_real (type, sqrt2_trunc));
7646 }
7647 }
7648
7649 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7650 if (TREE_CODE (arg) == NEGATE_EXPR
7651 || TREE_CODE (arg) == CONJ_EXPR)
7652 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7653
7654 /* Don't do this when optimizing for size. */
7655 if (flag_unsafe_math_optimizations
7656 && optimize && optimize_function_for_speed_p (cfun))
7657 {
7658 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7659
7660 if (sqrtfn != NULL_TREE)
7661 {
7662 tree rpart, ipart, result;
7663
7664 arg = builtin_save_expr (arg);
7665
7666 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7667 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7668
7669 rpart = builtin_save_expr (rpart);
7670 ipart = builtin_save_expr (ipart);
7671
7672 result = fold_build2_loc (loc, PLUS_EXPR, type,
7673 fold_build2_loc (loc, MULT_EXPR, type,
7674 rpart, rpart),
7675 fold_build2_loc (loc, MULT_EXPR, type,
7676 ipart, ipart));
7677
7678 return build_call_expr_loc (loc, sqrtfn, 1, result);
7679 }
7680 }
7681
7682 return NULL_TREE;
7683 }
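/* Examples: cabs (x + 0.0i) folds to fabs (x); under
   -funsafe-math-optimizations, when optimizing for speed and a sqrt
   builtin exists, cabs (z) is expanded to sqrt (r*r + i*i) with the
   parts shared through SAVE_EXPRs.  */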
7684
7685 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7686 complex tree type of the result. If NEG is true, the imaginary
7687 zero is negative. */
7688
7689 static tree
7690 build_complex_cproj (tree type, bool neg)
7691 {
7692 REAL_VALUE_TYPE rinf, rzero = dconst0;
7693
7694 real_inf (&rinf);
7695 rzero.sign = neg;
7696 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7697 build_real (TREE_TYPE (type), rzero));
7698 }
7699
7700 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7701 return type. Return NULL_TREE if no simplification can be made. */
7702
7703 static tree
7704 fold_builtin_cproj (location_t loc, tree arg, tree type)
7705 {
7706 if (!validate_arg (arg, COMPLEX_TYPE)
7707 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7708 return NULL_TREE;
7709
7710 /* If there are no infinities, return arg. */
7711 if (! HONOR_INFINITIES (type))
7712 return non_lvalue_loc (loc, arg);
7713
7714 /* Calculate the result when the argument is a constant. */
7715 if (TREE_CODE (arg) == COMPLEX_CST)
7716 {
7717 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7718 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7719
7720 if (real_isinf (real) || real_isinf (imag))
7721 return build_complex_cproj (type, imag->sign);
7722 else
7723 return arg;
7724 }
7725 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7726 {
7727 tree real = TREE_OPERAND (arg, 0);
7728 tree imag = TREE_OPERAND (arg, 1);
7729
7730 STRIP_NOPS (real);
7731 STRIP_NOPS (imag);
7732
7733 /* If the real part is inf and the imag part is known to be
7734 nonnegative, return (inf + 0i). Remember side-effects are
7735 possible in the imag part. */
7736 if (TREE_CODE (real) == REAL_CST
7737 && real_isinf (TREE_REAL_CST_PTR (real))
7738 && tree_expr_nonnegative_p (imag))
7739 return omit_one_operand_loc (loc, type,
7740 build_complex_cproj (type, false),
7741 arg);
7742
7743 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7744 Remember side-effects are possible in the real part. */
7745 if (TREE_CODE (imag) == REAL_CST
7746 && real_isinf (TREE_REAL_CST_PTR (imag)))
7747 return
7748 omit_one_operand_loc (loc, type,
7749 build_complex_cproj (type, TREE_REAL_CST_PTR
7750 (imag)->sign), arg);
7751 }
7752
7753 return NULL_TREE;
7754 }
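/* Examples: cproj (2 + 3i) folds to the argument itself, while
   cproj (inf - 3i) folds to the projective infinity (inf - 0i),
   keeping the sign of the imaginary part.  */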
7755
7756 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7757 Return NULL_TREE if no simplification can be made. */
7758
7759 static tree
7760 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7761 {
7763 enum built_in_function fcode;
7764 tree res;
7765
7766 if (!validate_arg (arg, REAL_TYPE))
7767 return NULL_TREE;
7768
7769 /* Calculate the result when the argument is a constant. */
7770 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7771 return res;
7772
7773 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7774 fcode = builtin_mathfn_code (arg);
7775 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7776 {
7777 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7778 arg = fold_build2_loc (loc, MULT_EXPR, type,
7779 CALL_EXPR_ARG (arg, 0),
7780 build_real (type, dconsthalf));
7781 return build_call_expr_loc (loc, expfn, 1, arg);
7782 }
7783
7784 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7785 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7786 {
7787 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7788
7789 if (powfn)
7790 {
7791 tree arg0 = CALL_EXPR_ARG (arg, 0);
7792 tree tree_root;
7793 /* The inner root was either sqrt or cbrt. */
7794 /* This was a conditional expression but it triggered a bug
7795 in Sun C 5.5. */
7796 REAL_VALUE_TYPE dconstroot;
7797 if (BUILTIN_SQRT_P (fcode))
7798 dconstroot = dconsthalf;
7799 else
7800 dconstroot = dconst_third ();
7801
7802 /* Adjust for the outer root. */
7803 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7804 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7805 tree_root = build_real (type, dconstroot);
7806 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7807 }
7808 }
7809
7810 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7811 if (flag_unsafe_math_optimizations
7812 && (fcode == BUILT_IN_POW
7813 || fcode == BUILT_IN_POWF
7814 || fcode == BUILT_IN_POWL))
7815 {
7816 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7817 tree arg0 = CALL_EXPR_ARG (arg, 0);
7818 tree arg1 = CALL_EXPR_ARG (arg, 1);
7819 tree narg1;
7820 if (!tree_expr_nonnegative_p (arg0))
7821 arg0 = build1 (ABS_EXPR, type, arg0);
7822 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7823 build_real (type, dconsthalf));
7824 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7825 }
7826
7827 return NULL_TREE;
7828 }
7829
7830 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7831 Return NULL_TREE if no simplification can be made. */
7832
7833 static tree
7834 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7835 {
7836 const enum built_in_function fcode = builtin_mathfn_code (arg);
7837 tree res;
7838
7839 if (!validate_arg (arg, REAL_TYPE))
7840 return NULL_TREE;
7841
7842 /* Calculate the result when the argument is a constant. */
7843 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7844 return res;
7845
7846 if (flag_unsafe_math_optimizations)
7847 {
7848 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7849 if (BUILTIN_EXPONENT_P (fcode))
7850 {
7851 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7852 const REAL_VALUE_TYPE third_trunc =
7853 real_value_truncate (TYPE_MODE (type), dconst_third ());
7854 arg = fold_build2_loc (loc, MULT_EXPR, type,
7855 CALL_EXPR_ARG (arg, 0),
7856 build_real (type, third_trunc));
7857 return build_call_expr_loc (loc, expfn, 1, arg);
7858 }
7859
7860 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7861 if (BUILTIN_SQRT_P (fcode))
7862 {
7863 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7864
7865 if (powfn)
7866 {
7867 tree arg0 = CALL_EXPR_ARG (arg, 0);
7868 tree tree_root;
7869 REAL_VALUE_TYPE dconstroot = dconst_third ();
7870
7871 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7872 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7873 tree_root = build_real (type, dconstroot);
7874 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7875 }
7876 }
7877
7878 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7879 if (BUILTIN_CBRT_P (fcode))
7880 {
7881 tree arg0 = CALL_EXPR_ARG (arg, 0);
7882 if (tree_expr_nonnegative_p (arg0))
7883 {
7884 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7885
7886 if (powfn)
7887 {
7888 tree tree_root;
7889 REAL_VALUE_TYPE dconstroot;
7890
7891 real_arithmetic (&dconstroot, MULT_EXPR,
7892 dconst_third_ptr (), dconst_third_ptr ());
7893 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7894 tree_root = build_real (type, dconstroot);
7895 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7896 }
7897 }
7898 }
7899
7900 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7901 if (fcode == BUILT_IN_POW
7902 || fcode == BUILT_IN_POWF
7903 || fcode == BUILT_IN_POWL)
7904 {
7905 tree arg00 = CALL_EXPR_ARG (arg, 0);
7906 tree arg01 = CALL_EXPR_ARG (arg, 1);
7907 if (tree_expr_nonnegative_p (arg00))
7908 {
7909 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7910 const REAL_VALUE_TYPE dconstroot
7911 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7912 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7913 build_real (type, dconstroot));
7914 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7915 }
7916 }
7917 }
7918 return NULL_TREE;
7919 }
7920
7921 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7922 TYPE is the type of the return value. Return NULL_TREE if no
7923 simplification can be made. */
7924
7925 static tree
7926 fold_builtin_cos (location_t loc,
7927 tree arg, tree type, tree fndecl)
7928 {
7929 tree res, narg;
7930
7931 if (!validate_arg (arg, REAL_TYPE))
7932 return NULL_TREE;
7933
7934 /* Calculate the result when the argument is a constant. */
7935 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7936 return res;
7937
7938 /* Optimize cos (-x) into cos (x). */
7939 if ((narg = fold_strip_sign_ops (arg)))
7940 return build_call_expr_loc (loc, fndecl, 1, narg);
7941
7942 return NULL_TREE;
7943 }
7944
7945 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7946 Return NULL_TREE if no simplification can be made. */
7947
7948 static tree
7949 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7950 {
7951 if (validate_arg (arg, REAL_TYPE))
7952 {
7953 tree res, narg;
7954
7955 /* Calculate the result when the argument is a constant. */
7956 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7957 return res;
7958
7959 /* Optimize cosh (-x) into cosh (x). */
7960 if ((narg = fold_strip_sign_ops (arg)))
7961 return build_call_expr_loc (loc, fndecl, 1, narg);
7962 }
7963
7964 return NULL_TREE;
7965 }
7966
7967 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7968 argument ARG. TYPE is the type of the return value. Return
7969 NULL_TREE if no simplification can be made. */
7970
7971 static tree
7972 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7973 bool hyper)
7974 {
7975 if (validate_arg (arg, COMPLEX_TYPE)
7976 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7977 {
7978 tree tmp;
7979
7980 /* Calculate the result when the argument is a constant. */
7981 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7982 return tmp;
7983
7984 /* Optimize fn(-x) into fn(x). */
7985 if ((tmp = fold_strip_sign_ops (arg)))
7986 return build_call_expr_loc (loc, fndecl, 1, tmp);
7987 }
7988
7989 return NULL_TREE;
7990 }
7991
7992 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7993 Return NULL_TREE if no simplification can be made. */
7994
7995 static tree
7996 fold_builtin_tan (tree arg, tree type)
7997 {
7998 enum built_in_function fcode;
7999 tree res;
8000
8001 if (!validate_arg (arg, REAL_TYPE))
8002 return NULL_TREE;
8003
8004 /* Calculate the result when the argument is a constant. */
8005 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8006 return res;
8007
8008 /* Optimize tan(atan(x)) = x. */
8009 fcode = builtin_mathfn_code (arg);
8010 if (flag_unsafe_math_optimizations
8011 && (fcode == BUILT_IN_ATAN
8012 || fcode == BUILT_IN_ATANF
8013 || fcode == BUILT_IN_ATANL))
8014 return CALL_EXPR_ARG (arg, 0);
8015
8016 return NULL_TREE;
8017 }
8018
8019 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8020 NULL_TREE if no simplification can be made. */
8021
8022 static tree
8023 fold_builtin_sincos (location_t loc,
8024 tree arg0, tree arg1, tree arg2)
8025 {
8026 tree type;
8027 tree res, fn, call;
8028
8029 if (!validate_arg (arg0, REAL_TYPE)
8030 || !validate_arg (arg1, POINTER_TYPE)
8031 || !validate_arg (arg2, POINTER_TYPE))
8032 return NULL_TREE;
8033
8034 type = TREE_TYPE (arg0);
8035
8036 /* Calculate the result when the argument is a constant. */
8037 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8038 return res;
8039
8040 /* Canonicalize sincos to cexpi. */
8041 if (!targetm.libc_has_function (function_c99_math_complex))
8042 return NULL_TREE;
8043 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8044 if (!fn)
8045 return NULL_TREE;
8046
8047 call = build_call_expr_loc (loc, fn, 1, arg0);
8048 call = builtin_save_expr (call);
8049
8050 return build2 (COMPOUND_EXPR, void_type_node,
8051 build2 (MODIFY_EXPR, void_type_node,
8052 build_fold_indirect_ref_loc (loc, arg1),
8053 build1 (IMAGPART_EXPR, type, call)),
8054 build2 (MODIFY_EXPR, void_type_node,
8055 build_fold_indirect_ref_loc (loc, arg2),
8056 build1 (REALPART_EXPR, type, call)));
8057 }
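/* The tree built above amounts to:
     t = cexpi (arg0); *arg1 = __imag t; *arg2 = __real t;
   i.e. the sine lands in *ARG1 and the cosine in *ARG2, matching
   sincos's argument order, with the cexpi result shared via a
   SAVE_EXPR.  */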
8058
8059 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8060 NULL_TREE if no simplification can be made. */
8061
8062 static tree
8063 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8064 {
8065 tree rtype;
8066 tree realp, imagp, ifn;
8067 tree res;
8068
8069 if (!validate_arg (arg0, COMPLEX_TYPE)
8070 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8071 return NULL_TREE;
8072
8073 /* Calculate the result when the argument is a constant. */
8074 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8075 return res;
8076
8077 rtype = TREE_TYPE (TREE_TYPE (arg0));
8078
8079 /* If we can figure out the real part of ARG0 and it is constant
8080 zero, fold to cexpi. */
8081 if (!targetm.libc_has_function (function_c99_math_complex))
8082 return NULL_TREE;
8083 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8084 if (!ifn)
8085 return NULL_TREE;
8086
8087 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8088 && real_zerop (realp))
8089 {
8090 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8091 return build_call_expr_loc (loc, ifn, 1, narg);
8092 }
8093
8094 /* If we can easily decompose the real and imaginary parts, split
8095 cexp into exp (r) * cexpi (i). */
8096 if (flag_unsafe_math_optimizations
8097 && realp)
8098 {
8099 tree rfn, rcall, icall;
8100
8101 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8102 if (!rfn)
8103 return NULL_TREE;
8104
8105 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8106 if (!imagp)
8107 return NULL_TREE;
8108
8109 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8110 icall = builtin_save_expr (icall);
8111 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8112 rcall = builtin_save_expr (rcall);
8113 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8114 fold_build2_loc (loc, MULT_EXPR, rtype,
8115 rcall,
8116 fold_build1_loc (loc, REALPART_EXPR,
8117 rtype, icall)),
8118 fold_build2_loc (loc, MULT_EXPR, rtype,
8119 rcall,
8120 fold_build1_loc (loc, IMAGPART_EXPR,
8121 rtype, icall)));
8122 }
8123
8124 return NULL_TREE;
8125 }
8126
8127 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8128 Return NULL_TREE if no simplification can be made. */
8129
8130 static tree
8131 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8132 {
8133 if (!validate_arg (arg, REAL_TYPE))
8134 return NULL_TREE;
8135
8136 /* Optimize trunc of constant value. */
8137 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8138 {
8139 REAL_VALUE_TYPE r, x;
8140 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8141
8142 x = TREE_REAL_CST (arg);
8143 real_trunc (&r, TYPE_MODE (type), &x);
8144 return build_real (type, r);
8145 }
8146
8147 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8148 }
8149
8150 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8151 Return NULL_TREE if no simplification can be made. */
8152
8153 static tree
8154 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8155 {
8156 if (!validate_arg (arg, REAL_TYPE))
8157 return NULL_TREE;
8158
8159 /* Optimize floor of constant value. */
8160 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8161 {
8162 REAL_VALUE_TYPE x;
8163
8164 x = TREE_REAL_CST (arg);
8165 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8166 {
8167 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8168 REAL_VALUE_TYPE r;
8169
8170 real_floor (&r, TYPE_MODE (type), &x);
8171 return build_real (type, r);
8172 }
8173 }
8174
8175 /* Fold floor (x) where x is nonnegative to trunc (x). */
8176 if (tree_expr_nonnegative_p (arg))
8177 {
8178 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8179 if (truncfn)
8180 return build_call_expr_loc (loc, truncfn, 1, arg);
8181 }
8182
8183 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8184 }
8185
8186 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8187 Return NULL_TREE if no simplification can be made. */
8188
8189 static tree
8190 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8191 {
8192 if (!validate_arg (arg, REAL_TYPE))
8193 return NULL_TREE;
8194
8195 /* Optimize ceil of constant value. */
8196 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8197 {
8198 REAL_VALUE_TYPE x;
8199
8200 x = TREE_REAL_CST (arg);
8201 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8202 {
8203 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8204 REAL_VALUE_TYPE r;
8205
8206 real_ceil (&r, TYPE_MODE (type), &x);
8207 return build_real (type, r);
8208 }
8209 }
8210
8211 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8212 }
8213
8214 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8215 Return NULL_TREE if no simplification can be made. */
8216
8217 static tree
8218 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8219 {
8220 if (!validate_arg (arg, REAL_TYPE))
8221 return NULL_TREE;
8222
8223 /* Optimize round of constant value. */
8224 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8225 {
8226 REAL_VALUE_TYPE x;
8227
8228 x = TREE_REAL_CST (arg);
8229 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8230 {
8231 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8232 REAL_VALUE_TYPE r;
8233
8234 real_round (&r, TYPE_MODE (type), &x);
8235 return build_real (type, r);
8236 }
8237 }
8238
8239 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8240 }
8241
8242 /* Fold function call to builtin lround, lroundf or lroundl (or the
8243 corresponding int and long long versions) and the other integer
8244 rounding functions. ARG is the argument to the call. Return
8245 NULL_TREE if no simplification can be made. */
8246
8247 static tree
8248 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8249 {
8250 if (!validate_arg (arg, REAL_TYPE))
8251 return NULL_TREE;
8252
8253 /* Optimize lround of constant value. */
8254 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8255 {
8256 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8257
8258 if (real_isfinite (&x))
8259 {
8260 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8261 tree ftype = TREE_TYPE (arg);
8262 REAL_VALUE_TYPE r;
8263 bool fail = false;
8264
8265 switch (DECL_FUNCTION_CODE (fndecl))
8266 {
8267 CASE_FLT_FN (BUILT_IN_IFLOOR):
8268 CASE_FLT_FN (BUILT_IN_LFLOOR):
8269 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8270 real_floor (&r, TYPE_MODE (ftype), &x);
8271 break;
8272
8273 CASE_FLT_FN (BUILT_IN_ICEIL):
8274 CASE_FLT_FN (BUILT_IN_LCEIL):
8275 CASE_FLT_FN (BUILT_IN_LLCEIL):
8276 real_ceil (&r, TYPE_MODE (ftype), &x);
8277 break;
8278
8279 CASE_FLT_FN (BUILT_IN_IROUND):
8280 CASE_FLT_FN (BUILT_IN_LROUND):
8281 CASE_FLT_FN (BUILT_IN_LLROUND):
8282 real_round (&r, TYPE_MODE (ftype), &x);
8283 break;
8284
8285 default:
8286 gcc_unreachable ();
8287 }
8288
8289 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8290 if (!fail)
8291 return wide_int_to_tree (itype, val);
8292 }
8293 }
8294
8295 switch (DECL_FUNCTION_CODE (fndecl))
8296 {
8297 CASE_FLT_FN (BUILT_IN_LFLOOR):
8298 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8299 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8300 if (tree_expr_nonnegative_p (arg))
8301 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8302 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8303 break;
8304 default:;
8305 }
8306
8307 return fold_fixed_mathfn (loc, fndecl, arg);
8308 }
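/* Compile-time examples: lround (2.5) folds to 3 (halfway cases round
   away from zero) and lfloor (-1.5) folds to -2; for provably
   nonnegative X, lfloor (x) degenerates to the plain FIX_TRUNC
   conversion since floor and truncation agree there.  */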
8309
8310 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8311 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8312 the argument to the call. Return NULL_TREE if no simplification can
8313 be made. */
8314
8315 static tree
8316 fold_builtin_bitop (tree fndecl, tree arg)
8317 {
8318 if (!validate_arg (arg, INTEGER_TYPE))
8319 return NULL_TREE;
8320
8321 /* Optimize for constant argument. */
8322 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8323 {
8324 tree type = TREE_TYPE (arg);
8325 int result;
8326
8327 switch (DECL_FUNCTION_CODE (fndecl))
8328 {
8329 CASE_INT_FN (BUILT_IN_FFS):
8330 result = wi::ffs (arg);
8331 break;
8332
8333 CASE_INT_FN (BUILT_IN_CLZ):
8334 if (wi::ne_p (arg, 0))
8335 result = wi::clz (arg);
8336 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8337 result = TYPE_PRECISION (type);
8338 break;
8339
8340 CASE_INT_FN (BUILT_IN_CTZ):
8341 if (wi::ne_p (arg, 0))
8342 result = wi::ctz (arg);
8343 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8344 result = TYPE_PRECISION (type);
8345 break;
8346
8347 CASE_INT_FN (BUILT_IN_CLRSB):
8348 result = wi::clrsb (arg);
8349 break;
8350
8351 CASE_INT_FN (BUILT_IN_POPCOUNT):
8352 result = wi::popcount (arg);
8353 break;
8354
8355 CASE_INT_FN (BUILT_IN_PARITY):
8356 result = wi::parity (arg);
8357 break;
8358
8359 default:
8360 gcc_unreachable ();
8361 }
8362
8363 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8364 }
8365
8366 return NULL_TREE;
8367 }
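/* Constant-folding examples: __builtin_popcount (0xf0) folds to 4 and
   __builtin_ffs (8) folds to 4; __builtin_clz (0) and __builtin_ctz (0)
   fold to the type precision unless the target defines its own value
   at zero.  */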
8368
8369 /* Fold function call to builtin bswap16, bswap32 or bswap64.
8370 Return NULL_TREE if no simplification can be made. */
8371 static tree
8372 fold_builtin_bswap (tree fndecl, tree arg)
8373 {
8374 if (! validate_arg (arg, INTEGER_TYPE))
8375 return NULL_TREE;
8376
8377 /* Optimize constant value. */
8378 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8379 {
8380 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8381
8382 switch (DECL_FUNCTION_CODE (fndecl))
8383 {
8384 case BUILT_IN_BSWAP16:
8385 case BUILT_IN_BSWAP32:
8386 case BUILT_IN_BSWAP64:
8387 {
8388 signop sgn = TYPE_SIGN (type);
8389 tree result =
8390 wide_int_to_tree (type,
8391 wide_int::from (arg, TYPE_PRECISION (type),
8392 sgn).bswap ());
8393 return result;
8394 }
8395 default:
8396 gcc_unreachable ();
8397 }
8398 }
8399
8400 return NULL_TREE;
8401 }
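/* E.g. __builtin_bswap32 (0x12345678) folds to 0x78563412; the
   wide_int conversion keeps the byte swap exact at each width.  */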
8402
8403 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8404 NULL_TREE if no simplification can be made. */
8405
8406 static tree
8407 fold_builtin_hypot (location_t loc, tree fndecl,
8408 tree arg0, tree arg1, tree type)
8409 {
8410 tree res, narg0, narg1;
8411
8412 if (!validate_arg (arg0, REAL_TYPE)
8413 || !validate_arg (arg1, REAL_TYPE))
8414 return NULL_TREE;
8415
8416 /* Calculate the result when the argument is a constant. */
8417 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8418 return res;
8419
8420 /* If either argument to hypot has a negate or abs, strip that off.
8421 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8422 narg0 = fold_strip_sign_ops (arg0);
8423 narg1 = fold_strip_sign_ops (arg1);
8424 if (narg0 || narg1)
8425 {
8426 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8427 narg1 ? narg1 : arg1);
8428 }
8429
8430 /* If either argument is zero, hypot is fabs of the other. */
8431 if (real_zerop (arg0))
8432 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8433 else if (real_zerop (arg1))
8434 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8435
8436 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8437 if (flag_unsafe_math_optimizations
8438 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8439 {
8440 const REAL_VALUE_TYPE sqrt2_trunc
8441 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8442 return fold_build2_loc (loc, MULT_EXPR, type,
8443 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8444 build_real (type, sqrt2_trunc));
8445 }
8446
8447 return NULL_TREE;
8448 }
8449
8450
8451 /* Fold a builtin function call to pow, powf, or powl. Return
8452 NULL_TREE if no simplification can be made. */
8453 static tree
8454 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8455 {
8456 tree res;
8457
8458 if (!validate_arg (arg0, REAL_TYPE)
8459 || !validate_arg (arg1, REAL_TYPE))
8460 return NULL_TREE;
8461
8462 /* Calculate the result when the argument is a constant. */
8463 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8464 return res;
8465
8466 /* Optimize pow(1.0,y) = 1.0. */
8467 if (real_onep (arg0))
8468 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8469
8470 if (TREE_CODE (arg1) == REAL_CST
8471 && !TREE_OVERFLOW (arg1))
8472 {
8473 REAL_VALUE_TYPE cint;
8474 REAL_VALUE_TYPE c;
8475 HOST_WIDE_INT n;
8476
8477 c = TREE_REAL_CST (arg1);
8478
8479 /* Optimize pow(x,0.0) = 1.0. */
8480 if (REAL_VALUES_EQUAL (c, dconst0))
8481 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8482 arg0);
8483
8484 /* Optimize pow(x,1.0) = x. */
8485 if (REAL_VALUES_EQUAL (c, dconst1))
8486 return arg0;
8487
8488 /* Optimize pow(x,-1.0) = 1.0/x. */
8489 if (REAL_VALUES_EQUAL (c, dconstm1))
8490 return fold_build2_loc (loc, RDIV_EXPR, type,
8491 build_real (type, dconst1), arg0);
8492
8493 /* Optimize pow(x,0.5) = sqrt(x). */
8494 if (flag_unsafe_math_optimizations
8495 && REAL_VALUES_EQUAL (c, dconsthalf))
8496 {
8497 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8498
8499 if (sqrtfn != NULL_TREE)
8500 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8501 }
8502
8503 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8504 if (flag_unsafe_math_optimizations)
8505 {
8506 const REAL_VALUE_TYPE dconstroot
8507 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8508
8509 if (REAL_VALUES_EQUAL (c, dconstroot))
8510 {
8511 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8512 if (cbrtfn != NULL_TREE)
8513 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8514 }
8515 }
8516
8517 /* Check for an integer exponent. */
8518 n = real_to_integer (&c);
8519 real_from_integer (&cint, VOIDmode, n, SIGNED);
8520 if (real_identical (&c, &cint))
8521 {
8522 /* Attempt to evaluate pow at compile-time, unless this should
8523 raise an exception. */
8524 if (TREE_CODE (arg0) == REAL_CST
8525 && !TREE_OVERFLOW (arg0)
8526 && (n > 0
8527 || (!flag_trapping_math && !flag_errno_math)
8528 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8529 {
8530 REAL_VALUE_TYPE x;
8531 bool inexact;
8532
8533 x = TREE_REAL_CST (arg0);
8534 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8535 if (flag_unsafe_math_optimizations || !inexact)
8536 return build_real (type, x);
8537 }
8538
8539 /* Strip sign ops from even integer powers. */
8540 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8541 {
8542 tree narg0 = fold_strip_sign_ops (arg0);
8543 if (narg0)
8544 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8545 }
8546 }
8547 }
8548
8549 if (flag_unsafe_math_optimizations)
8550 {
8551 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8552
8553 /* Optimize pow(expN(x),y) = expN(x*y). */
8554 if (BUILTIN_EXPONENT_P (fcode))
8555 {
8556 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8557 tree arg = CALL_EXPR_ARG (arg0, 0);
8558 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8559 return build_call_expr_loc (loc, expfn, 1, arg);
8560 }
8561
8562 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8563 if (BUILTIN_SQRT_P (fcode))
8564 {
8565 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8566 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8567 build_real (type, dconsthalf));
8568 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8569 }
8570
8571 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8572 if (BUILTIN_CBRT_P (fcode))
8573 {
8574 tree arg = CALL_EXPR_ARG (arg0, 0);
8575 if (tree_expr_nonnegative_p (arg))
8576 {
8577 const REAL_VALUE_TYPE dconstroot
8578 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8579 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8580 build_real (type, dconstroot));
8581 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8582 }
8583 }
8584
8585 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8586 if (fcode == BUILT_IN_POW
8587 || fcode == BUILT_IN_POWF
8588 || fcode == BUILT_IN_POWL)
8589 {
8590 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8591 if (tree_expr_nonnegative_p (arg00))
8592 {
8593 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8594 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8595 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8596 }
8597 }
8598 }
8599
8600 return NULL_TREE;
8601 }
8602
8603 /* Fold a builtin function call to powi, powif, or powil with arguments
8604 ARG0 and ARG1. Return NULL_TREE if no simplification can be made. */
8605 static tree
8606 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8607 tree arg0, tree arg1, tree type)
8608 {
8609 if (!validate_arg (arg0, REAL_TYPE)
8610 || !validate_arg (arg1, INTEGER_TYPE))
8611 return NULL_TREE;
8612
8613 /* Optimize powi(1.0,y) = 1.0. */
8614 if (real_onep (arg0))
8615 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8616
8617 if (tree_fits_shwi_p (arg1))
8618 {
8619 HOST_WIDE_INT c = tree_to_shwi (arg1);
8620
8621 /* Evaluate powi at compile-time. */
8622 if (TREE_CODE (arg0) == REAL_CST
8623 && !TREE_OVERFLOW (arg0))
8624 {
8625 REAL_VALUE_TYPE x;
8626 x = TREE_REAL_CST (arg0);
8627 real_powi (&x, TYPE_MODE (type), &x, c);
8628 return build_real (type, x);
8629 }
8630
8631 /* Optimize powi(x,0) = 1.0. */
8632 if (c == 0)
8633 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8634 arg0);
8635
8636 /* Optimize powi(x,1) = x. */
8637 if (c == 1)
8638 return arg0;
8639
8640 /* Optimize powi(x,-1) = 1.0/x. */
8641 if (c == -1)
8642 return fold_build2_loc (loc, RDIV_EXPR, type,
8643 build_real (type, dconst1), arg0);
8644 }
8645
8646 return NULL_TREE;
8647 }
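
/* For example, the powi folds above give (illustrative values):

     __builtin_powi (x, 0)    = 1.0      (x kept for side effects)
     __builtin_powi (x, 1)    = x
     __builtin_powi (x, -1)   = 1.0 / x
     __builtin_powi (2.0, 10) = 1024.0   (computed via real_powi)  */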
8648
8649 /* A subroutine of fold_builtin to fold the various exponent
8650 functions. Return NULL_TREE if no simplification can be made.
8651 FUNC is the corresponding MPFR exponent function. */
8652
8653 static tree
8654 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8655 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8656 {
8657 if (validate_arg (arg, REAL_TYPE))
8658 {
8659 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8660 tree res;
8661
8662 /* Calculate the result when the argument is a constant. */
8663 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8664 return res;
8665
8666 /* Optimize expN(logN(x)) = x. */
8667 if (flag_unsafe_math_optimizations)
8668 {
8669 const enum built_in_function fcode = builtin_mathfn_code (arg);
8670
8671 if ((func == mpfr_exp
8672 && (fcode == BUILT_IN_LOG
8673 || fcode == BUILT_IN_LOGF
8674 || fcode == BUILT_IN_LOGL))
8675 || (func == mpfr_exp2
8676 && (fcode == BUILT_IN_LOG2
8677 || fcode == BUILT_IN_LOG2F
8678 || fcode == BUILT_IN_LOG2L))
8679 || (func == mpfr_exp10
8680 && (fcode == BUILT_IN_LOG10
8681 || fcode == BUILT_IN_LOG10F
8682 || fcode == BUILT_IN_LOG10L)))
8683 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8684 }
8685 }
8686
8687 return NULL_TREE;
8688 }
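
/* E.g. under -funsafe-math-optimizations the fold above turns
   exp (log (x)) into x, exp2 (log2 (x)) into x and exp10 (log10 (x))
   into x.  This is unsafe because, for instance, a negative X makes
   logN return NaN, so the original call chain yields NaN rather
   than x.  */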
8689
8690 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8691 arguments to the call, and TYPE is its return type.
8692 Return NULL_TREE if no simplification can be made. */
8693
8694 static tree
8695 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8696 {
8697 if (!validate_arg (arg1, POINTER_TYPE)
8698 || !validate_arg (arg2, INTEGER_TYPE)
8699 || !validate_arg (len, INTEGER_TYPE))
8700 return NULL_TREE;
8701 else
8702 {
8703 const char *p1;
8704
8705 if (TREE_CODE (arg2) != INTEGER_CST
8706 || !tree_fits_uhwi_p (len))
8707 return NULL_TREE;
8708
8709 p1 = c_getstr (arg1);
8710 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8711 {
8712 char c;
8713 const char *r;
8714 tree tem;
8715
8716 if (target_char_cast (arg2, &c))
8717 return NULL_TREE;
8718
8719 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8720
8721 if (r == NULL)
8722 return build_int_cst (TREE_TYPE (arg1), 0);
8723
8724 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8725 return fold_convert_loc (loc, type, tem);
8726 }
8727 return NULL_TREE;
8728 }
8729 }
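
/* A worked example of the memchr fold: for
   __builtin_memchr ("hello", 'l', 5) the host memchr finds 'l' at
   offset 2 and the call folds to the pointer "hello" + 2; with 'z'
   instead it folds to a null pointer.  The LEN <= strlen (p1) + 1
   guard keeps the host read inside the string constant, including
   its terminating NUL.  */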
8730
8731 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
8732 Return NULL_TREE if no simplification can be made. */
8733
8734 static tree
8735 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8736 {
8737 const char *p1, *p2;
8738
8739 if (!validate_arg (arg1, POINTER_TYPE)
8740 || !validate_arg (arg2, POINTER_TYPE)
8741 || !validate_arg (len, INTEGER_TYPE))
8742 return NULL_TREE;
8743
8744 /* If the LEN parameter is zero, return zero. */
8745 if (integer_zerop (len))
8746 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8747 arg1, arg2);
8748
8749 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8750 if (operand_equal_p (arg1, arg2, 0))
8751 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8752
8753 p1 = c_getstr (arg1);
8754 p2 = c_getstr (arg2);
8755
8756 /* If all arguments are constant, and the value of len is not greater
8757 than the lengths of arg1 and arg2, evaluate at compile-time. */
8758 if (tree_fits_uhwi_p (len) && p1 && p2
8759 && compare_tree_int (len, strlen (p1) + 1) <= 0
8760 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8761 {
8762 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8763
8764 if (r > 0)
8765 return integer_one_node;
8766 else if (r < 0)
8767 return integer_minus_one_node;
8768 else
8769 return integer_zero_node;
8770 }
8771
8772 /* If the LEN parameter is one, return an expression corresponding to
8773 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8774 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8775 {
8776 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8777 tree cst_uchar_ptr_node
8778 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8779
8780 tree ind1
8781 = fold_convert_loc (loc, integer_type_node,
8782 build1 (INDIRECT_REF, cst_uchar_node,
8783 fold_convert_loc (loc,
8784 cst_uchar_ptr_node,
8785 arg1)));
8786 tree ind2
8787 = fold_convert_loc (loc, integer_type_node,
8788 build1 (INDIRECT_REF, cst_uchar_node,
8789 fold_convert_loc (loc,
8790 cst_uchar_ptr_node,
8791 arg2)));
8792 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8793 }
8794
8795 return NULL_TREE;
8796 }
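
/* For instance, memcmp (p, q, 1) becomes
   (int) *(const unsigned char *) p - (int) *(const unsigned char *) q,
   while memcmp ("ab", "ac", 2) folds to -1 at compile time; the
   constant path normalizes results to -1/0/1, a valid refinement of
   the negative/zero/positive contract.  */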
8797
8798 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8799 Return NULL_TREE if no simplification can be made. */
8800
8801 static tree
8802 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8803 {
8804 const char *p1, *p2;
8805
8806 if (!validate_arg (arg1, POINTER_TYPE)
8807 || !validate_arg (arg2, POINTER_TYPE))
8808 return NULL_TREE;
8809
8810 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8811 if (operand_equal_p (arg1, arg2, 0))
8812 return integer_zero_node;
8813
8814 p1 = c_getstr (arg1);
8815 p2 = c_getstr (arg2);
8816
8817 if (p1 && p2)
8818 {
8819 const int i = strcmp (p1, p2);
8820 if (i < 0)
8821 return integer_minus_one_node;
8822 else if (i > 0)
8823 return integer_one_node;
8824 else
8825 return integer_zero_node;
8826 }
8827
8828 /* If the second arg is "", return *(const unsigned char*)arg1. */
8829 if (p2 && *p2 == '\0')
8830 {
8831 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8832 tree cst_uchar_ptr_node
8833 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8834
8835 return fold_convert_loc (loc, integer_type_node,
8836 build1 (INDIRECT_REF, cst_uchar_node,
8837 fold_convert_loc (loc,
8838 cst_uchar_ptr_node,
8839 arg1)));
8840 }
8841
8842 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8843 if (p1 && *p1 == '\0')
8844 {
8845 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8846 tree cst_uchar_ptr_node
8847 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8848
8849 tree temp
8850 = fold_convert_loc (loc, integer_type_node,
8851 build1 (INDIRECT_REF, cst_uchar_node,
8852 fold_convert_loc (loc,
8853 cst_uchar_ptr_node,
8854 arg2)));
8855 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8856 }
8857
8858 return NULL_TREE;
8859 }
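
/* Example: strcmp (s, "") folds to *(const unsigned char *) s, which
   is zero exactly when *s is the terminating NUL, and strcmp ("", s)
   folds to the negation of the same load from s.  */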
8860
8861 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8862 Return NULL_TREE if no simplification can be made. */
8863
8864 static tree
8865 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8866 {
8867 const char *p1, *p2;
8868
8869 if (!validate_arg (arg1, POINTER_TYPE)
8870 || !validate_arg (arg2, POINTER_TYPE)
8871 || !validate_arg (len, INTEGER_TYPE))
8872 return NULL_TREE;
8873
8874 /* If the LEN parameter is zero, return zero. */
8875 if (integer_zerop (len))
8876 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8877 arg1, arg2);
8878
8879 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8880 if (operand_equal_p (arg1, arg2, 0))
8881 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8882
8883 p1 = c_getstr (arg1);
8884 p2 = c_getstr (arg2);
8885
8886 if (tree_fits_uhwi_p (len) && p1 && p2)
8887 {
8888 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8889 if (i > 0)
8890 return integer_one_node;
8891 else if (i < 0)
8892 return integer_minus_one_node;
8893 else
8894 return integer_zero_node;
8895 }
8896
8897 /* If the second arg is "", and the length is greater than zero,
8898 return *(const unsigned char*)arg1. */
8899 if (p2 && *p2 == '\0'
8900 && TREE_CODE (len) == INTEGER_CST
8901 && tree_int_cst_sgn (len) == 1)
8902 {
8903 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8904 tree cst_uchar_ptr_node
8905 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8906
8907 return fold_convert_loc (loc, integer_type_node,
8908 build1 (INDIRECT_REF, cst_uchar_node,
8909 fold_convert_loc (loc,
8910 cst_uchar_ptr_node,
8911 arg1)));
8912 }
8913
8914 /* If the first arg is "", and the length is greater than zero,
8915 return -*(const unsigned char*)arg2. */
8916 if (p1 && *p1 == '\0'
8917 && TREE_CODE (len) == INTEGER_CST
8918 && tree_int_cst_sgn (len) == 1)
8919 {
8920 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8921 tree cst_uchar_ptr_node
8922 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8923
8924 tree temp = fold_convert_loc (loc, integer_type_node,
8925 build1 (INDIRECT_REF, cst_uchar_node,
8926 fold_convert_loc (loc,
8927 cst_uchar_ptr_node,
8928 arg2)));
8929 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8930 }
8931
8932 /* If the LEN parameter is one, return an expression corresponding to
8933 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8934 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8935 {
8936 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8937 tree cst_uchar_ptr_node
8938 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8939
8940 tree ind1 = fold_convert_loc (loc, integer_type_node,
8941 build1 (INDIRECT_REF, cst_uchar_node,
8942 fold_convert_loc (loc,
8943 cst_uchar_ptr_node,
8944 arg1)));
8945 tree ind2 = fold_convert_loc (loc, integer_type_node,
8946 build1 (INDIRECT_REF, cst_uchar_node,
8947 fold_convert_loc (loc,
8948 cst_uchar_ptr_node,
8949 arg2)));
8950 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8951 }
8952
8953 return NULL_TREE;
8954 }
8955
8956 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8957 ARG. Return NULL_TREE if no simplification can be made. */
8958
8959 static tree
8960 fold_builtin_signbit (location_t loc, tree arg, tree type)
8961 {
8962 if (!validate_arg (arg, REAL_TYPE))
8963 return NULL_TREE;
8964
8965 /* If ARG is a compile-time constant, determine the result. */
8966 if (TREE_CODE (arg) == REAL_CST
8967 && !TREE_OVERFLOW (arg))
8968 {
8969 REAL_VALUE_TYPE c;
8970
8971 c = TREE_REAL_CST (arg);
8972 return (REAL_VALUE_NEGATIVE (c)
8973 ? build_one_cst (type)
8974 : build_zero_cst (type));
8975 }
8976
8977 /* If ARG is non-negative, the result is always zero. */
8978 if (tree_expr_nonnegative_p (arg))
8979 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8980
8981 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8982 if (!HONOR_SIGNED_ZEROS (arg))
8983 return fold_convert (type,
8984 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8985 build_real (TREE_TYPE (arg), dconst0)));
8986
8987 return NULL_TREE;
8988 }
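
/* E.g. __builtin_signbit (-3.0) folds to 1, and so does
   __builtin_signbit (-0.0), since REAL_VALUE_NEGATIVE tests the sign
   bit itself; when the format has no signed zeros the generic
   "arg < 0.0" form computes the same bit.  */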
8989
8990 /* Fold function call to builtin copysign, copysignf or copysignl with
8991 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8992 be made. */
8993
8994 static tree
8995 fold_builtin_copysign (location_t loc, tree fndecl,
8996 tree arg1, tree arg2, tree type)
8997 {
8998 tree tem;
8999
9000 if (!validate_arg (arg1, REAL_TYPE)
9001 || !validate_arg (arg2, REAL_TYPE))
9002 return NULL_TREE;
9003
9004 /* copysign(X,X) is X. */
9005 if (operand_equal_p (arg1, arg2, 0))
9006 return fold_convert_loc (loc, type, arg1);
9007
9008 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9009 if (TREE_CODE (arg1) == REAL_CST
9010 && TREE_CODE (arg2) == REAL_CST
9011 && !TREE_OVERFLOW (arg1)
9012 && !TREE_OVERFLOW (arg2))
9013 {
9014 REAL_VALUE_TYPE c1, c2;
9015
9016 c1 = TREE_REAL_CST (arg1);
9017 c2 = TREE_REAL_CST (arg2);
9018 /* c1.sign := c2.sign. */
9019 real_copysign (&c1, &c2);
9020 return build_real (type, c1);
9021 }
9022
9023 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9024 Remember to evaluate Y for side-effects. */
9025 if (tree_expr_nonnegative_p (arg2))
9026 return omit_one_operand_loc (loc, type,
9027 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9028 arg2);
9029
9030 /* Strip sign changing operations for the first argument. */
9031 tem = fold_strip_sign_ops (arg1);
9032 if (tem)
9033 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9034
9035 return NULL_TREE;
9036 }
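
/* For illustration: copysign (x, 2.0) folds to fabs (x), since the
   constant 2.0 is trivially non-negative, and copysign (-3.0, 1.0)
   folds straight to 3.0 via real_copysign.  */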
9037
9038 /* Fold a call to builtin isascii with argument ARG. */
9039
9040 static tree
9041 fold_builtin_isascii (location_t loc, tree arg)
9042 {
9043 if (!validate_arg (arg, INTEGER_TYPE))
9044 return NULL_TREE;
9045 else
9046 {
9047 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9048 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9049 build_int_cst (integer_type_node,
9050 ~ (unsigned HOST_WIDE_INT) 0x7f));
9051 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9052 arg, integer_zero_node);
9053 }
9054 }
9055
9056 /* Fold a call to builtin toascii with argument ARG. */
9057
9058 static tree
9059 fold_builtin_toascii (location_t loc, tree arg)
9060 {
9061 if (!validate_arg (arg, INTEGER_TYPE))
9062 return NULL_TREE;
9063
9064 /* Transform toascii(c) -> (c & 0x7f). */
9065 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9066 build_int_cst (integer_type_node, 0x7f));
9067 }
9068
9069 /* Fold a call to builtin isdigit with argument ARG. */
9070
9071 static tree
9072 fold_builtin_isdigit (location_t loc, tree arg)
9073 {
9074 if (!validate_arg (arg, INTEGER_TYPE))
9075 return NULL_TREE;
9076 else
9077 {
9078 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9079 /* According to the C standard, isdigit is unaffected by locale.
9080 However, it definitely is affected by the target character set. */
9081 unsigned HOST_WIDE_INT target_digit0
9082 = lang_hooks.to_target_charset ('0');
9083
9084 if (target_digit0 == 0)
9085 return NULL_TREE;
9086
9087 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9088 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9089 build_int_cst (unsigned_type_node, target_digit0));
9090 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9091 build_int_cst (unsigned_type_node, 9));
9092 }
9093 }
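
/* The single unsigned comparison above covers both bounds: when c is
   below '0' the subtraction wraps to a huge unsigned value.  Assuming
   ASCII and 32-bit unsigned, '5' gives 5 <= 9 (true) while ' ' gives
   0xfffffff0 <= 9 (false).  */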
9094
9095 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9096
9097 static tree
9098 fold_builtin_fabs (location_t loc, tree arg, tree type)
9099 {
9100 if (!validate_arg (arg, REAL_TYPE))
9101 return NULL_TREE;
9102
9103 arg = fold_convert_loc (loc, type, arg);
9104 if (TREE_CODE (arg) == REAL_CST)
9105 return fold_abs_const (arg, type);
9106 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9107 }
9108
9109 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9110
9111 static tree
9112 fold_builtin_abs (location_t loc, tree arg, tree type)
9113 {
9114 if (!validate_arg (arg, INTEGER_TYPE))
9115 return NULL_TREE;
9116
9117 arg = fold_convert_loc (loc, type, arg);
9118 if (TREE_CODE (arg) == INTEGER_CST)
9119 return fold_abs_const (arg, type);
9120 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9121 }
9122
9123 /* Fold an fma operation with arguments ARG[012]. */
9124
9125 tree
9126 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9127 tree type, tree arg0, tree arg1, tree arg2)
9128 {
9129 if (TREE_CODE (arg0) == REAL_CST
9130 && TREE_CODE (arg1) == REAL_CST
9131 && TREE_CODE (arg2) == REAL_CST)
9132 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9133
9134 return NULL_TREE;
9135 }
9136
9137 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9138
9139 static tree
9140 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9141 {
9142 if (validate_arg (arg0, REAL_TYPE)
9143 && validate_arg (arg1, REAL_TYPE)
9144 && validate_arg (arg2, REAL_TYPE))
9145 {
9146 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9147 if (tem)
9148 return tem;
9149
9150 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9151 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9152 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9153 }
9154 return NULL_TREE;
9155 }
9156
9157 /* Fold a call to builtin fmin or fmax. */
9158
9159 static tree
9160 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9161 tree type, bool max)
9162 {
9163 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9164 {
9165 /* Calculate the result when the argument is a constant. */
9166 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9167
9168 if (res)
9169 return res;
9170
9171 /* If either argument is NaN, return the other one. Avoid the
9172 transformation if we get (and honor) a signalling NaN. Using
9173 omit_one_operand() ensures we create a non-lvalue. */
9174 if (TREE_CODE (arg0) == REAL_CST
9175 && real_isnan (&TREE_REAL_CST (arg0))
9176 && (! HONOR_SNANS (arg0)
9177 || ! TREE_REAL_CST (arg0).signalling))
9178 return omit_one_operand_loc (loc, type, arg1, arg0);
9179 if (TREE_CODE (arg1) == REAL_CST
9180 && real_isnan (&TREE_REAL_CST (arg1))
9181 && (! HONOR_SNANS (arg1)
9182 || ! TREE_REAL_CST (arg1).signalling))
9183 return omit_one_operand_loc (loc, type, arg0, arg1);
9184
9185 /* Transform fmin/fmax(x,x) -> x. */
9186 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9187 return omit_one_operand_loc (loc, type, arg0, arg1);
9188
9189 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9190 functions to return the numeric arg if the other one is NaN.
9191 These tree codes don't honor that, so only transform if
9192 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9193 handled, so we don't have to worry about it either. */
9194 if (flag_finite_math_only)
9195 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9196 fold_convert_loc (loc, type, arg0),
9197 fold_convert_loc (loc, type, arg1));
9198 }
9199 return NULL_TREE;
9200 }
9201
9202 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9203
9204 static tree
9205 fold_builtin_carg (location_t loc, tree arg, tree type)
9206 {
9207 if (validate_arg (arg, COMPLEX_TYPE)
9208 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9209 {
9210 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9211
9212 if (atan2_fn)
9213 {
9214 tree new_arg = builtin_save_expr (arg);
9215 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9216 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9217 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9218 }
9219 }
9220
9221 return NULL_TREE;
9222 }
9223
9224 /* Fold a call to builtin logb/ilogb. */
9225
9226 static tree
9227 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9228 {
9229 if (! validate_arg (arg, REAL_TYPE))
9230 return NULL_TREE;
9231
9232 STRIP_NOPS (arg);
9233
9234 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9235 {
9236 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9237
9238 switch (value->cl)
9239 {
9240 case rvc_nan:
9241 case rvc_inf:
9242 /* If arg is Inf or NaN and we're logb, return it. */
9243 if (TREE_CODE (rettype) == REAL_TYPE)
9244 {
9245 /* For logb(-Inf) we have to return +Inf. */
9246 if (real_isinf (value) && real_isneg (value))
9247 {
9248 REAL_VALUE_TYPE tem;
9249 real_inf (&tem);
9250 return build_real (rettype, tem);
9251 }
9252 return fold_convert_loc (loc, rettype, arg);
9253 }
9254 /* Fall through... */
9255 case rvc_zero:
9256 /* Zero may set errno and/or raise an exception for logb; also,
9257 for ilogb we don't know FP_ILOGB0. */
9258 return NULL_TREE;
9259 case rvc_normal:
9260 /* For normal numbers, proceed iff radix == 2. In GCC,
9261 normalized significands are in the range [0.5, 1.0). We
9262 want the exponent as if they were [1.0, 2.0) so get the
9263 exponent and subtract 1. */
9264 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9265 return fold_convert_loc (loc, rettype,
9266 build_int_cst (integer_type_node,
9267 REAL_EXP (value)-1));
9268 break;
9269 }
9270 }
9271
9272 return NULL_TREE;
9273 }
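
/* Worked example for the rvc_normal case: 8.0 is stored internally
   as 0.5 * 2**4, so REAL_EXP is 4 and logb (8.0) folds to
   REAL_EXP - 1 = 3, matching the [1.0, 2.0) normalization logb is
   defined against.  */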
9274
9275 /* Fold a call to builtin significand, if radix == 2. */
9276
9277 static tree
9278 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9279 {
9280 if (! validate_arg (arg, REAL_TYPE))
9281 return NULL_TREE;
9282
9283 STRIP_NOPS (arg);
9284
9285 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9286 {
9287 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9288
9289 switch (value->cl)
9290 {
9291 case rvc_zero:
9292 case rvc_nan:
9293 case rvc_inf:
9294 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9295 return fold_convert_loc (loc, rettype, arg);
9296 case rvc_normal:
9297 /* For normal numbers, proceed iff radix == 2. */
9298 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9299 {
9300 REAL_VALUE_TYPE result = *value;
9301 /* In GCC, normalized significands are in the range [0.5,
9302 1.0). We want them to be [1.0, 2.0) so set the
9303 exponent to 1. */
9304 SET_REAL_EXP (&result, 1);
9305 return build_real (rettype, result);
9306 }
9307 break;
9308 }
9309 }
9310
9311 return NULL_TREE;
9312 }
9313
9314 /* Fold a call to builtin frexp; we can assume the base is 2. */
9315
9316 static tree
9317 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9318 {
9319 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9320 return NULL_TREE;
9321
9322 STRIP_NOPS (arg0);
9323
9324 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9325 return NULL_TREE;
9326
9327 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9328
9329 /* Proceed if a valid pointer type was passed in. */
9330 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9331 {
9332 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9333 tree frac, exp;
9334
9335 switch (value->cl)
9336 {
9337 case rvc_zero:
9338 /* For +-0, return (*exp = 0, +-0). */
9339 exp = integer_zero_node;
9340 frac = arg0;
9341 break;
9342 case rvc_nan:
9343 case rvc_inf:
9344 /* For +-NaN or +-Inf, *exp is unspecified; return arg0. */
9345 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9346 case rvc_normal:
9347 {
9348 /* Since the frexp function always expects base 2, and in
9349 GCC normalized significands are already in the range
9350 [0.5, 1.0), we have exactly what frexp wants. */
9351 REAL_VALUE_TYPE frac_rvt = *value;
9352 SET_REAL_EXP (&frac_rvt, 0);
9353 frac = build_real (rettype, frac_rvt);
9354 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9355 }
9356 break;
9357 default:
9358 gcc_unreachable ();
9359 }
9360
9361 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9362 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9363 TREE_SIDE_EFFECTS (arg1) = 1;
9364 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9365 }
9366
9367 return NULL_TREE;
9368 }
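
/* Illustration: frexp (8.0, &e) folds to the pair frac = 0.5, *e = 4,
   because the internal 0.5 * 2**4 representation already has the
   [0.5, 1.0) fraction frexp requires; only the stored exponent is
   reset to 0 to extract it.  */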
9369
9370 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9371 then we can assume the base is two. If it's false, then we have to
9372 check the mode of the TYPE parameter in certain cases. */
9373
9374 static tree
9375 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9376 tree type, bool ldexp)
9377 {
9378 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9379 {
9380 STRIP_NOPS (arg0);
9381 STRIP_NOPS (arg1);
9382
9383 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9384 if (real_zerop (arg0) || integer_zerop (arg1)
9385 || (TREE_CODE (arg0) == REAL_CST
9386 && !real_isfinite (&TREE_REAL_CST (arg0))))
9387 return omit_one_operand_loc (loc, type, arg0, arg1);
9388
9389 /* If both arguments are constant, then try to evaluate it. */
9390 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9391 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9392 && tree_fits_shwi_p (arg1))
9393 {
9394 /* Bound the maximum adjustment to twice the range of the
9395 mode's valid exponents. Use labs to ensure the range is
9396 positive, as a sanity check. */
9397 const long max_exp_adj = 2 *
9398 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9399 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9400
9401 /* Get the user-requested adjustment. */
9402 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9403
9404 /* The requested adjustment must be inside this range. This
9405 is a preliminary cap to avoid things like overflow; we
9406 may still fail to compute the result for other reasons. */
9407 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9408 {
9409 REAL_VALUE_TYPE initial_result;
9410
9411 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9412
9413 /* Ensure we didn't overflow. */
9414 if (! real_isinf (&initial_result))
9415 {
9416 const REAL_VALUE_TYPE trunc_result
9417 = real_value_truncate (TYPE_MODE (type), initial_result);
9418
9419 /* Only proceed if the target mode can hold the
9420 resulting value. */
9421 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9422 return build_real (type, trunc_result);
9423 }
9424 }
9425 }
9426 }
9427
9428 return NULL_TREE;
9429 }
9430
9431 /* Fold a call to builtin modf. */
9432
9433 static tree
9434 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9435 {
9436 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9437 return NULL_TREE;
9438
9439 STRIP_NOPS (arg0);
9440
9441 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9442 return NULL_TREE;
9443
9444 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9445
9446 /* Proceed if a valid pointer type was passed in. */
9447 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9448 {
9449 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9450 REAL_VALUE_TYPE trunc, frac;
9451
9452 switch (value->cl)
9453 {
9454 case rvc_nan:
9455 case rvc_zero:
9456 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9457 trunc = frac = *value;
9458 break;
9459 case rvc_inf:
9460 /* For +-Inf, return (*arg1 = arg0, +-0). */
9461 frac = dconst0;
9462 frac.sign = value->sign;
9463 trunc = *value;
9464 break;
9465 case rvc_normal:
9466 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9467 real_trunc (&trunc, VOIDmode, value);
9468 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9469 /* If the original number was negative and already
9470 integral, then the fractional part is -0.0. */
9471 if (value->sign && frac.cl == rvc_zero)
9472 frac.sign = value->sign;
9473 break;
9474 }
9475
9476 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9477 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9478 build_real (rettype, trunc));
9479 TREE_SIDE_EFFECTS (arg1) = 1;
9480 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9481 build_real (rettype, frac));
9482 }
9483
9484 return NULL_TREE;
9485 }
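
/* E.g. modf (-2.5, &ip) folds to (*ip = -2.0, -0.5), and
   modf (-2.0, &ip) to (*ip = -2.0, -0.0): the sign is copied onto a
   zero fraction so a negative integral input still reports a
   negatively signed fractional part.  */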
9486
9487 /* Given a location LOC, an interclass builtin function decl FNDECL
9488 and its single argument ARG, return a folded expression computing
9489 the same, or NULL_TREE if we either couldn't or didn't want to fold
9490 (the latter happens if there's an RTL instruction available). */
9491
9492 static tree
9493 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9494 {
9495 machine_mode mode;
9496
9497 if (!validate_arg (arg, REAL_TYPE))
9498 return NULL_TREE;
9499
9500 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9501 return NULL_TREE;
9502
9503 mode = TYPE_MODE (TREE_TYPE (arg));
9504
9505 /* If there is no optab, try generic code. */
9506 switch (DECL_FUNCTION_CODE (fndecl))
9507 {
9508 tree result;
9509
9510 CASE_FLT_FN (BUILT_IN_ISINF):
9511 {
9512 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9513 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9514 tree const type = TREE_TYPE (arg);
9515 REAL_VALUE_TYPE r;
9516 char buf[128];
9517
9518 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9519 real_from_string (&r, buf);
9520 result = build_call_expr (isgr_fn, 2,
9521 fold_build1_loc (loc, ABS_EXPR, type, arg),
9522 build_real (type, r));
9523 return result;
9524 }
9525 CASE_FLT_FN (BUILT_IN_FINITE):
9526 case BUILT_IN_ISFINITE:
9527 {
9528 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9529 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9530 tree const type = TREE_TYPE (arg);
9531 REAL_VALUE_TYPE r;
9532 char buf[128];
9533
9534 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9535 real_from_string (&r, buf);
9536 result = build_call_expr (isle_fn, 2,
9537 fold_build1_loc (loc, ABS_EXPR, type, arg),
9538 build_real (type, r));
9539 /*result = fold_build2_loc (loc, UNGT_EXPR,
9540 TREE_TYPE (TREE_TYPE (fndecl)),
9541 fold_build1_loc (loc, ABS_EXPR, type, arg),
9542 build_real (type, r));
9543 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9544 TREE_TYPE (TREE_TYPE (fndecl)),
9545 result);*/
9546 return result;
9547 }
9548 case BUILT_IN_ISNORMAL:
9549 {
9550 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9551 islessequal(fabs(x),DBL_MAX). */
9552 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9553 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9554 tree const type = TREE_TYPE (arg);
9555 REAL_VALUE_TYPE rmax, rmin;
9556 char buf[128];
9557
9558 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9559 real_from_string (&rmax, buf);
9560 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9561 real_from_string (&rmin, buf);
9562 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9563 result = build_call_expr (isle_fn, 2, arg,
9564 build_real (type, rmax));
9565 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9566 build_call_expr (isge_fn, 2, arg,
9567 build_real (type, rmin)));
9568 return result;
9569 }
9570 default:
9571 break;
9572 }
9573
9574 return NULL_TREE;
9575 }
9576
9577 /* Fold a call to __builtin_isnan, __builtin_isinf, __builtin_isinf_sign or
9578 __builtin_isfinite. ARG is the argument; BUILTIN_INDEX selects which. */
9579
9580 static tree
9581 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9582 {
9583 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9584 REAL_VALUE_TYPE r;
9585
9586 if (!validate_arg (arg, REAL_TYPE))
9587 return NULL_TREE;
9588
9589 switch (builtin_index)
9590 {
9591 case BUILT_IN_ISINF:
9592 if (!HONOR_INFINITIES (arg))
9593 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9594
9595 if (TREE_CODE (arg) == REAL_CST)
9596 {
9597 r = TREE_REAL_CST (arg);
9598 if (real_isinf (&r))
9599 return real_compare (GT_EXPR, &r, &dconst0)
9600 ? integer_one_node : integer_minus_one_node;
9601 else
9602 return integer_zero_node;
9603 }
9604
9605 return NULL_TREE;
9606
9607 case BUILT_IN_ISINF_SIGN:
9608 {
9609 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9610 /* In a boolean context, GCC will fold the inner COND_EXPR to
9611 1. So e.g. "if (isinf_sign(x))" would be folded to just
9612 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9613 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9614 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9615 tree tmp = NULL_TREE;
9616
9617 arg = builtin_save_expr (arg);
9618
9619 if (signbit_fn && isinf_fn)
9620 {
9621 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9622 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9623
9624 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9625 signbit_call, integer_zero_node);
9626 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9627 isinf_call, integer_zero_node);
9628
9629 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9630 integer_minus_one_node, integer_one_node);
9631 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9632 isinf_call, tmp,
9633 integer_zero_node);
9634 }
9635
9636 return tmp;
9637 }
9638
9639 case BUILT_IN_ISFINITE:
9640 if (!HONOR_NANS (arg)
9641 && !HONOR_INFINITIES (arg))
9642 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9643
9644 if (TREE_CODE (arg) == REAL_CST)
9645 {
9646 r = TREE_REAL_CST (arg);
9647 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9648 }
9649
9650 return NULL_TREE;
9651
9652 case BUILT_IN_ISNAN:
9653 if (!HONOR_NANS (arg))
9654 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9655
9656 if (TREE_CODE (arg) == REAL_CST)
9657 {
9658 r = TREE_REAL_CST (arg);
9659 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9660 }
9661
9662 arg = builtin_save_expr (arg);
9663 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9664
9665 default:
9666 gcc_unreachable ();
9667 }
9668 }
9669
9670 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9671 This builtin will generate code to return the appropriate floating
9672 point classification depending on the value of the floating point
9673 number passed in. The possible return values must be supplied as
9674 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9675 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9676 one floating point argument which is "type generic". */
9677
9678 static tree
9679 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9680 {
9681 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9682 arg, type, res, tmp;
9683 machine_mode mode;
9684 REAL_VALUE_TYPE r;
9685 char buf[128];
9686
9687 /* Verify the required arguments in the original call. */
9688 if (nargs != 6
9689 || !validate_arg (args[0], INTEGER_TYPE)
9690 || !validate_arg (args[1], INTEGER_TYPE)
9691 || !validate_arg (args[2], INTEGER_TYPE)
9692 || !validate_arg (args[3], INTEGER_TYPE)
9693 || !validate_arg (args[4], INTEGER_TYPE)
9694 || !validate_arg (args[5], REAL_TYPE))
9695 return NULL_TREE;
9696
9697 fp_nan = args[0];
9698 fp_infinite = args[1];
9699 fp_normal = args[2];
9700 fp_subnormal = args[3];
9701 fp_zero = args[4];
9702 arg = args[5];
9703 type = TREE_TYPE (arg);
9704 mode = TYPE_MODE (type);
9705 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9706
9707 /* fpclassify(x) ->
9708 isnan(x) ? FP_NAN :
9709 (fabs(x) == Inf ? FP_INFINITE :
9710 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9711 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9712
9713 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9714 build_real (type, dconst0));
9715 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9716 tmp, fp_zero, fp_subnormal);
9717
9718 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9719 real_from_string (&r, buf);
9720 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9721 arg, build_real (type, r));
9722 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9723
9724 if (HONOR_INFINITIES (mode))
9725 {
9726 real_inf (&r);
9727 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9728 build_real (type, r));
9729 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9730 fp_infinite, res);
9731 }
9732
9733 if (HONOR_NANS (mode))
9734 {
9735 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9736 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9737 }
9738
9739 return res;
9740 }
9741
9742 /* Fold a call to an unordered comparison function such as
9743 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9744 being called and ARG0 and ARG1 are the arguments for the call.
9745 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9746 the opposite of the desired result. UNORDERED_CODE is used
9747 for modes that can hold NaNs and ORDERED_CODE is used for
9748 the rest. */
9749
9750 static tree
9751 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9752 enum tree_code unordered_code,
9753 enum tree_code ordered_code)
9754 {
9755 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9756 enum tree_code code;
9757 tree type0, type1;
9758 enum tree_code code0, code1;
9759 tree cmp_type = NULL_TREE;
9760
9761 type0 = TREE_TYPE (arg0);
9762 type1 = TREE_TYPE (arg1);
9763
9764 code0 = TREE_CODE (type0);
9765 code1 = TREE_CODE (type1);
9766
9767 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9768 /* Choose the wider of two real types. */
9769 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9770 ? type0 : type1;
9771 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9772 cmp_type = type0;
9773 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9774 cmp_type = type1;
9775
9776 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9777 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9778
9779 if (unordered_code == UNORDERED_EXPR)
9780 {
9781 if (!HONOR_NANS (arg0))
9782 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9783 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9784 }
9785
9786 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9787 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9788 fold_build2_loc (loc, code, type, arg0, arg1));
9789 }
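
/* For example, isgreater (x, y) folds to the negation of (x UNLE y)
   when NaNs are honored: unordered operands make UNLE true, so the
   negation yields the required "ordered and greater".  Without NaNs
   it is simply the negation of (x <= y).  */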
9790
9791 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9792 arithmetic if it can never overflow, or into internal functions that
9793 return both the result of the arithmetic and an overflowed boolean flag
9794 in a complex integer result, or into some other check for overflow. */
9795
9796 static tree
9797 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9798 tree arg0, tree arg1, tree arg2)
9799 {
9800 enum internal_fn ifn = IFN_LAST;
9801 tree type = TREE_TYPE (TREE_TYPE (arg2));
9802 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9803 switch (fcode)
9804 {
9805 case BUILT_IN_ADD_OVERFLOW:
9806 case BUILT_IN_SADD_OVERFLOW:
9807 case BUILT_IN_SADDL_OVERFLOW:
9808 case BUILT_IN_SADDLL_OVERFLOW:
9809 case BUILT_IN_UADD_OVERFLOW:
9810 case BUILT_IN_UADDL_OVERFLOW:
9811 case BUILT_IN_UADDLL_OVERFLOW:
9812 ifn = IFN_ADD_OVERFLOW;
9813 break;
9814 case BUILT_IN_SUB_OVERFLOW:
9815 case BUILT_IN_SSUB_OVERFLOW:
9816 case BUILT_IN_SSUBL_OVERFLOW:
9817 case BUILT_IN_SSUBLL_OVERFLOW:
9818 case BUILT_IN_USUB_OVERFLOW:
9819 case BUILT_IN_USUBL_OVERFLOW:
9820 case BUILT_IN_USUBLL_OVERFLOW:
9821 ifn = IFN_SUB_OVERFLOW;
9822 break;
9823 case BUILT_IN_MUL_OVERFLOW:
9824 case BUILT_IN_SMUL_OVERFLOW:
9825 case BUILT_IN_SMULL_OVERFLOW:
9826 case BUILT_IN_SMULLL_OVERFLOW:
9827 case BUILT_IN_UMUL_OVERFLOW:
9828 case BUILT_IN_UMULL_OVERFLOW:
9829 case BUILT_IN_UMULLL_OVERFLOW:
9830 ifn = IFN_MUL_OVERFLOW;
9831 break;
9832 default:
9833 gcc_unreachable ();
9834 }
9835 tree ctype = build_complex_type (type);
9836 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9837 2, arg0, arg1);
9838 tree tgt = save_expr (call);
9839 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9840 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9841 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9842 tree store
9843 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9844 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9845 }
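
/* A sketch of the lowering above: __builtin_add_overflow (a, b, &r)
   becomes, in tree form,

     c = .ADD_OVERFLOW (a, b);    <-- complex int: result + overflow
     r = REALPART_EXPR <c>,
     (_Bool) IMAGPART_EXPR <c>

   so the wrapped result is stored through the pointer and the
   overflow flag becomes the call's value.  */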
9846
9847 /* Fold a call to built-in function FNDECL with 0 arguments.
9848 This function returns NULL_TREE if no simplification was possible. */
9849
9850 static tree
9851 fold_builtin_0 (location_t loc, tree fndecl)
9852 {
9853 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9854 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9855 switch (fcode)
9856 {
9857 CASE_FLT_FN (BUILT_IN_INF):
9858 case BUILT_IN_INFD32:
9859 case BUILT_IN_INFD64:
9860 case BUILT_IN_INFD128:
9861 return fold_builtin_inf (loc, type, true);
9862
9863 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9864 return fold_builtin_inf (loc, type, false);
9865
9866 case BUILT_IN_CLASSIFY_TYPE:
9867 return fold_builtin_classify_type (NULL_TREE);
9868
9869 default:
9870 break;
9871 }
9872 return NULL_TREE;
9873 }
9874
9875 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9876 This function returns NULL_TREE if no simplification was possible. */
9877
9878 static tree
9879 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9880 {
9881 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9882 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9883 switch (fcode)
9884 {
9885 case BUILT_IN_CONSTANT_P:
9886 {
9887 tree val = fold_builtin_constant_p (arg0);
9888
9889 /* Gimplification will pull the CALL_EXPR for the builtin out of
9890 an if condition. When not optimizing, we'll not CSE it back.
9891 To avoid regressions in the form of link errors, return false now. */
9892 if (!val && !optimize)
9893 val = integer_zero_node;
9894
9895 return val;
9896 }
9897
9898 case BUILT_IN_CLASSIFY_TYPE:
9899 return fold_builtin_classify_type (arg0);
9900
9901 case BUILT_IN_STRLEN:
9902 return fold_builtin_strlen (loc, type, arg0);
9903
9904 CASE_FLT_FN (BUILT_IN_FABS):
9905 case BUILT_IN_FABSD32:
9906 case BUILT_IN_FABSD64:
9907 case BUILT_IN_FABSD128:
9908 return fold_builtin_fabs (loc, arg0, type);
9909
9910 case BUILT_IN_ABS:
9911 case BUILT_IN_LABS:
9912 case BUILT_IN_LLABS:
9913 case BUILT_IN_IMAXABS:
9914 return fold_builtin_abs (loc, arg0, type);
9915
9916 CASE_FLT_FN (BUILT_IN_CONJ):
9917 if (validate_arg (arg0, COMPLEX_TYPE)
9918 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9919 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9920 break;
9921
9922 CASE_FLT_FN (BUILT_IN_CREAL):
9923 if (validate_arg (arg0, COMPLEX_TYPE)
9924 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9925 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9926 break;
9927
9928 CASE_FLT_FN (BUILT_IN_CIMAG):
9929 if (validate_arg (arg0, COMPLEX_TYPE)
9930 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9931 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9932 break;
9933
9934 CASE_FLT_FN (BUILT_IN_CCOS):
9935 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9936
9937 CASE_FLT_FN (BUILT_IN_CCOSH):
9938 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9939
9940 CASE_FLT_FN (BUILT_IN_CPROJ):
9941 return fold_builtin_cproj (loc, arg0, type);
9942
9943 CASE_FLT_FN (BUILT_IN_CSIN):
9944 if (validate_arg (arg0, COMPLEX_TYPE)
9945 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9946 return do_mpc_arg1 (arg0, type, mpc_sin);
9947 break;
9948
9949 CASE_FLT_FN (BUILT_IN_CSINH):
9950 if (validate_arg (arg0, COMPLEX_TYPE)
9951 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9952 return do_mpc_arg1 (arg0, type, mpc_sinh);
9953 break;
9954
9955 CASE_FLT_FN (BUILT_IN_CTAN):
9956 if (validate_arg (arg0, COMPLEX_TYPE)
9957 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9958 return do_mpc_arg1 (arg0, type, mpc_tan);
9959 break;
9960
9961 CASE_FLT_FN (BUILT_IN_CTANH):
9962 if (validate_arg (arg0, COMPLEX_TYPE)
9963 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9964 return do_mpc_arg1 (arg0, type, mpc_tanh);
9965 break;
9966
9967 CASE_FLT_FN (BUILT_IN_CLOG):
9968 if (validate_arg (arg0, COMPLEX_TYPE)
9969 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9970 return do_mpc_arg1 (arg0, type, mpc_log);
9971 break;
9972
9973 CASE_FLT_FN (BUILT_IN_CSQRT):
9974 if (validate_arg (arg0, COMPLEX_TYPE)
9975 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9976 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9977 break;
9978
9979 CASE_FLT_FN (BUILT_IN_CASIN):
9980 if (validate_arg (arg0, COMPLEX_TYPE)
9981 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9982 return do_mpc_arg1 (arg0, type, mpc_asin);
9983 break;
9984
9985 CASE_FLT_FN (BUILT_IN_CACOS):
9986 if (validate_arg (arg0, COMPLEX_TYPE)
9987 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9988 return do_mpc_arg1 (arg0, type, mpc_acos);
9989 break;
9990
9991 CASE_FLT_FN (BUILT_IN_CATAN):
9992 if (validate_arg (arg0, COMPLEX_TYPE)
9993 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9994 return do_mpc_arg1 (arg0, type, mpc_atan);
9995 break;
9996
9997 CASE_FLT_FN (BUILT_IN_CASINH):
9998 if (validate_arg (arg0, COMPLEX_TYPE)
9999 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10000 return do_mpc_arg1 (arg0, type, mpc_asinh);
10001 break;
10002
10003 CASE_FLT_FN (BUILT_IN_CACOSH):
10004 if (validate_arg (arg0, COMPLEX_TYPE)
10005 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10006 return do_mpc_arg1 (arg0, type, mpc_acosh);
10007 break;
10008
10009 CASE_FLT_FN (BUILT_IN_CATANH):
10010 if (validate_arg (arg0, COMPLEX_TYPE)
10011 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10012 return do_mpc_arg1 (arg0, type, mpc_atanh);
10013 break;
10014
10015 CASE_FLT_FN (BUILT_IN_CABS):
10016 return fold_builtin_cabs (loc, arg0, type, fndecl);
10017
10018 CASE_FLT_FN (BUILT_IN_CARG):
10019 return fold_builtin_carg (loc, arg0, type);
10020
10021 CASE_FLT_FN (BUILT_IN_SQRT):
10022 return fold_builtin_sqrt (loc, arg0, type);
10023
10024 CASE_FLT_FN (BUILT_IN_CBRT):
10025 return fold_builtin_cbrt (loc, arg0, type);
10026
10027 CASE_FLT_FN (BUILT_IN_ASIN):
10028 if (validate_arg (arg0, REAL_TYPE))
10029 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10030 &dconstm1, &dconst1, true);
10031 break;
10032
10033 CASE_FLT_FN (BUILT_IN_ACOS):
10034 if (validate_arg (arg0, REAL_TYPE))
10035 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10036 &dconstm1, &dconst1, true);
10037 break;
10038
10039 CASE_FLT_FN (BUILT_IN_ATAN):
10040 if (validate_arg (arg0, REAL_TYPE))
10041 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10042 break;
10043
10044 CASE_FLT_FN (BUILT_IN_ASINH):
10045 if (validate_arg (arg0, REAL_TYPE))
10046 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10047 break;
10048
10049 CASE_FLT_FN (BUILT_IN_ACOSH):
10050 if (validate_arg (arg0, REAL_TYPE))
10051 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10052 &dconst1, NULL, true);
10053 break;
10054
10055 CASE_FLT_FN (BUILT_IN_ATANH):
10056 if (validate_arg (arg0, REAL_TYPE))
10057 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10058 &dconstm1, &dconst1, false);
10059 break;
10060
10061 CASE_FLT_FN (BUILT_IN_SIN):
10062 if (validate_arg (arg0, REAL_TYPE))
10063 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10064 break;
10065
10066 CASE_FLT_FN (BUILT_IN_COS):
10067 return fold_builtin_cos (loc, arg0, type, fndecl);
10068
10069 CASE_FLT_FN (BUILT_IN_TAN):
10070 return fold_builtin_tan (arg0, type);
10071
10072 CASE_FLT_FN (BUILT_IN_CEXP):
10073 return fold_builtin_cexp (loc, arg0, type);
10074
10075 CASE_FLT_FN (BUILT_IN_CEXPI):
10076 if (validate_arg (arg0, REAL_TYPE))
10077 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10078 break;
10079
10080 CASE_FLT_FN (BUILT_IN_SINH):
10081 if (validate_arg (arg0, REAL_TYPE))
10082 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10083 break;
10084
10085 CASE_FLT_FN (BUILT_IN_COSH):
10086 return fold_builtin_cosh (loc, arg0, type, fndecl);
10087
10088 CASE_FLT_FN (BUILT_IN_TANH):
10089 if (validate_arg (arg0, REAL_TYPE))
10090 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10091 break;
10092
10093 CASE_FLT_FN (BUILT_IN_ERF):
10094 if (validate_arg (arg0, REAL_TYPE))
10095 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10096 break;
10097
10098 CASE_FLT_FN (BUILT_IN_ERFC):
10099 if (validate_arg (arg0, REAL_TYPE))
10100 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10101 break;
10102
10103 CASE_FLT_FN (BUILT_IN_TGAMMA):
10104 if (validate_arg (arg0, REAL_TYPE))
10105 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10106 break;
10107
10108 CASE_FLT_FN (BUILT_IN_EXP):
10109 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10110
10111 CASE_FLT_FN (BUILT_IN_EXP2):
10112 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10113
10114 CASE_FLT_FN (BUILT_IN_EXP10):
10115 CASE_FLT_FN (BUILT_IN_POW10):
10116 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10117
10118 CASE_FLT_FN (BUILT_IN_EXPM1):
10119 if (validate_arg (arg0, REAL_TYPE))
10120 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10121 break;
10122
10123 CASE_FLT_FN (BUILT_IN_LOG):
10124 if (validate_arg (arg0, REAL_TYPE))
10125 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10126 break;
10127
10128 CASE_FLT_FN (BUILT_IN_LOG2):
10129 if (validate_arg (arg0, REAL_TYPE))
10130 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10131 break;
10132
10133 CASE_FLT_FN (BUILT_IN_LOG10):
10134 if (validate_arg (arg0, REAL_TYPE))
10135 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10136 break;
10137
10138 CASE_FLT_FN (BUILT_IN_LOG1P):
10139 if (validate_arg (arg0, REAL_TYPE))
10140 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10141 &dconstm1, NULL, false);
10142 break;
10143
10144 CASE_FLT_FN (BUILT_IN_J0):
10145 if (validate_arg (arg0, REAL_TYPE))
10146 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10147 NULL, NULL, 0);
10148 break;
10149
10150 CASE_FLT_FN (BUILT_IN_J1):
10151 if (validate_arg (arg0, REAL_TYPE))
10152 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10153 NULL, NULL, 0);
10154 break;
10155
10156 CASE_FLT_FN (BUILT_IN_Y0):
10157 if (validate_arg (arg0, REAL_TYPE))
10158 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10159 &dconst0, NULL, false);
10160 break;
10161
10162 CASE_FLT_FN (BUILT_IN_Y1):
10163 if (validate_arg (arg0, REAL_TYPE))
10164 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10165 &dconst0, NULL, false);
10166 break;
10167
10168 CASE_FLT_FN (BUILT_IN_NAN):
10169 case BUILT_IN_NAND32:
10170 case BUILT_IN_NAND64:
10171 case BUILT_IN_NAND128:
10172 return fold_builtin_nan (arg0, type, true);
10173
10174 CASE_FLT_FN (BUILT_IN_NANS):
10175 return fold_builtin_nan (arg0, type, false);
10176
10177 CASE_FLT_FN (BUILT_IN_FLOOR):
10178 return fold_builtin_floor (loc, fndecl, arg0);
10179
10180 CASE_FLT_FN (BUILT_IN_CEIL):
10181 return fold_builtin_ceil (loc, fndecl, arg0);
10182
10183 CASE_FLT_FN (BUILT_IN_TRUNC):
10184 return fold_builtin_trunc (loc, fndecl, arg0);
10185
10186 CASE_FLT_FN (BUILT_IN_ROUND):
10187 return fold_builtin_round (loc, fndecl, arg0);
10188
10189 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10190 CASE_FLT_FN (BUILT_IN_RINT):
10191 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10192
10193 CASE_FLT_FN (BUILT_IN_ICEIL):
10194 CASE_FLT_FN (BUILT_IN_LCEIL):
10195 CASE_FLT_FN (BUILT_IN_LLCEIL):
10196 CASE_FLT_FN (BUILT_IN_LFLOOR):
10197 CASE_FLT_FN (BUILT_IN_IFLOOR):
10198 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10199 CASE_FLT_FN (BUILT_IN_IROUND):
10200 CASE_FLT_FN (BUILT_IN_LROUND):
10201 CASE_FLT_FN (BUILT_IN_LLROUND):
10202 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10203
10204 CASE_FLT_FN (BUILT_IN_IRINT):
10205 CASE_FLT_FN (BUILT_IN_LRINT):
10206 CASE_FLT_FN (BUILT_IN_LLRINT):
10207 return fold_fixed_mathfn (loc, fndecl, arg0);
10208
10209 case BUILT_IN_BSWAP16:
10210 case BUILT_IN_BSWAP32:
10211 case BUILT_IN_BSWAP64:
10212 return fold_builtin_bswap (fndecl, arg0);
10213
10214 CASE_INT_FN (BUILT_IN_FFS):
10215 CASE_INT_FN (BUILT_IN_CLZ):
10216 CASE_INT_FN (BUILT_IN_CTZ):
10217 CASE_INT_FN (BUILT_IN_CLRSB):
10218 CASE_INT_FN (BUILT_IN_POPCOUNT):
10219 CASE_INT_FN (BUILT_IN_PARITY):
10220 return fold_builtin_bitop (fndecl, arg0);
10221
10222 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10223 return fold_builtin_signbit (loc, arg0, type);
10224
10225 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10226 return fold_builtin_significand (loc, arg0, type);
10227
10228 CASE_FLT_FN (BUILT_IN_ILOGB):
10229 CASE_FLT_FN (BUILT_IN_LOGB):
10230 return fold_builtin_logb (loc, arg0, type);
10231
10232 case BUILT_IN_ISASCII:
10233 return fold_builtin_isascii (loc, arg0);
10234
10235 case BUILT_IN_TOASCII:
10236 return fold_builtin_toascii (loc, arg0);
10237
10238 case BUILT_IN_ISDIGIT:
10239 return fold_builtin_isdigit (loc, arg0);
10240
10241 CASE_FLT_FN (BUILT_IN_FINITE):
10242 case BUILT_IN_FINITED32:
10243 case BUILT_IN_FINITED64:
10244 case BUILT_IN_FINITED128:
10245 case BUILT_IN_ISFINITE:
10246 {
10247 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10248 if (ret)
10249 return ret;
10250 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10251 }
10252
10253 CASE_FLT_FN (BUILT_IN_ISINF):
10254 case BUILT_IN_ISINFD32:
10255 case BUILT_IN_ISINFD64:
10256 case BUILT_IN_ISINFD128:
10257 {
10258 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10259 if (ret)
10260 return ret;
10261 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10262 }
10263
10264 case BUILT_IN_ISNORMAL:
10265 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10266
10267 case BUILT_IN_ISINF_SIGN:
10268 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10269
10270 CASE_FLT_FN (BUILT_IN_ISNAN):
10271 case BUILT_IN_ISNAND32:
10272 case BUILT_IN_ISNAND64:
10273 case BUILT_IN_ISNAND128:
10274 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10275
10276 case BUILT_IN_FREE:
10277 if (integer_zerop (arg0))
10278 return build_empty_stmt (loc);
10279 break;
10280
10281 default:
10282 break;
10283 }
10284
10285 return NULL_TREE;
10286
10287 }
10288
10289 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10290 This function returns NULL_TREE if no simplification was possible. */
10291
10292 static tree
10293 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10294 {
10295 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10296 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10297
10298 switch (fcode)
10299 {
10300 CASE_FLT_FN (BUILT_IN_JN):
10301 if (validate_arg (arg0, INTEGER_TYPE)
10302 && validate_arg (arg1, REAL_TYPE))
10303 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10304 break;
10305
10306 CASE_FLT_FN (BUILT_IN_YN):
10307 if (validate_arg (arg0, INTEGER_TYPE)
10308 && validate_arg (arg1, REAL_TYPE))
10309 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10310 &dconst0, false);
10311 break;
10312
10313 CASE_FLT_FN (BUILT_IN_DREM):
10314 CASE_FLT_FN (BUILT_IN_REMAINDER):
10315 if (validate_arg (arg0, REAL_TYPE)
10316 && validate_arg (arg1, REAL_TYPE))
10317 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10318 break;
10319
10320 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10321 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10322 if (validate_arg (arg0, REAL_TYPE)
10323 && validate_arg (arg1, POINTER_TYPE))
10324 return do_mpfr_lgamma_r (arg0, arg1, type);
10325 break;
10326
10327 CASE_FLT_FN (BUILT_IN_ATAN2):
10328 if (validate_arg (arg0, REAL_TYPE)
10329 && validate_arg (arg1, REAL_TYPE))
10330 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10331 break;
10332
10333 CASE_FLT_FN (BUILT_IN_FDIM):
10334 if (validate_arg (arg0, REAL_TYPE)
10335 && validate_arg (arg1, REAL_TYPE))
10336 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10337 break;
10338
10339 CASE_FLT_FN (BUILT_IN_HYPOT):
10340 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10341
10342 CASE_FLT_FN (BUILT_IN_CPOW):
10343 if (validate_arg (arg0, COMPLEX_TYPE)
10344 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10345 && validate_arg (arg1, COMPLEX_TYPE)
10346 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10347 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10348 break;
10349
10350 CASE_FLT_FN (BUILT_IN_LDEXP):
10351 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10352 CASE_FLT_FN (BUILT_IN_SCALBN):
10353 CASE_FLT_FN (BUILT_IN_SCALBLN):
10354 return fold_builtin_load_exponent (loc, arg0, arg1,
10355 type, /*ldexp=*/false);
10356
10357 CASE_FLT_FN (BUILT_IN_FREXP):
10358 return fold_builtin_frexp (loc, arg0, arg1, type);
10359
10360 CASE_FLT_FN (BUILT_IN_MODF):
10361 return fold_builtin_modf (loc, arg0, arg1, type);
10362
10363 case BUILT_IN_STRSTR:
10364 return fold_builtin_strstr (loc, arg0, arg1, type);
10365
10366 case BUILT_IN_STRSPN:
10367 return fold_builtin_strspn (loc, arg0, arg1);
10368
10369 case BUILT_IN_STRCSPN:
10370 return fold_builtin_strcspn (loc, arg0, arg1);
10371
10372 case BUILT_IN_STRCHR:
10373 case BUILT_IN_INDEX:
10374 return fold_builtin_strchr (loc, arg0, arg1, type);
10375
10376 case BUILT_IN_STRRCHR:
10377 case BUILT_IN_RINDEX:
10378 return fold_builtin_strrchr (loc, arg0, arg1, type);
10379
10380 case BUILT_IN_STRCMP:
10381 return fold_builtin_strcmp (loc, arg0, arg1);
10382
10383 case BUILT_IN_STRPBRK:
10384 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10385
10386 case BUILT_IN_EXPECT:
10387 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10388
10389 CASE_FLT_FN (BUILT_IN_POW):
10390 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10391
10392 CASE_FLT_FN (BUILT_IN_POWI):
10393 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10394
10395 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10396 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10397
10398 CASE_FLT_FN (BUILT_IN_FMIN):
10399 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10400
10401 CASE_FLT_FN (BUILT_IN_FMAX):
10402 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10403
10404 case BUILT_IN_ISGREATER:
10405 return fold_builtin_unordered_cmp (loc, fndecl,
10406 arg0, arg1, UNLE_EXPR, LE_EXPR);
10407 case BUILT_IN_ISGREATEREQUAL:
10408 return fold_builtin_unordered_cmp (loc, fndecl,
10409 arg0, arg1, UNLT_EXPR, LT_EXPR);
10410 case BUILT_IN_ISLESS:
10411 return fold_builtin_unordered_cmp (loc, fndecl,
10412 arg0, arg1, UNGE_EXPR, GE_EXPR);
10413 case BUILT_IN_ISLESSEQUAL:
10414 return fold_builtin_unordered_cmp (loc, fndecl,
10415 arg0, arg1, UNGT_EXPR, GT_EXPR);
10416 case BUILT_IN_ISLESSGREATER:
10417 return fold_builtin_unordered_cmp (loc, fndecl,
10418 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10419 case BUILT_IN_ISUNORDERED:
10420 return fold_builtin_unordered_cmp (loc, fndecl,
10421 arg0, arg1, UNORDERED_EXPR,
10422 NOP_EXPR);
10423
10424 /* We do the folding for va_start in the expander. */
10425 case BUILT_IN_VA_START:
10426 break;
10427
10428 case BUILT_IN_OBJECT_SIZE:
10429 return fold_builtin_object_size (arg0, arg1);
10430
10431 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10432 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10433
10434 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10435 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10436
10437 default:
10438 break;
10439 }
10440 return NULL_TREE;
10441 }
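
/* Editor's illustration (a sketch, not part of the original source): with
   constant arguments, fold_builtin_2 replaces the call with a compile-time
   constant via MPFR. For example, under default flags the source

       double d = atan2 (1.0, 1.0);

   can be folded through do_mpfr_arg2/mpfr_atan2 into the constant
   0.7853981633... (pi/4), leaving no libm call in the generated code. */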
10442
10443 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10444 and ARG2.
10445 This function returns NULL_TREE if no simplification was possible. */
10446
10447 static tree
10448 fold_builtin_3 (location_t loc, tree fndecl,
10449 tree arg0, tree arg1, tree arg2)
10450 {
10451 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10452 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10453 switch (fcode)
10454 {
10455
10456 CASE_FLT_FN (BUILT_IN_SINCOS):
10457 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10458
10459 CASE_FLT_FN (BUILT_IN_FMA):
10460 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10462
10463 CASE_FLT_FN (BUILT_IN_REMQUO):
10464 if (validate_arg (arg0, REAL_TYPE)
10465 && validate_arg (arg1, REAL_TYPE)
10466 && validate_arg (arg2, POINTER_TYPE))
10467 return do_mpfr_remquo (arg0, arg1, arg2);
10468 break;
10469
10470 case BUILT_IN_STRNCMP:
10471 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10472
10473 case BUILT_IN_MEMCHR:
10474 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10475
10476 case BUILT_IN_BCMP:
10477 case BUILT_IN_MEMCMP:
10478 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10479
10480 case BUILT_IN_EXPECT:
10481 return fold_builtin_expect (loc, arg0, arg1, arg2);
10482
10483 case BUILT_IN_ADD_OVERFLOW:
10484 case BUILT_IN_SUB_OVERFLOW:
10485 case BUILT_IN_MUL_OVERFLOW:
10486 case BUILT_IN_SADD_OVERFLOW:
10487 case BUILT_IN_SADDL_OVERFLOW:
10488 case BUILT_IN_SADDLL_OVERFLOW:
10489 case BUILT_IN_SSUB_OVERFLOW:
10490 case BUILT_IN_SSUBL_OVERFLOW:
10491 case BUILT_IN_SSUBLL_OVERFLOW:
10492 case BUILT_IN_SMUL_OVERFLOW:
10493 case BUILT_IN_SMULL_OVERFLOW:
10494 case BUILT_IN_SMULLL_OVERFLOW:
10495 case BUILT_IN_UADD_OVERFLOW:
10496 case BUILT_IN_UADDL_OVERFLOW:
10497 case BUILT_IN_UADDLL_OVERFLOW:
10498 case BUILT_IN_USUB_OVERFLOW:
10499 case BUILT_IN_USUBL_OVERFLOW:
10500 case BUILT_IN_USUBLL_OVERFLOW:
10501 case BUILT_IN_UMUL_OVERFLOW:
10502 case BUILT_IN_UMULL_OVERFLOW:
10503 case BUILT_IN_UMULLL_OVERFLOW:
10504 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10505
10506 default:
10507 break;
10508 }
10509 return NULL_TREE;
10510 }
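
/* Editor's illustration (a sketch, not original source): the arithmetic
   overflow builtins above fold when both operands are constant, e.g.

       int r;
       bool ovf = __builtin_add_overflow (INT_MAX, 1, &r);

   can be folded by fold_builtin_arith_overflow so that OVF becomes the
   constant true and R the wrapped result, with no runtime check left. */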
10511
10512 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10513 arguments. The trailing bool parameter (historically IGNORE, true if
10514 the result of the call is ignored) is now unused. This function
10515 returns NULL_TREE if no simplification was possible. */
10516
10517 tree
10518 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10519 {
10520 tree ret = NULL_TREE;
10521
10522 switch (nargs)
10523 {
10524 case 0:
10525 ret = fold_builtin_0 (loc, fndecl);
10526 break;
10527 case 1:
10528 ret = fold_builtin_1 (loc, fndecl, args[0]);
10529 break;
10530 case 2:
10531 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10532 break;
10533 case 3:
10534 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10535 break;
10536 default:
10537 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10538 break;
10539 }
10540 if (ret)
10541 {
10542 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10543 SET_EXPR_LOCATION (ret, loc);
10544 TREE_NO_WARNING (ret) = 1;
10545 return ret;
10546 }
10547 return NULL_TREE;
10548 }
10549
10550 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10551 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10552 of arguments in ARGS to be omitted. OLDNARGS is the number of
10553 elements in ARGS. */
10554
10555 static tree
10556 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10557 int skip, tree fndecl, int n, va_list newargs)
10558 {
10559 int nargs = oldnargs - skip + n;
10560 tree *buffer;
10561
10562 if (n > 0)
10563 {
10564 int i, j;
10565
10566 buffer = XALLOCAVEC (tree, nargs);
10567 for (i = 0; i < n; i++)
10568 buffer[i] = va_arg (newargs, tree);
10569 for (j = skip; j < oldnargs; j++, i++)
10570 buffer[i] = args[j];
10571 }
10572 else
10573 buffer = args + skip;
10574
10575 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10576 }
10577
10578 /* Return true if FNDECL shouldn't be folded right now.
10579 If a built-in function has an always_inline wrapper, defer
10580 folding it until after always_inline functions have been
10581 inlined; otherwise e.g. -D_FORTIFY_SOURCE checking might
10582 not be performed. */
10583
10584 bool
10585 avoid_folding_inline_builtin (tree fndecl)
10586 {
10587 return (DECL_DECLARED_INLINE_P (fndecl)
10588 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10589 && cfun
10590 && !cfun->always_inline_functions_inlined
10591 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10592 }
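
/* Editor's illustration (a simplified approximation of the glibc fortify
   headers, not original source): with -D_FORTIFY_SOURCE glibc wraps
   memcpy in an always_inline checking wrapper roughly like

       extern __inline __attribute__ ((__always_inline__)) void *
       memcpy (void *d, const void *s, size_t n)
       {
         return __builtin___memcpy_chk (d, s, n,
                                        __builtin_object_size (d, 0));
       }

   Folding a call to this memcpy before the wrapper is inlined would
   bypass the object-size check, hence the deferral above. */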
10593
10594 /* A wrapper function for builtin folding that prevents warnings for
10595 "statement without effect" and the like, caused by removing the
10596 call node earlier than the warning is generated. */
10597
10598 tree
10599 fold_call_expr (location_t loc, tree exp, bool ignore)
10600 {
10601 tree ret = NULL_TREE;
10602 tree fndecl = get_callee_fndecl (exp);
10603 if (fndecl
10604 && TREE_CODE (fndecl) == FUNCTION_DECL
10605 && DECL_BUILT_IN (fndecl)
10606 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10607 yet. Defer folding until we see all the arguments
10608 (after inlining). */
10609 && !CALL_EXPR_VA_ARG_PACK (exp))
10610 {
10611 int nargs = call_expr_nargs (exp);
10612
10613 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10614 instead last argument is __builtin_va_arg_pack (). Defer folding
10615 even in that case, until arguments are finalized. */
10616 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10617 {
10618 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10619 if (fndecl2
10620 && TREE_CODE (fndecl2) == FUNCTION_DECL
10621 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10622 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10623 return NULL_TREE;
10624 }
10625
10626 if (avoid_folding_inline_builtin (fndecl))
10627 return NULL_TREE;
10628
10629 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10630 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10631 CALL_EXPR_ARGP (exp), ignore);
10632 else
10633 {
10634 tree *args = CALL_EXPR_ARGP (exp);
10635 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10636 if (ret)
10637 return ret;
10638 }
10639 }
10640 return NULL_TREE;
10641 }
10642
10643 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10644 N arguments are passed in the array ARGARRAY. Return a folded
10645 expression or NULL_TREE if no simplification was possible. */
10646
10647 tree
10648 fold_builtin_call_array (location_t loc, tree,
10649 tree fn,
10650 int n,
10651 tree *argarray)
10652 {
10653 if (TREE_CODE (fn) != ADDR_EXPR)
10654 return NULL_TREE;
10655
10656 tree fndecl = TREE_OPERAND (fn, 0);
10657 if (TREE_CODE (fndecl) == FUNCTION_DECL
10658 && DECL_BUILT_IN (fndecl))
10659 {
10660 /* If last argument is __builtin_va_arg_pack (), arguments to this
10661 function are not finalized yet. Defer folding until they are. */
10662 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10663 {
10664 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10665 if (fndecl2
10666 && TREE_CODE (fndecl2) == FUNCTION_DECL
10667 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10668 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10669 return NULL_TREE;
10670 }
10671 if (avoid_folding_inline_builtin (fndecl))
10672 return NULL_TREE;
10673 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10674 return targetm.fold_builtin (fndecl, n, argarray, false);
10675 else
10676 return fold_builtin_n (loc, fndecl, argarray, n, false);
10677 }
10678
10679 return NULL_TREE;
10680 }
10681
10682 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10683 along with N new arguments specified as the "..." parameters. SKIP
10684 is the number of arguments in EXP to be omitted. This function is used
10685 to do varargs-to-varargs transformations. */
10686
10687 static tree
10688 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10689 {
10690 va_list ap;
10691 tree t;
10692
10693 va_start (ap, n);
10694 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10695 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10696 va_end (ap);
10697
10698 return t;
10699 }
10700
10701 /* Validate a single argument ARG against a tree code CODE representing
10702 a type. */
10703
10704 static bool
10705 validate_arg (const_tree arg, enum tree_code code)
10706 {
10707 if (!arg)
10708 return false;
10709 else if (code == POINTER_TYPE)
10710 return POINTER_TYPE_P (TREE_TYPE (arg));
10711 else if (code == INTEGER_TYPE)
10712 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10713 return code == TREE_CODE (TREE_TYPE (arg));
10714 }
10715
10716 /* This function validates the types of a function call argument list
10717 against a specified list of tree_codes. If the last specifier is a 0,
10718 that represents an ellipsis; otherwise the last specifier must be a
10719 VOID_TYPE.
10720
10721 This is the GIMPLE version of validate_arglist. Eventually we want to
10722 completely convert builtins.c to work from GIMPLEs and the tree based
10723 validate_arglist will then be removed. */
10724
10725 bool
10726 validate_gimple_arglist (const gcall *call, ...)
10727 {
10728 enum tree_code code;
10729 bool res = false;
10730 va_list ap;
10731 const_tree arg;
10732 size_t i;
10733
10734 va_start (ap, call);
10735 i = 0;
10736
10737 do
10738 {
10739 code = (enum tree_code) va_arg (ap, int);
10740 switch (code)
10741 {
10742 case 0:
10743 /* This signifies an ellipsis; any further arguments are all ok. */
10744 res = true;
10745 goto end;
10746 case VOID_TYPE:
10747 /* This signifies an endlink: if no arguments remain, return
10748 true; otherwise return false. */
10749 res = (i == gimple_call_num_args (call));
10750 goto end;
10751 default:
10752 /* If no parameters remain or the parameter's code does not
10753 match the specified code, return false. Otherwise continue
10754 checking any remaining arguments. */
10755 arg = gimple_call_arg (call, i++);
10756 if (!validate_arg (arg, code))
10757 goto end;
10758 break;
10759 }
10760 }
10761 while (1);
10762
10763 /* We need the gotos here so that va_end is reached on every path
10764 out of the function. */
10765 end: ;
10766 va_end (ap);
10767
10768 return res;
10769 }
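
/* Editor's usage sketch (hypothetical caller, not original source): a
   pass expecting a memcpy-shaped call would validate it as

       if (!validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                                     INTEGER_TYPE, VOID_TYPE))
         return false;

   where the trailing VOID_TYPE terminates the expected argument list,
   and a 0 in its place would instead allow extra arguments. */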
10770
10771 /* Default target-specific builtin expander that does nothing. */
10772
10773 rtx
10774 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10775 rtx target ATTRIBUTE_UNUSED,
10776 rtx subtarget ATTRIBUTE_UNUSED,
10777 machine_mode mode ATTRIBUTE_UNUSED,
10778 int ignore ATTRIBUTE_UNUSED)
10779 {
10780 return NULL_RTX;
10781 }
10782
10783 /* Returns true if EXP represents data that would potentially reside
10784 in a readonly section. */
10785
10786 bool
10787 readonly_data_expr (tree exp)
10788 {
10789 STRIP_NOPS (exp);
10790
10791 if (TREE_CODE (exp) != ADDR_EXPR)
10792 return false;
10793
10794 exp = get_base_address (TREE_OPERAND (exp, 0));
10795 if (!exp)
10796 return false;
10797
10798 /* Make sure we call decl_readonly_section only for trees it
10799 can handle (since it returns true for everything it doesn't
10800 understand). */
10801 if (TREE_CODE (exp) == STRING_CST
10802 || TREE_CODE (exp) == CONSTRUCTOR
10803 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10804 return decl_readonly_section (exp, 0);
10805 else
10806 return false;
10807 }
10808
10809 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10810 to the call, and TYPE is its return type.
10811
10812 Return NULL_TREE if no simplification was possible, otherwise return the
10813 simplified form of the call as a tree.
10814
10815 The simplified form may be a constant or other expression which
10816 computes the same value, but in a more efficient manner (including
10817 calls to other builtin functions).
10818
10819 The call may contain arguments which need to be evaluated, but
10820 which are not useful to determine the result of the call. In
10821 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10822 COMPOUND_EXPR will be an argument which must be evaluated.
10823 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10824 COMPOUND_EXPR in the chain will contain the tree for the simplified
10825 form of the builtin function call. */
10826
10827 static tree
10828 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10829 {
10830 if (!validate_arg (s1, POINTER_TYPE)
10831 || !validate_arg (s2, POINTER_TYPE))
10832 return NULL_TREE;
10833 else
10834 {
10835 tree fn;
10836 const char *p1, *p2;
10837
10838 p2 = c_getstr (s2);
10839 if (p2 == NULL)
10840 return NULL_TREE;
10841
10842 p1 = c_getstr (s1);
10843 if (p1 != NULL)
10844 {
10845 const char *r = strstr (p1, p2);
10846 tree tem;
10847
10848 if (r == NULL)
10849 return build_int_cst (TREE_TYPE (s1), 0);
10850
10851 /* Return an offset into the constant string argument. */
10852 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10853 return fold_convert_loc (loc, type, tem);
10854 }
10855
10856 /* The argument is const char *, and the result is char *, so we need
10857 a type conversion here to avoid a warning. */
10858 if (p2[0] == '\0')
10859 return fold_convert_loc (loc, type, s1);
10860
10861 if (p2[1] != '\0')
10862 return NULL_TREE;
10863
10864 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10865 if (!fn)
10866 return NULL_TREE;
10867
10868 /* New argument list transforming strstr(s1, s2) to
10869 strchr(s1, s2[0]). */
10870 return build_call_expr_loc (loc, fn, 2, s1,
10871 build_int_cst (integer_type_node, p2[0]));
10872 }
10873 }
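
/* Editor's illustration (not original source): the cases above
   correspond to source-level folds such as

       strstr ("hello", "ll")   =>   "hello" + 2
       strstr (s, "")           =>   (char *) s
       strstr (s, "x")          =>   strchr (s, 'x')

   A multi-character needle with a non-constant haystack is left as a
   real call to strstr. */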
10874
10875 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10876 the call, and TYPE is its return type.
10877
10878 Return NULL_TREE if no simplification was possible, otherwise return the
10879 simplified form of the call as a tree.
10880
10881 The simplified form may be a constant or other expression which
10882 computes the same value, but in a more efficient manner (including
10883 calls to other builtin functions).
10884
10885 The call may contain arguments which need to be evaluated, but
10886 which are not useful to determine the result of the call. In
10887 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10888 COMPOUND_EXPR will be an argument which must be evaluated.
10889 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10890 COMPOUND_EXPR in the chain will contain the tree for the simplified
10891 form of the builtin function call. */
10892
10893 static tree
10894 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10895 {
10896 if (!validate_arg (s1, POINTER_TYPE)
10897 || !validate_arg (s2, INTEGER_TYPE))
10898 return NULL_TREE;
10899 else
10900 {
10901 const char *p1;
10902
10903 if (TREE_CODE (s2) != INTEGER_CST)
10904 return NULL_TREE;
10905
10906 p1 = c_getstr (s1);
10907 if (p1 != NULL)
10908 {
10909 char c;
10910 const char *r;
10911 tree tem;
10912
10913 if (target_char_cast (s2, &c))
10914 return NULL_TREE;
10915
10916 r = strchr (p1, c);
10917
10918 if (r == NULL)
10919 return build_int_cst (TREE_TYPE (s1), 0);
10920
10921 /* Return an offset into the constant string argument. */
10922 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10923 return fold_convert_loc (loc, type, tem);
10924 }
10925 return NULL_TREE;
10926 }
10927 }
10928
10929 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10930 the call, and TYPE is its return type.
10931
10932 Return NULL_TREE if no simplification was possible, otherwise return the
10933 simplified form of the call as a tree.
10934
10935 The simplified form may be a constant or other expression which
10936 computes the same value, but in a more efficient manner (including
10937 calls to other builtin functions).
10938
10939 The call may contain arguments which need to be evaluated, but
10940 which are not useful to determine the result of the call. In
10941 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10942 COMPOUND_EXPR will be an argument which must be evaluated.
10943 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10944 COMPOUND_EXPR in the chain will contain the tree for the simplified
10945 form of the builtin function call. */
10946
10947 static tree
10948 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10949 {
10950 if (!validate_arg (s1, POINTER_TYPE)
10951 || !validate_arg (s2, INTEGER_TYPE))
10952 return NULL_TREE;
10953 else
10954 {
10955 tree fn;
10956 const char *p1;
10957
10958 if (TREE_CODE (s2) != INTEGER_CST)
10959 return NULL_TREE;
10960
10961 p1 = c_getstr (s1);
10962 if (p1 != NULL)
10963 {
10964 char c;
10965 const char *r;
10966 tree tem;
10967
10968 if (target_char_cast (s2, &c))
10969 return NULL_TREE;
10970
10971 r = strrchr (p1, c);
10972
10973 if (r == NULL)
10974 return build_int_cst (TREE_TYPE (s1), 0);
10975
10976 /* Return an offset into the constant string argument. */
10977 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10978 return fold_convert_loc (loc, type, tem);
10979 }
10980
10981 if (! integer_zerop (s2))
10982 return NULL_TREE;
10983
10984 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10985 if (!fn)
10986 return NULL_TREE;
10987
10988 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10989 return build_call_expr_loc (loc, fn, 2, s1, s2);
10990 }
10991 }
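
/* Editor's illustration (not original source): e.g.

       strrchr ("banana", 'a')  =>   "banana" + 5
       strrchr (s, '\0')        =>   strchr (s, '\0')

   The second fold is safe because searching for the terminator finds
   exactly one match, so forward and backward scans agree. */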
10992
10993 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10994 to the call, and TYPE is its return type.
10995
10996 Return NULL_TREE if no simplification was possible, otherwise return the
10997 simplified form of the call as a tree.
10998
10999 The simplified form may be a constant or other expression which
11000 computes the same value, but in a more efficient manner (including
11001 calls to other builtin functions).
11002
11003 The call may contain arguments which need to be evaluated, but
11004 which are not useful to determine the result of the call. In
11005 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11006 COMPOUND_EXPR will be an argument which must be evaluated.
11007 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11008 COMPOUND_EXPR in the chain will contain the tree for the simplified
11009 form of the builtin function call. */
11010
11011 static tree
11012 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11013 {
11014 if (!validate_arg (s1, POINTER_TYPE)
11015 || !validate_arg (s2, POINTER_TYPE))
11016 return NULL_TREE;
11017 else
11018 {
11019 tree fn;
11020 const char *p1, *p2;
11021
11022 p2 = c_getstr (s2);
11023 if (p2 == NULL)
11024 return NULL_TREE;
11025
11026 p1 = c_getstr (s1);
11027 if (p1 != NULL)
11028 {
11029 const char *r = strpbrk (p1, p2);
11030 tree tem;
11031
11032 if (r == NULL)
11033 return build_int_cst (TREE_TYPE (s1), 0);
11034
11035 /* Return an offset into the constant string argument. */
11036 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11037 return fold_convert_loc (loc, type, tem);
11038 }
11039
11040 if (p2[0] == '\0')
11041 /* strpbrk(x, "") == NULL.
11042 Evaluate and ignore s1 in case it has side-effects. */
11043 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11044
11045 if (p2[1] != '\0')
11046 return NULL_TREE; /* Really call strpbrk. */
11047
11048 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11049 if (!fn)
11050 return NULL_TREE;
11051
11052 /* New argument list transforming strpbrk(s1, s2) to
11053 strchr(s1, s2[0]). */
11054 return build_call_expr_loc (loc, fn, 2, s1,
11055 build_int_cst (integer_type_node, p2[0]));
11056 }
11057 }
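
/* Editor's illustration (not original source): e.g.

       strpbrk ("abcde", "dx")  =>   "abcde" + 3
       strpbrk (s, "")          =>   NULL, with S still evaluated
       strpbrk (s, "x")         =>   strchr (s, 'x')

   A multi-character S2 with a non-constant S1 is left as a real call. */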
11058
11059 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11060 to the call.
11061
11062 Return NULL_TREE if no simplification was possible, otherwise return the
11063 simplified form of the call as a tree.
11064
11065 The simplified form may be a constant or other expression which
11066 computes the same value, but in a more efficient manner (including
11067 calls to other builtin functions).
11068
11069 The call may contain arguments which need to be evaluated, but
11070 which are not useful to determine the result of the call. In
11071 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11072 COMPOUND_EXPR will be an argument which must be evaluated.
11073 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11074 COMPOUND_EXPR in the chain will contain the tree for the simplified
11075 form of the builtin function call. */
11076
11077 static tree
11078 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11079 {
11080 if (!validate_arg (s1, POINTER_TYPE)
11081 || !validate_arg (s2, POINTER_TYPE))
11082 return NULL_TREE;
11083 else
11084 {
11085 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11086
11087 /* If both arguments are constants, evaluate at compile-time. */
11088 if (p1 && p2)
11089 {
11090 const size_t r = strspn (p1, p2);
11091 return build_int_cst (size_type_node, r);
11092 }
11093
11094 /* If either argument is "", the result is 0. */
11095 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11096 /* Evaluate and ignore both arguments in case either one has
11097 side-effects. */
11098 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11099 s1, s2);
11100 return NULL_TREE;
11101 }
11102 }
11103
11104 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11105 to the call.
11106
11107 Return NULL_TREE if no simplification was possible, otherwise return the
11108 simplified form of the call as a tree.
11109
11110 The simplified form may be a constant or other expression which
11111 computes the same value, but in a more efficient manner (including
11112 calls to other builtin functions).
11113
11114 The call may contain arguments which need to be evaluated, but
11115 which are not useful to determine the result of the call. In
11116 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11117 COMPOUND_EXPR will be an argument which must be evaluated.
11118 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11119 COMPOUND_EXPR in the chain will contain the tree for the simplified
11120 form of the builtin function call. */
11121
11122 static tree
11123 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11124 {
11125 if (!validate_arg (s1, POINTER_TYPE)
11126 || !validate_arg (s2, POINTER_TYPE))
11127 return NULL_TREE;
11128 else
11129 {
11130 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11131
11132 /* If both arguments are constants, evaluate at compile-time. */
11133 if (p1 && p2)
11134 {
11135 const size_t r = strcspn (p1, p2);
11136 return build_int_cst (size_type_node, r);
11137 }
11138
11139 /* If the first argument is "", the result is 0. */
11140 if (p1 && *p1 == '\0')
11141 {
11142 /* Evaluate and ignore argument s2 in case it has
11143 side-effects. */
11144 return omit_one_operand_loc (loc, size_type_node,
11145 size_zero_node, s2);
11146 }
11147
11148 /* If the second argument is "", return __builtin_strlen(s1). */
11149 if (p2 && *p2 == '\0')
11150 {
11151 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11152
11153 /* If the replacement _DECL isn't initialized, don't do the
11154 transformation. */
11155 if (!fn)
11156 return NULL_TREE;
11157
11158 return build_call_expr_loc (loc, fn, 1, s1);
11159 }
11160 return NULL_TREE;
11161 }
11162 }
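
/* Editor's illustration (not original source): e.g.

       strcspn ("abcde", "dx")  =>   3
       strcspn ("", s)          =>   0, with S still evaluated
       strcspn (s, "")          =>   strlen (s) */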
11163
11164 /* Fold the next_arg or va_start call EXP. Returns true if an error
11165 was produced, false otherwise; this lets us avoid emitting the same
11166 error or warning two or three times. */
11167
11168 bool
11169 fold_builtin_next_arg (tree exp, bool va_start_p)
11170 {
11171 tree fntype = TREE_TYPE (current_function_decl);
11172 int nargs = call_expr_nargs (exp);
11173 tree arg;
11174 /* There is a good chance the current input_location points inside the
11175 definition of the va_start macro (perhaps on the token for
11176 builtin) in a system header, so warnings will not be emitted.
11177 Use the location in real source code. */
11178 source_location current_location =
11179 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11180 NULL);
11181
11182 if (!stdarg_p (fntype))
11183 {
11184 error ("%<va_start%> used in function with fixed args");
11185 return true;
11186 }
11187
11188 if (va_start_p)
11189 {
11190 if (nargs != 2)
11191 {
11192 error ("wrong number of arguments to function %<va_start%>");
11193 return true;
11194 }
11195 arg = CALL_EXPR_ARG (exp, 1);
11196 }
11197 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0) once
11198 we have checked the arguments and, if needed, issued a warning. */
11199 else
11200 {
11201 if (nargs == 0)
11202 {
11203 /* Evidently an out of date version of <stdarg.h>; can't validate
11204 va_start's second argument, but can still work as intended. */
11205 warning_at (current_location,
11206 OPT_Wvarargs,
11207 "%<__builtin_next_arg%> called without an argument");
11208 return true;
11209 }
11210 else if (nargs > 1)
11211 {
11212 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11213 return true;
11214 }
11215 arg = CALL_EXPR_ARG (exp, 0);
11216 }
11217
11218 if (TREE_CODE (arg) == SSA_NAME)
11219 arg = SSA_NAME_VAR (arg);
11220
11221 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11222 or __builtin_next_arg (0) the first time we see it, after checking
11223 the arguments and if needed issuing a warning. */
11224 if (!integer_zerop (arg))
11225 {
11226 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11227
11228 /* Strip off all nops for the sake of the comparison. This
11229 is not quite the same as STRIP_NOPS. It does more.
11230 We must also strip off INDIRECT_EXPR for C++ reference
11231 parameters. */
11232 while (CONVERT_EXPR_P (arg)
11233 || TREE_CODE (arg) == INDIRECT_REF)
11234 arg = TREE_OPERAND (arg, 0);
11235 if (arg != last_parm)
11236 {
11237 /* FIXME: Sometimes the tree optimizers leave us with something
11238 other than the last argument even though the user wrote the
11239 last one. We just warn and carry on, so wrong code may be
11240 generated because of it. */
11242 warning_at (current_location,
11243 OPT_Wvarargs,
11244 "second parameter of %<va_start%> not last named argument");
11245 }
11246
11247 /* Undefined by C99 7.15.1.4p4 (va_start):
11248 "If the parameter parmN is declared with the register storage
11249 class, with a function or array type, or with a type that is
11250 not compatible with the type that results after application of
11251 the default argument promotions, the behavior is undefined."
11252 */
11253 else if (DECL_REGISTER (arg))
11254 {
11255 warning_at (current_location,
11256 OPT_Wvarargs,
11257 "undefined behaviour when second parameter of "
11258 "%<va_start%> is declared with %<register%> storage");
11259 }
11260
11261 /* We want to verify the second parameter just once before the tree
11262 optimizers are run and then avoid keeping it in the tree,
11263 as otherwise we could warn even for correct code like:
11264 void foo (int i, ...)
11265 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11266 if (va_start_p)
11267 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11268 else
11269 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11270 }
11271 return false;
11272 }
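
/* Editor's illustration (not original source): the checks above diagnose
   misuses such as

       void foo (int a, int b, ...)
       {
         va_list ap;
         va_start (ap, a);   // warning: second parameter of va_start
                             // not last named argument
         va_end (ap);
       }
*/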
11273
11274
11275 /* Expand a call EXP to __builtin_object_size. */
11276
11277 static rtx
11278 expand_builtin_object_size (tree exp)
11279 {
11280 tree ost;
11281 int object_size_type;
11282 tree fndecl = get_callee_fndecl (exp);
11283
11284 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11285 {
11286 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11287 exp, fndecl);
11288 expand_builtin_trap ();
11289 return const0_rtx;
11290 }
11291
11292 ost = CALL_EXPR_ARG (exp, 1);
11293 STRIP_NOPS (ost);
11294
11295 if (TREE_CODE (ost) != INTEGER_CST
11296 || tree_int_cst_sgn (ost) < 0
11297 || compare_tree_int (ost, 3) > 0)
11298 {
11299 error ("%Klast argument of %D is not integer constant between 0 and 3",
11300 exp, fndecl);
11301 expand_builtin_trap ();
11302 return const0_rtx;
11303 }
11304
11305 object_size_type = tree_to_shwi (ost);
11306
11307 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11308 }
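
/* Editor's illustration (not original source): by the time this expander
   runs, any call whose object size was computable has already been
   folded, so what remains expands to the documented failure values:

       __builtin_object_size (p, 0)  =>  (size_t) -1   (types 0 and 1)
       __builtin_object_size (p, 2)  =>  (size_t) 0    (types 2 and 3) */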
11309
11310 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11311 FCODE is the BUILT_IN_* to use.
11312 Return NULL_RTX if we failed; the caller should emit a normal call,
11313 otherwise try to get the result in TARGET, if convenient (and in
11314 mode MODE if that's convenient). */
11315
11316 static rtx
11317 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11318 enum built_in_function fcode)
11319 {
11320 tree dest, src, len, size;
11321
11322 if (!validate_arglist (exp,
11323 POINTER_TYPE,
11324 fcode == BUILT_IN_MEMSET_CHK
11325 ? INTEGER_TYPE : POINTER_TYPE,
11326 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11327 return NULL_RTX;
11328
11329 dest = CALL_EXPR_ARG (exp, 0);
11330 src = CALL_EXPR_ARG (exp, 1);
11331 len = CALL_EXPR_ARG (exp, 2);
11332 size = CALL_EXPR_ARG (exp, 3);
11333
11334 if (! tree_fits_uhwi_p (size))
11335 return NULL_RTX;
11336
11337 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11338 {
11339 tree fn;
11340
11341 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11342 {
11343 warning_at (tree_nonartificial_location (exp),
11344 0, "%Kcall to %D will always overflow destination buffer",
11345 exp, get_callee_fndecl (exp));
11346 return NULL_RTX;
11347 }
11348
11349 fn = NULL_TREE;
11350 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11351 mem{cpy,pcpy,move,set} is available. */
11352 switch (fcode)
11353 {
11354 case BUILT_IN_MEMCPY_CHK:
11355 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11356 break;
11357 case BUILT_IN_MEMPCPY_CHK:
11358 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11359 break;
11360 case BUILT_IN_MEMMOVE_CHK:
11361 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11362 break;
11363 case BUILT_IN_MEMSET_CHK:
11364 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11365 break;
11366 default:
11367 break;
11368 }
11369
11370 if (! fn)
11371 return NULL_RTX;
11372
11373 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11374 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11375 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11376 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11377 }
11378 else if (fcode == BUILT_IN_MEMSET_CHK)
11379 return NULL_RTX;
11380 else
11381 {
11382 unsigned int dest_align = get_pointer_alignment (dest);
11383
11384 /* If DEST is not a pointer type, call the normal function. */
11385 if (dest_align == 0)
11386 return NULL_RTX;
11387
11388 /* If SRC and DEST are the same (and not volatile), do nothing. */
11389 if (operand_equal_p (src, dest, 0))
11390 {
11391 tree expr;
11392
11393 if (fcode != BUILT_IN_MEMPCPY_CHK)
11394 {
11395 /* Evaluate and ignore LEN in case it has side-effects. */
11396 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11397 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11398 }
11399
11400 expr = fold_build_pointer_plus (dest, len);
11401 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11402 }
11403
11404 /* __memmove_chk special case. */
11405 if (fcode == BUILT_IN_MEMMOVE_CHK)
11406 {
11407 unsigned int src_align = get_pointer_alignment (src);
11408
11409 if (src_align == 0)
11410 return NULL_RTX;
11411
11412 /* If src is categorized for a readonly section we can use
11413 normal __memcpy_chk. */
11414 if (readonly_data_expr (src))
11415 {
11416 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11417 if (!fn)
11418 return NULL_RTX;
11419 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11420 dest, src, len, size);
11421 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11422 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11423 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11424 }
11425 }
11426 return NULL_RTX;
11427 }
11428 }
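
/* Editor's illustration (not original source): when the length is known
   to fit in the destination, the checked call degrades to the plain one:

       __builtin___memcpy_chk (d, s, 16, 32)  =>  memcpy (d, s, 16)

   A provable overflow (say len 64, size 32) only warns and keeps the
   call, so the runtime check inside __memcpy_chk can still abort. */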
11429
11430 /* Emit warning if a buffer overflow is detected at compile time. */
11431
11432 static void
11433 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11434 {
11435 int is_strlen = 0;
11436 tree len, size;
11437 location_t loc = tree_nonartificial_location (exp);
11438
11439 switch (fcode)
11440 {
11441 case BUILT_IN_STRCPY_CHK:
11442 case BUILT_IN_STPCPY_CHK:
11443 /* For __strcat_chk the warning will be emitted only if overflowing
11444 by at least strlen (dest) + 1 bytes. */
11445 case BUILT_IN_STRCAT_CHK:
11446 len = CALL_EXPR_ARG (exp, 1);
11447 size = CALL_EXPR_ARG (exp, 2);
11448 is_strlen = 1;
11449 break;
11450 case BUILT_IN_STRNCAT_CHK:
11451 case BUILT_IN_STRNCPY_CHK:
11452 case BUILT_IN_STPNCPY_CHK:
11453 len = CALL_EXPR_ARG (exp, 2);
11454 size = CALL_EXPR_ARG (exp, 3);
11455 break;
11456 case BUILT_IN_SNPRINTF_CHK:
11457 case BUILT_IN_VSNPRINTF_CHK:
11458 len = CALL_EXPR_ARG (exp, 1);
11459 size = CALL_EXPR_ARG (exp, 3);
11460 break;
11461 default:
11462 gcc_unreachable ();
11463 }
11464
11465 if (!len || !size)
11466 return;
11467
11468 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11469 return;
11470
11471 if (is_strlen)
11472 {
11473 len = c_strlen (len, 1);
11474 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11475 return;
11476 }
11477 else if (fcode == BUILT_IN_STRNCAT_CHK)
11478 {
11479 tree src = CALL_EXPR_ARG (exp, 1);
11480 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11481 return;
11482 src = c_strlen (src, 1);
11483 if (! src || ! tree_fits_uhwi_p (src))
11484 {
11485 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11486 exp, get_callee_fndecl (exp));
11487 return;
11488 }
11489 else if (tree_int_cst_lt (src, size))
11490 return;
11491 }
11492 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11493 return;
11494
11495 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11496 exp, get_callee_fndecl (exp));
11497 }
11498
11499 /* Emit warning if a buffer overflow is detected at compile time
11500 in __sprintf_chk/__vsprintf_chk calls. */
11501
11502 static void
11503 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11504 {
11505 tree size, len, fmt;
11506 const char *fmt_str;
11507 int nargs = call_expr_nargs (exp);
11508
11509 /* Verify the required arguments in the original call. */
11510
11511 if (nargs < 4)
11512 return;
11513 size = CALL_EXPR_ARG (exp, 2);
11514 fmt = CALL_EXPR_ARG (exp, 3);
11515
11516 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11517 return;
11518
11519 /* Check whether the format is a literal string constant. */
11520 fmt_str = c_getstr (fmt);
11521 if (fmt_str == NULL)
11522 return;
11523
11524 if (!init_target_chars ())
11525 return;
11526
11527 /* If the format doesn't contain % args or %%, we know its size. */
11528 if (strchr (fmt_str, target_percent) == 0)
11529 len = build_int_cstu (size_type_node, strlen (fmt_str));
11530 /* If the format is "%s" and the first ... argument is a string literal,
11531 we know it too. */
11532 else if (fcode == BUILT_IN_SPRINTF_CHK
11533 && strcmp (fmt_str, target_percent_s) == 0)
11534 {
11535 tree arg;
11536
11537 if (nargs < 5)
11538 return;
11539 arg = CALL_EXPR_ARG (exp, 4);
11540 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11541 return;
11542
11543 len = c_strlen (arg, 1);
11544 if (!len || ! tree_fits_uhwi_p (len))
11545 return;
11546 }
11547 else
11548 return;
11549
11550 if (! tree_int_cst_lt (len, size))
11551 warning_at (tree_nonartificial_location (exp),
11552 0, "%Kcall to %D will always overflow destination buffer",
11553 exp, get_callee_fndecl (exp));
11554 }
11555
11556 /* Emit warning if a free is called with address of a variable. */
11557
11558 static void
11559 maybe_emit_free_warning (tree exp)
11560 {
11561 tree arg = CALL_EXPR_ARG (exp, 0);
11562
11563 STRIP_NOPS (arg);
11564 if (TREE_CODE (arg) != ADDR_EXPR)
11565 return;
11566
11567 arg = get_base_address (TREE_OPERAND (arg, 0));
11568 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11569 return;
11570
11571 if (SSA_VAR_P (arg))
11572 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11573 "%Kattempt to free a non-heap object %qD", exp, arg);
11574 else
11575 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11576 "%Kattempt to free a non-heap object", exp);
11577 }
11578
11579 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11580 if possible. */
11581
11582 static tree
11583 fold_builtin_object_size (tree ptr, tree ost)
11584 {
11585 unsigned HOST_WIDE_INT bytes;
11586 int object_size_type;
11587
11588 if (!validate_arg (ptr, POINTER_TYPE)
11589 || !validate_arg (ost, INTEGER_TYPE))
11590 return NULL_TREE;
11591
11592 STRIP_NOPS (ost);
11593
11594 if (TREE_CODE (ost) != INTEGER_CST
11595 || tree_int_cst_sgn (ost) < 0
11596 || compare_tree_int (ost, 3) > 0)
11597 return NULL_TREE;
11598
11599 object_size_type = tree_to_shwi (ost);
11600
11601 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11602 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11603 and (size_t) 0 for types 2 and 3. */
11604 if (TREE_SIDE_EFFECTS (ptr))
11605 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11606
11607 if (TREE_CODE (ptr) == ADDR_EXPR)
11608 {
11609 bytes = compute_builtin_object_size (ptr, object_size_type);
11610 if (wi::fits_to_tree_p (bytes, size_type_node))
11611 return build_int_cstu (size_type_node, bytes);
11612 }
11613 else if (TREE_CODE (ptr) == SSA_NAME)
11614 {
11615 /* If object size is not known yet, delay folding until
11616 later. Maybe subsequent passes will help determining
11617 it. */
11618 bytes = compute_builtin_object_size (ptr, object_size_type);
11619 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11620 && wi::fits_to_tree_p (bytes, size_type_node))
11621 return build_int_cstu (size_type_node, bytes);
11622 }
11623
11624 return NULL_TREE;
11625 }
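
/* Editor's illustration (not original source): for an object whose size
   is known the fold is exact, e.g.

       char buf[64];
       __builtin_object_size (buf, 0)       =>  64
       __builtin_object_size (buf + 16, 0)  =>  48 */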
11626
11627 /* Builtins with folding operations that operate on "..." arguments
11628 need special handling; we need to store the arguments in a convenient
11629 data structure before attempting any folding. Fortunately there are
11630 only a few builtins that fall into this category. FNDECL is the
11631 function, EXP is the CALL_EXPR for the call. */
11632
11633 static tree
11634 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11635 {
11636 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11637 tree ret = NULL_TREE;
11638
11639 switch (fcode)
11640 {
11641 case BUILT_IN_FPCLASSIFY:
11642 ret = fold_builtin_fpclassify (loc, args, nargs);
11643 break;
11644
11645 default:
11646 break;
11647 }
11648 if (ret)
11649 {
11650 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11651 SET_EXPR_LOCATION (ret, loc);
11652 TREE_NO_WARNING (ret) = 1;
11653 return ret;
11654 }
11655 return NULL_TREE;
11656 }
11657
11658 /* Initialize format string characters in the target charset. */
11659
11660 bool
11661 init_target_chars (void)
11662 {
11663 static bool init;
11664 if (!init)
11665 {
11666 target_newline = lang_hooks.to_target_charset ('\n');
11667 target_percent = lang_hooks.to_target_charset ('%');
11668 target_c = lang_hooks.to_target_charset ('c');
11669 target_s = lang_hooks.to_target_charset ('s');
11670 if (target_newline == 0 || target_percent == 0 || target_c == 0
11671 || target_s == 0)
11672 return false;
11673
11674 target_percent_c[0] = target_percent;
11675 target_percent_c[1] = target_c;
11676 target_percent_c[2] = '\0';
11677
11678 target_percent_s[0] = target_percent;
11679 target_percent_s[1] = target_s;
11680 target_percent_s[2] = '\0';
11681
11682 target_percent_s_newline[0] = target_percent;
11683 target_percent_s_newline[1] = target_s;
11684 target_percent_s_newline[2] = target_newline;
11685 target_percent_s_newline[3] = '\0';
11686
11687 init = true;
11688 }
11689 return true;
11690 }
11691
11692 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11693 and no overflow/underflow occurred. INEXACT is true if M was not
11694 exactly calculated. TYPE is the tree type for the result. This
11695 function assumes that the caller cleared the MPFR flags before
11696 calculating M, so any flag set since then indicates a problem.
11697 Return NULL_TREE if any checks fail. */
11698
11699 static tree
11700 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11701 {
11702 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11703 overflow/underflow occurred. If -frounding-math, proceed iff the
11704 result of calling FUNC was exact. */
11705 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11706 && (!flag_rounding_math || !inexact))
11707 {
11708 REAL_VALUE_TYPE rr;
11709
11710 real_from_mpfr (&rr, m, type, GMP_RNDN);
11711 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11712 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11713 but the mpfr_t is not, then we underflowed in the
11714 conversion. */
11715 if (real_isfinite (&rr)
11716 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11717 {
11718 REAL_VALUE_TYPE rmode;
11719
11720 real_convert (&rmode, TYPE_MODE (type), &rr);
11721 /* Proceed iff the specified mode can hold the value. */
11722 if (real_identical (&rmode, &rr))
11723 return build_real (type, rmode);
11724 }
11725 }
11726 return NULL_TREE;
11727 }
11728
11729 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11730 number and no overflow/underflow occurred. INEXACT is true if M
11731 was not exactly calculated. TYPE is the tree type for the result.
11732 This function assumes that the caller cleared the MPFR flags
11733 before calculating M, so any flag set since then indicates a
11734 problem. Return NULL_TREE if any checks fail; if FORCE_CONVERT
11735 is true, bypass the checks. */
11736
11737 static tree
11738 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11739 {
11740 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11741 overflow/underflow occurred. If -frounding-math, proceed iff the
11742 result of calling FUNC was exact. */
11743 if (force_convert
11744 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11745 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11746 && (!flag_rounding_math || !inexact)))
11747 {
11748 REAL_VALUE_TYPE re, im;
11749
11750 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11751 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11752 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11753 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11754 but the mpfr_t is not, then we underflowed in the
11755 conversion. */
11756 if (force_convert
11757 || (real_isfinite (&re) && real_isfinite (&im)
11758 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11759 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11760 {
11761 REAL_VALUE_TYPE re_mode, im_mode;
11762
11763 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11764 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11765 /* Proceed iff the specified mode can hold the value. */
11766 if (force_convert
11767 || (real_identical (&re_mode, &re)
11768 && real_identical (&im_mode, &im)))
11769 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11770 build_real (TREE_TYPE (type), im_mode));
11771 }
11772 }
11773 return NULL_TREE;
11774 }
11775
11776 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11777 FUNC on it and return the resulting value as a tree with type TYPE.
11778 If MIN and/or MAX are not NULL, then the supplied ARG must be
11779 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11780 acceptable values, otherwise they are not. The mpfr precision is
11781 set to the precision of TYPE. We assume that function FUNC returns
11782 zero if the result could be calculated exactly within the requested
11783 precision. */
11784
11785 static tree
11786 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11787 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11788 bool inclusive)
11789 {
11790 tree result = NULL_TREE;
11791
11792 STRIP_NOPS (arg);
11793
11794 /* To proceed, MPFR must exactly represent the target floating point
11795 format, which only happens when the target base equals two. */
11796 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11797 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11798 {
11799 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11800
11801 if (real_isfinite (ra)
11802 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11803 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11804 {
11805 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11806 const int prec = fmt->p;
11807 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11808 int inexact;
11809 mpfr_t m;
11810
11811 mpfr_init2 (m, prec);
11812 mpfr_from_real (m, ra, GMP_RNDN);
11813 mpfr_clear_flags ();
11814 inexact = func (m, m, rnd);
11815 result = do_mpfr_ckconv (m, type, inexact);
11816 mpfr_clear (m);
11817 }
11818 }
11819
11820 return result;
11821 }
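
/* Editor's usage sketch (calls shown for illustration; the real call
   sites are in fold_builtin_1 elsewhere in this file): an unbounded
   function like sin passes no limits, while sqrt restricts its domain
   to non-negative arguments:

       do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, 0);
       do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true);
*/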
11822
11823 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11824 FUNC on it and return the resulting value as a tree with type TYPE.
11825 The mpfr precision is set to the precision of TYPE. We assume that
11826 function FUNC returns zero if the result could be calculated
11827 exactly within the requested precision. */
11828
11829 static tree
11830 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11831 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11832 {
11833 tree result = NULL_TREE;
11834
11835 STRIP_NOPS (arg1);
11836 STRIP_NOPS (arg2);
11837
11838 /* To proceed, MPFR must exactly represent the target floating point
11839 format, which only happens when the target base equals two. */
11840 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11841 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11842 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11843 {
11844 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11845 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11846
11847 if (real_isfinite (ra1) && real_isfinite (ra2))
11848 {
11849 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11850 const int prec = fmt->p;
11851 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11852 int inexact;
11853 mpfr_t m1, m2;
11854
11855 mpfr_inits2 (prec, m1, m2, NULL);
11856 mpfr_from_real (m1, ra1, GMP_RNDN);
11857 mpfr_from_real (m2, ra2, GMP_RNDN);
11858 mpfr_clear_flags ();
11859 inexact = func (m1, m1, m2, rnd);
11860 result = do_mpfr_ckconv (m1, type, inexact);
11861 mpfr_clears (m1, m2, NULL);
11862 }
11863 }
11864
11865 return result;
11866 }
11867
11868 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11869 FUNC on it and return the resulting value as a tree with type TYPE.
11870 The mpfr precision is set to the precision of TYPE. We assume that
11871 function FUNC returns zero if the result could be calculated
11872 exactly within the requested precision. */
11873
11874 static tree
11875 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11876 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11877 {
11878 tree result = NULL_TREE;
11879
11880 STRIP_NOPS (arg1);
11881 STRIP_NOPS (arg2);
11882 STRIP_NOPS (arg3);
11883
11884 /* To proceed, MPFR must exactly represent the target floating point
11885 format, which only happens when the target base equals two. */
11886 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11887 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11888 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11889 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11890 {
11891 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11892 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11893 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11894
11895 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11896 {
11897 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11898 const int prec = fmt->p;
11899 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11900 int inexact;
11901 mpfr_t m1, m2, m3;
11902
11903 mpfr_inits2 (prec, m1, m2, m3, NULL);
11904 mpfr_from_real (m1, ra1, GMP_RNDN);
11905 mpfr_from_real (m2, ra2, GMP_RNDN);
11906 mpfr_from_real (m3, ra3, GMP_RNDN);
11907 mpfr_clear_flags ();
11908 inexact = func (m1, m1, m2, m3, rnd);
11909 result = do_mpfr_ckconv (m1, type, inexact);
11910 mpfr_clears (m1, m2, m3, NULL);
11911 }
11912 }
11913
11914 return result;
11915 }
11916
11917 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11918 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11919 If ARG_SINP and ARG_COSP are NULL then the result is returned
11920 as a complex value.
11921 The type is taken from the type of ARG and is used for setting the
11922 precision of the calculation and results. */
11923
11924 static tree
11925 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11926 {
11927 tree const type = TREE_TYPE (arg);
11928 tree result = NULL_TREE;
11929
11930 STRIP_NOPS (arg);
11931
11932 /* To proceed, MPFR must exactly represent the target floating point
11933 format, which only happens when the target base equals two. */
11934 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11935 && TREE_CODE (arg) == REAL_CST
11936 && !TREE_OVERFLOW (arg))
11937 {
11938 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11939
11940 if (real_isfinite (ra))
11941 {
11942 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11943 const int prec = fmt->p;
11944 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11945 tree result_s, result_c;
11946 int inexact;
11947 mpfr_t m, ms, mc;
11948
11949 mpfr_inits2 (prec, m, ms, mc, NULL);
11950 mpfr_from_real (m, ra, GMP_RNDN);
11951 mpfr_clear_flags ();
11952 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11953 result_s = do_mpfr_ckconv (ms, type, inexact);
11954 result_c = do_mpfr_ckconv (mc, type, inexact);
11955 mpfr_clears (m, ms, mc, NULL);
11956 if (result_s && result_c)
11957 {
11958 /* If we are to return a complex value, do so. */
11959 if (!arg_sinp && !arg_cosp)
11960 return build_complex (build_complex_type (type),
11961 result_c, result_s);
11962
11963 /* Dereference the sin/cos pointer arguments. */
11964 arg_sinp = build_fold_indirect_ref (arg_sinp);
11965 arg_cosp = build_fold_indirect_ref (arg_cosp);
11966 /* Proceed if valid pointer types were passed in. */
11967 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11968 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11969 {
11970 /* Set the values. */
11971 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
11972 result_s);
11973 TREE_SIDE_EFFECTS (result_s) = 1;
11974 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
11975 result_c);
11976 TREE_SIDE_EFFECTS (result_c) = 1;
11977 /* Combine the assignments into a compound expr. */
11978 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11979 result_s, result_c));
11980 }
11981 }
11982 }
11983 }
11984 return result;
11985 }
11986
11987 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11988 two-argument mpfr order N Bessel function FUNC on them and return
11989 the resulting value as a tree with type TYPE. The mpfr precision
11990 is set to the precision of TYPE. We assume that function FUNC
11991 returns zero if the result could be calculated exactly within the
11992 requested precision. */
11993 static tree
11994 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
11995 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
11996 const REAL_VALUE_TYPE *min, bool inclusive)
11997 {
11998 tree result = NULL_TREE;
11999
12000 STRIP_NOPS (arg1);
12001 STRIP_NOPS (arg2);
12002
12003 /* To proceed, MPFR must exactly represent the target floating point
12004 format, which only happens when the target base equals two. */
12005 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12006 && tree_fits_shwi_p (arg1)
12007 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12008 {
12009 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12010 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12011
12012 if (n == (long)n
12013 && real_isfinite (ra)
12014 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12015 {
12016 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12017 const int prec = fmt->p;
12018 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12019 int inexact;
12020 mpfr_t m;
12021
12022 mpfr_init2 (m, prec);
12023 mpfr_from_real (m, ra, GMP_RNDN);
12024 mpfr_clear_flags ();
12025 inexact = func (m, n, m, rnd);
12026 result = do_mpfr_ckconv (m, type, inexact);
12027 mpfr_clear (m);
12028 }
12029 }
12030
12031 return result;
12032 }
12033
12034 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12035 the pointer *(ARG_QUO) and return the result. The type is taken
12036 from the type of ARG0 and is used for setting the precision of the
12037 calculation and results. */
12038
static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, reduce the quo value modulo
                 2^(INT_TYPE_SIZE - 1) so that it fits in the target
                 int with one bit left for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
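
/* A worked example (illustrative only): folding remquo (8.0, 3.0, &q)
   reaches here with RA0 == 8.0 and RA1 == 3.0.  Since 8/3 rounds to
   the nearest integer 3, mpfr_remquo leaves -1 in M0 and 3 in
   INTEGER_QUO, and the returned tree is a COMPOUND_EXPR that stores 3
   through the quo pointer and yields -1.0 as the remainder.  */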

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
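
/* An illustrative fold (not an actual call site): for
   lgamma_r (-2.5, &sg), mpfr_lgamma computes log |Gamma (-2.5)| and
   reports the sign of Gamma (-2.5); Gamma is negative on (-3, -2), so
   SG becomes -1 and the combined tree assigns -1 through the signgam
   pointer before yielding the lgamma value.  */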

/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt
            = REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}
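
/* A usage sketch (hypothetical call site; the real callers live in the
   fold_builtin_* routines): a constant call such as csin (z) is folded
   by passing the matching one-argument mpc entry point:

     folded = do_mpc_arg1 (arg, type, mpc_sin);

   mpc_sin has the (mpc_ptr, mpc_srcptr, mpc_rnd_t) signature expected
   for FUNC; mpc_cos, mpc_tan, mpc_exp and friends fit the same slot.  */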

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt
            = REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
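
/* A usage sketch (hypothetical call site): the two-argument form suits
   cpow, with mpc_pow matching the four-argument FUNC signature:

     folded = do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0,
                           mpc_pow);

   Passing a nonzero DO_NONFINITE additionally folds operands or
   results containing Inf or NaN, as described above.  */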

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
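
/* A sketch of the intended use (assumed caller shape; the actual
   callers are the gimple folding passes): a pass holding a builtin
   call statement can try

     tree folded = fold_call_stmt (call_stmt, /*ignore=*/ false);
     if (folded)
       ...replace uses of the call's result with FOLDED...

   relying on the location propagation above so that later diagnostics
   still point at the original call.  */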

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}
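
/* For example (an illustrative declaration, not from this file), a
   translation unit containing

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       asm ("__my_memcpy");

   reaches this function with ASMSPEC == "__my_memcpy"; both the
   builtin decl and the memcpy libfunc are then redirected, so
   compiler-generated block moves also call __my_memcpy.  */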

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e. it will
   most probably be expanded inline into reasonably simple code.  This
   is a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
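
/* A sketch of how these predicates are meant to be consumed (assumed
   caller shape; inlining heuristics are the natural client): when
   sizing up a call, a cost model can treat simple builtins as free and
   inexpensive ones as roughly a single instruction:

     if (is_simple_builtin (decl))
       cost = 0;
     else if (is_inexpensive_builtin (decl))
       cost = 1;

   COST here is an illustrative variable, not one defined in this
   file.  */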